Example #1
   // Use this for initialization
   void Start()
   {
      String[] names = new string[] {"eng.cube.bigrams", "eng.cube.fold", "eng.cube.lm", "eng.cube.nn", "eng.cube.params", "eng.cube.size", "eng.cube.word-freq", "eng.tesseract_cube.nn", "eng.traineddata"};
      String outputPath = Path.Combine(Application.persistentDataPath, "tessdata");
      if (!Directory.Exists(outputPath))
         Directory.CreateDirectory(outputPath);
      
      foreach (String n in names)
      {
         TextAsset textAsset = Resources.Load<TextAsset>(Path.Combine("tessdata", n));  
         String filePath = Path.Combine(outputPath, n);
#if UNITY_METRO
         UnityEngine.Windows.File.WriteAllBytes(filePath, textAsset.bytes);
#else
         if (!File.Exists(filePath))
            File.WriteAllBytes(filePath, textAsset.bytes);
#endif
      }

      _ocr = new Tesseract(outputPath, "eng", OcrEngineMode.TesseractCubeCombined);

      Debug.Log("OCR engine loaded.");

      Image<Bgr, Byte> img = new Image<Bgr, byte>(480, 200);
      
      String message = "Hello, World";
      CvInvoke.PutText(img, message, new Point(50, 100), Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, new MCvScalar(255, 255, 255));

      _ocr.Recognize(img);


      Tesseract.Character[] characters = _ocr.GetCharacters();
      foreach (Tesseract.Character c in characters)
      {
         CvInvoke.Rectangle(img, c.Region, new MCvScalar(255, 0, 0));
      }

      String messageOcr = _ocr.GetText().TrimEnd('\n', '\r'); // remove trailing newline characters from the OCR-ed text
      Debug.Log("Detected text: " + messageOcr);

      Texture2D texture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);

      this.GetComponent<GUITexture>().texture = texture;
      this.GetComponent<GUITexture>().pixelInset = new Rect(-img.Width / 2, -img.Height / 2, img.Width, img.Height);
   }
Example #2
 private void btnAnalyzeImage_Click(object sender, EventArgs e)
 {
     if(picBox.Image == null)
     {
         MessageBox.Show("Load an image first!");
     }
     else
     {
         statusLabelOCR.Text = "Analyzing invoice image...";
         Task.Run(() =>
         {
             using (var img = new Image<Bgr, byte>(path))
             {
                 string tessdata = Environment.GetEnvironmentVariable("EMGU_ROOT") + @"\bin\tessdata";
                 using (var ocrProvider = new Tesseract(tessdata, "eng", OcrEngineMode.TesseractCubeCombined))
                 {
                     ocrProvider.Recognize(img);
                     string text = ocrProvider.GetText().TrimEnd();
                     rtbOcrResult.Invoke((MethodInvoker)delegate
                                        {
                                            statusLabelOCR.Text = "Analysis completed.";
                                            rtbOcrResult.AppendText(text);
                                        });
                     
                 }
             }
         });
         
     }
 }
Example #3
 static void Main(string[] args)
 {
     using (var image = new Image<Bgr, byte>(Path.GetFullPath("text.png")))
     {
         using (var tess = new Tesseract("", "eng", OcrEngineMode.TesseractCubeCombined))
         {
             tess.Recognize(image);
             var text = tess.GetText().TrimEnd(); // the recognized text (not used further in this snippet)
         }
     }
 }
Example #4
 public static Tesseract.Charactor[] DoOcr(Image<Gray, byte> img)
 {
     Tesseract.Charactor[] chars;
     string text;
     using (var tesseract = new Tesseract("tessdata", "eng", Tesseract.OcrEngineMode.OEM_TESSERACT_CUBE_COMBINED))
     {
         tesseract.Recognize(img);
         text = tesseract.GetText();
         chars = tesseract.GetCharactors();
     }
     Console.WriteLine(text);
     Console.ReadLine();
     return chars;
 }
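Example #4 targets the older Emgu CV 2.x API (Tesseract.Charactor, GetCharactors, OEM_TESSERACT_CUBE_COMBINED). As a point of comparison, a minimal sketch of the same routine against the 3.x-style names used in Examples #1 and #7 (Tesseract.Character, GetCharacters, OcrEngineMode.TesseractCubeCombined) might look like the following; a local "tessdata" folder containing eng.traineddata is assumed, as above.

 // Sketch only: the routine above rewritten against the Emgu CV 3.x API names
 // used in Examples #1 and #7. Assumes a local "tessdata" folder containing
 // eng.traineddata.
 public static Tesseract.Character[] DoOcr(Image<Gray, byte> img)
 {
     Tesseract.Character[] chars;
     string text;
     using (var tesseract = new Tesseract("tessdata", "eng", OcrEngineMode.TesseractCubeCombined))
     {
         tesseract.Recognize(img);
         text = tesseract.GetText();
         chars = tesseract.GetCharacters();
     }
     Console.WriteLine(text);
     return chars;
 }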
Example #5
        // main image-processing operations
        public void ocr()
        {
            // open the source file
            FileStream srcstream = new FileStream(pic_file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            // create a bitmap from the stream
            Bitmap source = new Bitmap(srcstream);
            // adjust the web form controls

            Panel1.Visible = false;
            Image1.Dispose();

            Label2.Text = "Processing...";
            Panel3.Visible = true;

            // Preparation code
            Bitmap ext = source;

            //AForge.Imaging.Filters.
            // Convert the image to grayscale - test with images of different colour schemes (devise a way to derive the filter parameters from the photo's RGB)
            AForge.Imaging.Filters.Grayscale grScl = new AForge.Imaging.Filters.Grayscale(0.2125, 0.7154, 0.0721); // BT.709 luminance coefficients
            source = grScl.Apply(source);
            // Increase contrast
            AForge.Imaging.Filters.ContrastStretch conCor = new AForge.Imaging.Filters.ContrastStretch();

            source = conCor.Apply(source);
            // Sharpen
            AForge.Imaging.Filters.Sharpen shp = new AForge.Imaging.Filters.Sharpen();
            source = shp.Apply(source);

            //Segmentation code
            bool procesed = false;
               // Image2.Width = 350;
               // Image2.Height = (int)((source.Height * 200) / source.Width);

            try
            {
                Emgu.CV.Image<Bgr, Byte> to_rec = new Emgu.CV.Image<Bgr, byte>(source);
                Do_ocr = new Tesseract("tessdata", "eng", Tesseract.OcrEngineMode.OEM_DEFAULT);
                try
                {
                    Do_ocr.Recognize<Bgr>(to_rec);
                    //recognizedText.Text = ocr.GetText();
                    PastOCRBox.Text = Do_ocr.GetText();
                   // StatusBox.Text = "Finished! Ready for next one...";
                    Do_ocr.Dispose();
                    to_rec.Dispose();
                }
                catch (Exception exp)
                {
                    Label2.Text = "Recognition error! " + exp.Message;
                    Do_ocr.Dispose();
                    return;
                }
            }
            catch (Exception exp)
            {
                Label2.Text = "OCR engine failed! " + exp.Message;
                return;
            }

            // clean up temporary files

              //  source.Save("D:\\test.bmp");
              //  ext.Save("D:\\testcor.bmp");
            source.Dispose();
            srcstream.Close();
            srcstream.Dispose();
            //System.IO.File.Delete(pic_file);
            System.IO.File.Delete(Server.MapPath("~/img/prev.bmp"));
            System.IO.File.Delete(Server.MapPath("~/img/tmp.bmp"));
            // prepare the page layout after recognition
            Panel3.Visible = false;
            Label1.Visible = false;
            Panel0.Visible = false;
            Panel5.Visible = false;

            Panel4.Visible = true;
        }
Example #6
        private void server()
        {
            int port = 8001;

            IPAddress ipAd = IPAddress.Parse(ip);

            // use local m/c IP address, and
            // use the same in the client
            ASCIIEncoding asen = new ASCIIEncoding();
            /* Initializes the Listener */
            TcpListener myList = new TcpListener(IPAddress.Any, port);

            Console.WriteLine("The server is running at port 8001...");
            Console.WriteLine("The local End point is  :" +
                              myList.LocalEndpoint);
            /* Start listening at the specified port */

            byte[] b = new byte[2000000];
            myList.Start();

            while (!stoped)
            {
                    char[] recived = new char[2000000];
                    Console.WriteLine("Waiting for a connection.....");

                    TcpClient s = myList.AcceptTcpClient();
                    Console.WriteLine("Connection accepted " +s.Client.RemoteEndPoint);
                    NetworkStream stream = s.GetStream();

                    var reader = new StreamReader(stream);

                    var k = stream.Read(b, 0, 2000000);

                    Console.WriteLine("Recieved...");

                    ImageConverter conv = new ImageConverter();
                    Bitmap rec = new Bitmap((Bitmap)conv.ConvertFrom(b));

                    Console.WriteLine("Recognicion...");
                    Tesseract ocr = new Tesseract("tessdata", "pol+eng", Tesseract.OcrEngineMode.OEM_DEFAULT);

                    ocr.Recognize<Bgr>(new Emgu.CV.Image<Bgr, Byte>(rec));

                    string to_send_str = ocr.GetText();
                    ocr.Dispose();
                    Console.Write(to_send_str + "\n");

                    stream.Write(asen.GetBytes(to_send_str),0,to_send_str.Length);

                    Console.WriteLine("\nSent Acknowledgement");
                    /* clean up */
                    s.Close();

               }
            myList.Stop();
            Console.WriteLine("Server stopped");
        }
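The server above reads the raw bytes of an image from the socket in a single Read call and answers with the recognized text encoded as ASCII. A minimal client sketch under those assumptions could look like the following; the host address 127.0.0.1 and the file name invoice.png are placeholders. Note that both sides rely on the whole image arriving in one Read, which is only a safe bet for small images on a fast local link.

        // Hypothetical client for the server above: sends the raw bytes of an
        // image file and prints the ASCII reply (the OCR-ed text).
        private static void SendImageForOcr()
        {
            byte[] imageBytes = File.ReadAllBytes("invoice.png");       // placeholder input file

            using (TcpClient client = new TcpClient("127.0.0.1", 8001)) // same port as the server
            using (NetworkStream stream = client.GetStream())
            {
                stream.Write(imageBytes, 0, imageBytes.Length);

                byte[] reply = new byte[2000000];
                int read = stream.Read(reply, 0, reply.Length);
                Console.WriteLine(Encoding.ASCII.GetString(reply, 0, read));
            }
        }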
Example #7
        private void FindLicensePlate(
            VectorOfVectorOfPoint contours,
            int[,] hierachy,
            int idx,
            IInputArray gray,
            IInputArray canny,
            List <IInputOutputArray> licensePlateImagesList,
            List <IInputOutputArray> filteredLicensePlateImagesList,
            List <RotatedRect> detectedLicensePlateRegionList,
            List <String> licenses)
        {
            if (hierachy.Length != 0)
            {
                for (; idx >= 0; idx = hierachy[idx, 0])
                {
                    int numberOfChildren = GetNumberOfChildren(hierachy, idx);
                    //if it does not contain any children (characters), it is not a license plate region
                    if (numberOfChildren == 0)
                    {
                        continue;
                    }

                    using (VectorOfPoint contour = contours[idx])
                    {
                        if (CvInvoke.ContourArea(contour) > 400)
                        {
                            if (numberOfChildren < 3)
                            {
                                //If the contour has fewer than 3 children, it is not a license plate (assuming a license plate has at least 3 characters)
                                //However we should search the children of this contour to see if any of them is a license plate
                                FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                                                 filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                                continue;
                            }

                            RotatedRect box = CvInvoke.MinAreaRect(contour);
                            if (box.Angle < -45.0)
                            {
                                float tmp = box.Size.Width;
                                box.Size.Width  = box.Size.Height;
                                box.Size.Height = tmp;
                                box.Angle      += 90.0f;
                            }
                            else if (box.Angle > 45.0)
                            {
                                float tmp = box.Size.Width;
                                box.Size.Width  = box.Size.Height;
                                box.Size.Height = tmp;
                                box.Angle      -= 90.0f;
                            }

                            double whRatio = (double)box.Size.Width / box.Size.Height;
                            if (!(3.0 < whRatio && whRatio < 10.0))
                            //if (!(1.0 < whRatio && whRatio < 2.0))
                            {
                                //if the width-height ratio is not in the specified range, it is not a license plate
                                //However we should search the children of this contour to see if any of them is a license plate
                                //Contour<Point> child = contours.VNext;
                                if (hierachy[idx, 2] > 0)
                                {
                                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                                                     filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                                }
                                continue;
                            }

                            using (UMat tmp1 = new UMat())
                                using (UMat tmp2 = new UMat())
                                {
                                    PointF[] srcCorners = box.GetVertices();

                                    PointF[] destCorners = new PointF[] {
                                        new PointF(0, box.Size.Height - 1),
                                        new PointF(0, 0),
                                        new PointF(box.Size.Width - 1, 0),
                                        new PointF(box.Size.Width - 1, box.Size.Height - 1)
                                    };

                                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                                    {
                                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                                    }

                                    //resize the license plate such that the font is ~ 10-12 pt; this font size gives better accuracy from tesseract
                                    Size   approxSize = new Size(240, 180);
                                    double scale      = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                                    Size   newSize    = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);

                                    //removes some pixels from the edge
                                    int       edgePixelSize = 3;
                                    Rectangle newRoi        = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                                                                            tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                                    UMat plate = new UMat(tmp2, newRoi);

                                    UMat filteredPlate = FilterPlate(plate);

                                    //Tesseract.Character[] words;
                                    StringBuilder strBuilder = new StringBuilder();
                                    using (UMat tmp = filteredPlate.Clone())
                                    {
                                        Emgu.CV.OCR.Tesseract.Character[] words;

                                        _ocr.Recognize(tmp);
                                        strBuilder.Append(_ocr.GetText());

                                        words = _ocr.GetCharacters();

                                        if (words.Length == 0)
                                        {
                                            continue;
                                        }

                                        for (int i = 0; i < words.Length; i++)
                                        {
                                            strBuilder.Append(words[i].Text);
                                        }
                                    }

                                    licenses.Add(strBuilder.ToString());

                                    //license plate images
                                    licensePlateImagesList.Add(plate);
                                    filteredLicensePlateImagesList.Add(filteredPlate);
                                    detectedLicensePlateRegionList.Add(box);
                                }
                        }
                    }
                }
            }
        }
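FindLicensePlate walks a contour tree extracted from a Canny edge image. A minimal sketch of a possible caller, assuming the Emgu CV 3.x CvInvoke.FindContourTree helper and the _ocr, FilterPlate and GetNumberOfChildren members referenced above, might look like this:

        // Sketch of a possible caller: builds the grayscale image, the Canny edge
        // map and the contour tree that FindLicensePlate expects, then starts the
        // recursion at the first top-level contour (index 0).
        public void DetectLicensePlate(
            IInputArray img,
            List<IInputOutputArray> licensePlateImagesList,
            List<IInputOutputArray> filteredLicensePlateImagesList,
            List<RotatedRect> detectedLicensePlateRegionList,
            List<String> licenses)
        {
            using (UMat gray = new UMat())
            using (UMat canny = new UMat())
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);
                CvInvoke.Canny(gray, canny, 100, 50, 3, false);
                int[,] hierachy = CvInvoke.FindContourTree(canny, contours, ChainApproxMethod.ChainApproxSimple);
                FindLicensePlate(contours, hierachy, 0, gray, canny, licensePlateImagesList,
                                 filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
            }
        }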