Example #1
        public static Newtonsoft.Json.Linq.JObject Execute(IElement ele, System.Activities.Presentation.Model.ModelItem model)
        {
            var wordlimit     = model.GetValue<string>("WordLimit");
            var casesensitive = model.GetValue<bool>("CaseSensitive");
            var isfrontside   = model.GetValue<bool>("IsFrontSide");
            var lang          = Config.local.ocrlanguage;


            string basepath = Interfaces.Extensions.DataDirectory;


            ImageElement[] result = null;
            var            _ocr   = new Baidu.Aip.Ocr.Ocr(ocr.API_KEY, ocr.SECRET_KEY);

            _ocr.Timeout = 60000; // request timeout in milliseconds


            // OpenRPA.Interfaces.Image.Util.SaveImageStamped(ele.element, "OCR");
            Bitmap sourceimg = null;

            // Use the element's own bitmap when it is already an ImageElement;
            // otherwise take a screenshot of the element's bounding rectangle.
            if (ele is ImageElement)
            {
                sourceimg = ((ImageElement)ele).element;
            }
            else
            {
                sourceimg = Interfaces.Image.Util.Screenshot(ele.Rectangle.X, ele.Rectangle.Y, ele.Rectangle.Width, ele.Rectangle.Height);
            }

            // Baidu's Idcard endpoint takes the card side as "front" (the photo side)
            // or "back" (the national-emblem side).
            String idCardSide;

            if (isfrontside)
            {
                idCardSide = "front";
            }
            else
            {
                idCardSide = "back";
            }
            byte[] imageBytes;
            using (var ms = new MemoryStream())
            {
                sourceimg.Save(ms, System.Drawing.Imaging.ImageFormat.Bmp);
                // ToArray() copies only the bytes actually written; GetBuffer() can return
                // a larger internal buffer padded with unused trailing bytes.
                imageBytes = ms.ToArray();
            }

            // Call Baidu's ID-card OCR endpoint; the SDK returns the parsed JSON response.
            var word_results = _ocr.Idcard(imageBytes, idCardSide);

            Log.Debug("idcard OCR result: " + word_results);

            return word_results;
        }
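
For reference, a minimal consumer of the returned JObject might look like the sketch below. It assumes the standard Baidu ID-card response layout, in which a words_result object maps each recognized field name to an entry carrying a words value; the PrintIdCardFields helper is hypothetical and not part of the example above.

        using System;
        using Newtonsoft.Json.Linq;

        public static void PrintIdCardFields(JObject response)
        {
            // Each property under "words_result" is one recognized field on the card
            // (e.g. name or ID number), with the recognized text in "words".
            if (!(response["words_result"] is JObject fields))
            {
                return;
            }
            foreach (var field in fields.Properties())
            {
                Console.WriteLine(field.Name + ": " + (field.Value as JObject)?["words"]);
            }
        }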
Example #2
        public static ImageElement[] Execute(IElement ele, System.Activities.Presentation.Model.ModelItem model)
        {
            var wordlimit     = model.GetValue<string>("WordLimit");
            var casesensitive = model.GetValue<bool>("CaseSensitive");
            var lang          = Config.local.ocrlanguage;

            string basepath = Interfaces.Extensions.DataDirectory;
            string path     = System.IO.Path.Combine(basepath, "tessdata");

            // Make sure the tessdata files for the configured language (and OSD) are available locally.
            ocr.TesseractDownloadLangFile(path, Config.local.ocrlanguage);
            ocr.TesseractDownloadLangFile(path, "osd");

            ImageElement[] result;
            var            _ocr = new Emgu.CV.OCR.Tesseract(path, lang.ToString(), Emgu.CV.OCR.OcrEngineMode.TesseractLstmCombined);

            _ocr.Init(path, lang.ToString(), Emgu.CV.OCR.OcrEngineMode.TesseractLstmCombined);
            // SparseText: find as much text as possible, in no particular order.
            _ocr.PageSegMode = Emgu.CV.OCR.PageSegMode.SparseText;

            // OpenRPA.Interfaces.Image.Util.SaveImageStamped(ele.element, "OCR");
            Bitmap sourceimg = null;

            // Use the element's own bitmap when it is already an ImageElement;
            // otherwise take a screenshot of the element's bounding rectangle.
            if (ele is ImageElement)
            {
                sourceimg = ((ImageElement)ele).element;
            }
            else
            {
                sourceimg = Interfaces.Image.Util.Screenshot(ele.Rectangle.X, ele.Rectangle.Y, ele.Rectangle.Width, ele.Rectangle.Height);
            }
            using (var img = new Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte>(sourceimg))
            {
                // Run OCR over the captured image, applying the word-limit and case-sensitivity settings.
                result = ocr.OcrImage2(_ocr, img.Mat, wordlimit, casesensitive);
            }
            Log.Debug("adding element cords to results: " + ele.Rectangle.ToString());
            foreach (var R in result)
            {
                var rect = new System.Drawing.Rectangle(R.Rectangle.X + ele.Rectangle.X, R.Rectangle.Y + ele.Rectangle.Y, R.Rectangle.Width, R.Rectangle.Height);
                R.Rectangle = rect;
                Log.Debug("Found: '" + R.Text + "' at " + R.Rectangle.ToString());
            }
            return(result);
        }
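
Because the loop above has already offset every rectangle by the element's position, the returned hits are in screen coordinates, so a caller could pick a specific word and derive a click point directly. A minimal sketch follows; it relies only on the Text and Rectangle members used in the loop above, and CenterOfMatch is a hypothetical helper name.

        using System;
        using System.Drawing;
        using System.Linq;

        public static Point? CenterOfMatch(ImageElement[] results, string text, bool casesensitive)
        {
            var comparison = casesensitive ? StringComparison.Ordinal : StringComparison.OrdinalIgnoreCase;
            // Pick the first OCR hit whose recognized text matches the requested word.
            var hit = results.FirstOrDefault(r => string.Equals(r.Text, text, comparison));
            if (hit == null)
            {
                return null;
            }
            // The rectangles were already translated into screen coordinates above,
            // so the center point can be used directly (e.g. as a click target).
            return new Point(hit.Rectangle.X + hit.Rectangle.Width / 2,
                             hit.Rectangle.Y + hit.Rectangle.Height / 2);
        }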