Example No. 1
        protected override void StartLoop(NativeActivityContext context)
        {
            // var match = Element.Get(context);
            var    wordlimit     = WordLimit.Get(context);
            var    lang          = Config.local.ocrlanguage;
            var    casesensitive = CaseSensitive.Get(context);
            string basepath      = Interfaces.Extensions.DataDirectory;
            string path          = System.IO.Path.Combine(basepath, "tessdata");

            ocr.TesseractDownloadLangFile(path, Config.local.ocrlanguage);
            ocr.TesseractDownloadLangFile(path, "osd");
            var ele = Element.Get(context);

            // ele.element.Save(@"c:\temp\dump.png", System.Drawing.Imaging.ImageFormat.Png);

            // var result = ocr.GetTextcomponents(path, Config.local.ocrlanguage, ele.element);
            // var result = ocr.GetTextcomponents(path, Config.local.ocrlanguage, @"c:\temp\dump.png");

            ImageElement[] result;
            var            _ocr = new Emgu.CV.OCR.Tesseract(path, lang.ToString(), Emgu.CV.OCR.OcrEngineMode.TesseractLstmCombined);

            _ocr.Init(path, lang.ToString(), Emgu.CV.OCR.OcrEngineMode.TesseractLstmCombined);
            _ocr.PageSegMode = Emgu.CV.OCR.PageSegMode.SparseText;

            // OpenRPA.Interfaces.Image.Util.SaveImageStamped(ele.element, "OCR");
            Bitmap sourceimg = null;

            if (ele is ImageElement)
            {
                sourceimg = ((ImageElement)ele).element;
            }
            else
            {
                sourceimg = Interfaces.Image.Util.Screenshot(ele.Rectangle.X, ele.Rectangle.Y, ele.Rectangle.Width, ele.Rectangle.Height);
            }
            using (var img = new Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte>(sourceimg))
            {
                result = ocr.OcrImage2(_ocr, img.Mat, wordlimit, casesensitive);
            }
            Log.Debug("adding element cords to results: " + ele.Rectangle.ToString());
            foreach (var R in result)
            {
                var rect = new System.Drawing.Rectangle(R.Rectangle.X + ele.Rectangle.X, R.Rectangle.Y + ele.Rectangle.Y, R.Rectangle.Width, R.Rectangle.Height);
                R.Rectangle = rect;
                Log.Debug("Found: '" + R.Text + "' at " + R.Rectangle.ToString());
            }
            context.SetValue(Result, result);

            IEnumerator<ImageElement> _enum = result.ToList().GetEnumerator();

            context.SetValue(_elements, _enum);
            bool more = _enum.MoveNext();

            if (more)
            {
                IncIndex(context);
                SetTotal(context, result.Length);
                context.ScheduleAction(Body, _enum.Current, OnBodyComplete);
            }
        }
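
OcrImage2 is OpenRPA's own helper; below is a minimal, self-contained sketch of the underlying Emgu CV Tesseract call flow it builds on, using the same Emgu.CV.OCR types the activity already references. The word grouping and the wordlimit/casesensitive filtering that OcrImage2 applies are not reproduced here, so treat this as an illustration under those assumptions rather than the library's actual implementation.

using System;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.OCR;
using Emgu.CV.Structure;

static class TesseractOcrSketch
{
    // Runs sparse-text OCR on a bitmap and prints each recognized symbol
    // with its bounding box (coordinates are relative to the image).
    public static void Recognize(string tessdataPath, string language, Bitmap source)
    {
        using (var engine = new Tesseract(tessdataPath, language, OcrEngineMode.TesseractLstmCombined))
        using (var img = new Image<Bgr, byte>(source))
        {
            engine.PageSegMode = PageSegMode.SparseText;
            engine.SetImage(img);
            engine.Recognize();

            // Full recognized text, then the per-symbol results.
            Console.WriteLine(engine.GetUTF8Text());
            foreach (Tesseract.Character ch in engine.GetCharacters())
            {
                // The activity above offsets each Region by ele.Rectangle
                // to turn image coordinates into screen coordinates.
                Console.WriteLine($"'{ch.Text}' at {ch.Region} (cost {ch.Cost})");
            }
        }
    }
}
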
Example No. 2
        protected override void Execute(NativeActivityContext context)
        {
            // var match = Element.Get(context);
            var    wordlimit     = WordLimit.Get(context);
            var    lang          = Config.local.ocrlanguage;
            var    casesensitive = CaseSensitive.Get(context);
            var    isfrontside   = IsFrontSide.Get(context);
            string basepath      = Interfaces.Extensions.DataDirectory;
            string path          = System.IO.Path.Combine(basepath, "tessdata");
            //ocr.TesseractDownloadLangFile(path, Config.local.ocrlanguage);
            //ocr.TesseractDownloadLangFile(path, "osd");
            var ele = Element.Get(context);

            // ele.element.Save(@"c:\temp\dump.png", System.Drawing.Imaging.ImageFormat.Png);

            // var result = ocr.GetIdTextcomponents(path, Config.local.ocrlanguage, ele.element);
            // var result = ocr.GetIdTextcomponents(path, Config.local.ocrlanguage, @"c:\temp\dump.png");

            ImageElement[] result;
            // Baidu OCR
            var _ocr = new Baidu.Aip.Ocr.Ocr(ocr.API_KEY, ocr.SECRET_KEY);

            _ocr.Timeout = 60000; // set the request timeout in milliseconds
            //_ocr.Init(path, lang.ToString(), Emgu.CV.OCR.OcrEngineMode.TesseractLstmCombined);
            //_ocr.PageSegMode = Emgu.CV.OCR.PageSegMode.SparseText;

            // OpenRPA.Interfaces.Image.Util.SaveImageStamped(ele.element, "OCR");
            Bitmap sourceimg = null;

            if (ele is ImageElement)
            {
                // The element passed in is already an image
                sourceimg = ((ImageElement)ele).element;
            }
            else
            {
                // Not an image element, so take a screenshot of its bounding rectangle
                sourceimg = Interfaces.Image.Util.Screenshot(ele.Rectangle.X, ele.Rectangle.Y, ele.Rectangle.Width, ele.Rectangle.Height);
            }
            String idCardSide;

            if (isfrontside)
            {
                idCardSide = "front";
            }
            else
            {
                idCardSide = "back";
            }
            byte[] imageBytes;

            using (var ms = new MemoryStream())
            {
                sourceimg.Save(ms, System.Drawing.Imaging.ImageFormat.Bmp);
                // ToArray() returns only the written bytes; GetBuffer() can include unused trailing capacity.
                imageBytes = ms.ToArray();
            }

            var word_results = _ocr.Idcard(imageBytes, idCardSide);

            context.SetValue(Result, word_results["words_result"]);

            var image_status = word_results["image_status"].ToString();

            context.SetValue(OcrStatus, image_status);

            //IEnumerator<ImageElement> _enum = result.ToList().GetEnumerator();
            //context.SetValue(_elements, _enum);
            //bool more = _enum.MoveNext();
            //if (more)
            //{
            //    context.ScheduleAction(Body, _enum.Current, OnBodyComplete);
            //}
        }
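
A minimal sketch of consuming the JObject that _ocr.Idcard returns above (the Baidu.Aip SDK exposes Newtonsoft.Json.Linq types). The "image_status" and "words_result" keys are the same ones the activity reads; the per-field "words" entry is assumed from Baidu's documented ID-card response shape, so verify against the current API docs.

using System;
using Newtonsoft.Json.Linq;

static class IdcardResponseSketch
{
    // Prints the image status and each recognized field from an ID-card
    // OCR response, e.g. the JObject returned by _ocr.Idcard(...) above.
    public static void PrintFields(JObject response)
    {
        // "normal" indicates the image was accepted as a valid card side.
        Console.WriteLine("image_status: " + response["image_status"]);

        if (response["words_result"] is JObject fields)
        {
            foreach (JProperty field in fields.Properties())
            {
                // Each entry is assumed to look like { "words": "<recognized text>" }.
                Console.WriteLine(field.Name + ": " + field.Value["words"]);
            }
        }
    }
}
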