public Mat OtsuThresh(Mat mat)
        {
            Mat output = new Mat();

            // Otsu derives the threshold from the image histogram, so the 0 passed here is ignored
            Imgproc.Threshold(mat, output, 0, 255, Imgproc.ThreshBinary + Imgproc.ThreshOtsu);
            return(output);
        }
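
        // Usage sketch (hypothetical, not from the original example): binarize a
        // grayscale screenshot before further analysis. Assumes `screenshot` is an
        // 8-bit, single-channel Mat.
        public Mat BinarizeScreenshot(Mat screenshot)
        {
            // Otsu picks the threshold automatically, so no manual level is needed
            return OtsuThresh(screenshot);
        }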
        public IEnumerable <Match> FindMatches(IPattern Template, double Similarity)
        {
            using var result = new DisposableMat();

            // With TM_CCOEFF_NORMED the best match is at the maximum of the result map
            Imgproc.MatchTemplate(Mat, (Template as DroidCvPattern)?.Mat, result.Mat, Imgproc.TmCcoeffNormed);

            Imgproc.Threshold(result.Mat, result.Mat, 0.1, 1, Imgproc.ThreshTozero);

            while (true)
            {
                using var minMaxLocResult = Core.MinMaxLoc(result.Mat);
                var score = minMaxLocResult.MaxVal;

                if (score >= Similarity)
                {
                    var loc    = minMaxLocResult.MaxLoc;
                    var region = new Region((int)loc.X, (int)loc.Y, Template.Width, Template.Height);

                    yield return(new Match(region, score));

                    using var mask = new DisposableMat();
                    // Flood fill zeroes out the neighbourhood of the match, so nearby points with similarly high scores are not reported again
                    const double floodFillDiff = 0.05;
                    Imgproc.FloodFill(result.Mat, mask.Mat, loc, new Scalar(0),
                                      new Rect(),
                                      new Scalar(floodFillDiff), new Scalar(floodFillDiff),
                                      0);
                }
                else
                {
                    break;
                }
            }
        }
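
        // Usage sketch (hypothetical): report every location where a template scores
        // at least 80% similarity. Assumes Match exposes the Region and Score values
        // it was constructed with.
        public void LogMatches(IPattern template)
        {
            foreach (var match in FindMatches(template, 0.8))
            {
                System.Console.WriteLine($"Found {match.Region} with score {match.Score}");
            }
        }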
        public DroidCvPattern(Stream Stream, bool MakeMask)
        {
            // Copy the source stream into a byte buffer (the MemoryStream writes directly into it)
            var buffer = new byte[Stream.Length];

            using var ms = new MemoryStream(buffer);
            Stream.CopyTo(ms);

            using var raw = new DisposableMat(new MatOfByte(buffer));

            if (MakeMask)
            {
                using var rgbaMat = new DisposableMat(Imgcodecs.Imdecode(raw.Mat, Imgcodecs.CvLoadImageUnchanged));

                Mat = new Mat();
                Imgproc.CvtColor(rgbaMat.Mat, Mat, Imgproc.ColorRgba2gray);

                Mask = new Mat();
                // Extract alpha channel
                Core.ExtractChannel(rgbaMat.Mat, Mask, 3);
                // Mask containing 0 or 255
                Imgproc.Threshold(Mask, Mask, 0, 255, Imgproc.ThreshBinary);
            }
            else
            {
                Mat = Imgcodecs.Imdecode(raw.Mat, Imgcodecs.CvLoadImageGrayscale);
            }
        }
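
        // Usage sketch (hypothetical): create a masked pattern from an Android asset.
        // Assumes an AssetManager is available and that "template.png" has an alpha
        // channel to use as the mask.
        public static DroidCvPattern FromAsset(Android.Content.Res.AssetManager assets)
        {
            // The constructor reads the whole stream, so it can be disposed afterwards
            using var stream = assets.Open("template.png");
            return new DroidCvPattern(stream, MakeMask: true);
        }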
Example #4
        /// <summary>
        /// Equalizes the histogram of an image and thresholds the result.
        /// </summary>
        /// <param name="src"> Image to eq & thresh </param>
        /// <param name="thresh"> thresh level </param>
        /// <param name="max"> max value to use </param>
        /// <returns> binary image (high pixels are in range) </returns>
        private Mat GetWhiteFromHistogramEq(Mat src, byte thresh, byte max)
        {
            Mat gray = new Mat(src.Size(), src.Type());

            Imgproc.CvtColor(src, gray, Imgproc.ColorBgr2gray);
            Imgproc.EqualizeHist(gray, gray);
            Imgproc.Threshold(gray, gray, thresh, max, Imgproc.ThreshBinary);
            return(gray);
        }
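
        // Usage sketch (hypothetical): extract near-white regions from a BGR frame.
        // After equalization the intensities are spread roughly uniformly, so a
        // threshold of 250 keeps only the brightest few percent of pixels.
        private Mat GetWhiteMask(Mat frame)
        {
            return GetWhiteFromHistogramEq(frame, 250, 255);
        }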
Example #5
        /// <summary>
        /// Converts a bitmap to grayscale, blurs it and applies a binary threshold.
        /// </summary>
        /// <param name="img"> Source bitmap </param>
        /// <param name="threshValue1"> Binary threshold value </param>
        /// <param name="blurValue"> Width and height of the blur kernel </param>
        /// <returns> Thresholded bitmap, or null if OpenCV throws </returns>
        public static Bitmap greyImg(Bitmap img, double threshValue1, double blurValue)
        {
            // Matrix for the image
            Mat imgMat = new Mat();

            // Convert the bitmap to a matrix
            Utils.BitmapToMat(img, imgMat);

            //----------------- Process the image ---------------------

            // Variables
            //Size s = new Size(10.0, 10.0);
            Size s = new Size(blurValue, blurValue);

            OpenCV.Core.Point p = new OpenCV.Core.Point(0, 0);

            // TODO: take the matrix size into account?
            Bitmap bmp       = null;
            Mat    tmpgrey   = new Mat(10, 10, CvType.Cv8uc1, new Scalar(4));
            Mat    tmpblur   = new Mat(10, 10, CvType.Cv8uc1, new Scalar(4));
            Mat    tmpthresh = new Mat(10, 10, CvType.Cv8uc1, new Scalar(4));
            Mat    imgresult = new Mat(10, 10, CvType.Cv8uc1, new Scalar(4));

            try
            {
                // Grayscale
                Imgproc.CvtColor(imgMat, tmpgrey, Imgproc.ColorBgr2gray, 4);

                // Blur
                Imgproc.Blur(tmpgrey, tmpblur, s, p);

                // Threshold
                // Original value:
                //Imgproc.Threshold(tmpblur, tmpthresh, 90, 255, Imgproc.ThreshBinary);
                Imgproc.Threshold(tmpblur, tmpthresh, threshValue1, 255, Imgproc.ThreshBinary);

                // Contrast
                //tmpthresh.ConvertTo(imgresult, -1, 9.0, 10);

                bmp = Bitmap.CreateBitmap(tmpthresh.Cols(), tmpthresh.Rows(), Bitmap.Config.Argb8888);
                Utils.MatToBitmap(tmpthresh, bmp);
            }
            catch (CvException e) { System.Console.WriteLine(e.Message); }


            return(bmp);
        }
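
        // Usage sketch (hypothetical): prepare a captured photo for OCR using
        // illustrative parameter values (threshold 90, 10x10 blur kernel).
        public static Bitmap PrepareForOcr(Bitmap photo)
        {
            return greyImg(photo, threshValue1: 90, blurValue: 10);
        }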
Example #6
        public override bool OnOptionsItemSelected(IMenuItem item)
        {
            Log.Info(Tag, "Menu Item selected " + item);

            if (item == _itemPickPhoto)
            {
                var imageIntent = new Intent();
                imageIntent.SetType("image/*");
                imageIntent.SetAction(Intent.ActionGetContent);
                StartActivityForResult(Intent.CreateChooser(imageIntent, "Select photo"), 0);
            }
            else if (item == _itemGray)
            {
                // Grayscale image
                // Mat takes (rows, cols), i.e. (height, width)
                _gray = new Mat(_raw.Height(), _raw.Width(), CvType.Cv8uc1);
                Imgproc.CvtColor(_raw, _gray, Imgproc.ColorRgb2gray);
                ShowImage(_gray);
            }
            else if (item == _itemThreshold)
            {
                // Binarize
                _threshold = new Mat(_image.Height, _image.Width, CvType.Cv8uc1);
                Imgproc.Threshold(_gray, _threshold, 168, 255, Imgproc.ThreshBinary);
                ShowImage(_threshold);
            }
            else if (item == _itemFindContours)
            {
                // Find the largest connected region (contour)
                IList <MatOfPoint> contours = new JavaList <MatOfPoint>();
                Mat hierarchy = new Mat();
                var target    = _threshold.Clone();
                Imgproc.FindContours(target, contours, hierarchy, Imgproc.RetrExternal, Imgproc.ChainApproxNone);

                MatOfPoint max = new MatOfPoint();
                double     contour_area_max = 0;
                if (contours.Any())
                {
                    foreach (var contour in contours)
                    {
                        var contour_area_temp = Math.Abs(Imgproc.ContourArea(contour));
                        if (contour_area_temp > contour_area_max)
                        {
                            contour_area_max = contour_area_temp;
                            max = contour;
                        }
                    }
                }

                var last = new JavaList <MatOfPoint>();
                last.Add(max);

                Imgproc.DrawContours(_raw, last, -1, new Scalar(255, 0, 0));

                ShowImage(_raw);
            }
            else if (item == _itemCreateTrimap)
            {
                // Generate a trimap; for now a pre-generated image is used instead
                var imageIntent = new Intent();
                imageIntent.SetType("image/*");
                imageIntent.SetAction(Intent.ActionGetContent);
                StartActivityForResult(Intent.CreateChooser(imageIntent, "Select photo"), 1);
            }
            else if (item == _itemSharedMatting)
            {
                // Matting: cut the foreground out of the image
                var sharedMatting = new SharedMatting();
                sharedMatting.SetImage(_raw);
                sharedMatting.SetTrimap(_trimap);
                sharedMatting.SolveAlpha();
            }

            return(base.OnOptionsItemSelected(item));
        }
Example #7
        public static async Task <string> detectAndExtractText(Bitmap img)
        {
            // Matrices for the images
            Mat large = new Mat();
            Mat small = new Mat();
            Mat rgb   = new Mat();

            // Convert the bitmap to a matrix
            Utils.BitmapToMat(img, large);

            // downsample and use it for processing
            Imgproc.PyrDown(large, rgb);

            // Grayscale
            Imgproc.CvtColor(rgb, small, Imgproc.ColorBgr2gray);

            // Gradient
            Mat  grad        = new Mat();
            Size morphsize   = new Size(3.0, 3.0);
            Mat  morphKernel = Imgproc.GetStructuringElement(Imgproc.MorphEllipse, morphsize);

            Imgproc.MorphologyEx(small, grad, Imgproc.MorphGradient, morphKernel);

            //Binarize
            Mat bw = new Mat();

            Imgproc.Threshold(grad, bw, 0.0, 255.0, Imgproc.ThreshBinary | Imgproc.ThreshOtsu);

            // connect horizontally oriented regions
            Mat  connected   = new Mat();
            Size connectsize = new Size(9.0, 1.0);

            morphKernel = Imgproc.GetStructuringElement(Imgproc.MorphRect, connectsize);
            Imgproc.MorphologyEx(bw, connected, Imgproc.MorphClose, morphKernel);

            // find contours
            Mat mask = Mat.Zeros(bw.Size(), CvType.Cv8uc1);

            JavaList <MatOfPoint> contours = new JavaList <MatOfPoint>();
            Mat hierarchy = new Mat();

            OpenCV.Core.Point contourPoint = new OpenCV.Core.Point(0, 0);

            Imgproc.FindContours(connected, contours, hierarchy, Imgproc.RetrCcomp, Imgproc.ChainApproxSimple, contourPoint);

            Scalar zero        = new Scalar(0, 0, 0);
            Scalar contourscal = new Scalar(255, 255, 255);

            Scalar rectScalar = new Scalar(0, 255, 0);


            OpenCV.Core.Rect rect;
            Mat    maskROI;
            double r;

            double[] contourInfo;

            string resulttext = "";
            string part;

            Bitmap bmpOcr;
            Mat    croppedPart;


            // Walk the top-level contours by following the hierarchy's "next" links
            for (int i = 0; i >= 0;)
            {
                rect = Imgproc.BoundingRect(contours[i]);

                maskROI = new Mat(mask, rect);
                maskROI.SetTo(zero);

                //fill the contour
                Imgproc.DrawContours(mask, contours, i, contourscal, Core.Filled);

                // ratio of non-zero pixels in the filled region
                r = (double)Core.CountNonZero(maskROI) / (rect.Width * rect.Height);

                /* assume at least 45% of the area is filled if it contains text */
                /* constraints on region size */

                /* these two conditions alone are not very robust. better to use something
                 * like the number of significant peaks in a horizontal projection as a third condition */
                if (r > .45 && (rect.Height > 8 && rect.Width > 8))
                {
                    //Imgproc.Rectangle(rgb, rect.Br(), rect.Tl(), rectScalar, 2);
                    try
                    {
                        croppedPart = rgb.Submat(rect);

                        bmpOcr = Bitmap.CreateBitmap(croppedPart.Width(), croppedPart.Height(), Bitmap.Config.Argb8888);
                        Utils.MatToBitmap(croppedPart, bmpOcr);

                        part = await OCR.getText(bmpOcr);

                        resulttext = resulttext + part;
                        Console.WriteLine("------------------ Pass -------------");
                    }
                    catch (Exception e)
                    {
                        Android.Util.Log.Debug("Fehler", "cropped part data error " + e.Message);
                    }
                }


                // Move to the next contour at the same hierarchy level
                contourInfo = hierarchy.Get(0, i);
                i           = (int)contourInfo[0];
            }


            return(resulttext);
        }
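
        // Usage sketch (hypothetical): run the detector from an async context and
        // print whatever text was recognized.
        public static async Task LogDetectedText(Bitmap photo)
        {
            string resultText = await detectAndExtractText(photo);
            System.Console.WriteLine(resultText);
        }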