Example #1
        public void Run()
        {
            Mat gray    = new Mat(@"G:\其他项目\YuZe.ColoredPencil\YuZe.ColoredPencil\P10424-172645.jpg", ImreadModes.Grayscale);
            Mat binary  = new Mat();
            Mat dilate1 = new Mat();
            Mat dilate2 = new Mat();

            byte[] kernelValues = { 0, 1, 0, 1, 1, 1, 0, 1, 0 }; // cross (+)
            Mat    kernel       = new Mat(3, 3, MatType.CV_8UC1, kernelValues);

            // Binarize
            Cv2.Threshold(gray, binary, 0, 255, ThresholdTypes.Otsu);

            // empty kernel
            Cv2.Dilate(binary, dilate1, null);
            // + kernel
            Cv2.Dilate(binary, dilate2, kernel);

            using (new Window("binary", binary, WindowFlags.Normal))
            using (new Window("dilate (kernel = null)", dilate1, WindowFlags.Normal))
            using (new Window("dilate (kernel = +)", dilate2, WindowFlags.Normal))
            {
                Cv2.WaitKey(0);
            }
            Cv2.DestroyAllWindows();
        }
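The hand-rolled kernelValues array above is just the 3x3 cross; GetStructuringElement builds the same element, and passing null (as for dilate1) makes OpenCV fall back to its default 3x3 rectangular kernel. A minimal sketch reusing the binary and dilate2 mats from this example:

            // Same cross-shaped 3x3 element as the byte[] { 0,1,0, 1,1,1, 0,1,0 } above
            Mat cross = Cv2.GetStructuringElement(MorphShapes.Cross, new Size(3, 3));
            Cv2.Dilate(binary, dilate2, cross);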
Example #2
 private void mnuFilterMorphologyDilate_Click(object sender, EventArgs e)
 {
     // Dilate
     Cv2.Dilate(_matDisp, _matDisp, null);
     // Draw the image
     DrawMatImage(_matDisp);
 }
Example #3
        private static void testBuiltinFilters()
        {
            using (var src = new Mat(@"..\..\Images\Car.jpg", ImreadModes.AnyDepth | ImreadModes.AnyColor))
            {
                using (var dst = new Mat())
                {
                    src.CopyTo(dst);

                    using (new Window("src", image: src))
                    {
                        Cv2.Erode(src, dst, new Mat());
                        using (new Window("Erode", image: dst))
                        {
                            Cv2.Dilate(src, dst, new Mat());
                            using (new Window("Dilate", image: dst))
                            {
                                Cv2.BitwiseNot(src, dst);
                                using (new Window("Invert", image: dst))
                                {
                                    Cv2.WaitKey();
                                }
                            }
                        }
                    }
                }
            }
        }
 void Dilate(Mat image, int number)
 {
     for (int i = 0; i < number; i++)
     {
         Cv2.Dilate(image, image, new Mat());
     }
 }
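Cv2.Dilate also takes an iterations argument, so the loop in this helper can be collapsed into a single call. A minimal sketch under that assumption (null element means the same default 3x3 kernel):

 void Dilate(Mat image, int number)
 {
     // null element = default 3x3 rectangular kernel; 'number' maps to the iterations parameter
     Cv2.Dilate(image, image, null, null, number);
 }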
        System.Drawing.Bitmap Decaptcha(BlockOcr blockOcr, BotData data)
        {
            using (var captcha = blockOcr.Base64ImageDecoder(ReplaceValues(Base64Captcha, data)))
            {
                using (var src = captcha.ToMat())
                {
                    using (var binaryMask = new Mat())
                    {
                        var linesColor = Scalar.FromRgb(0x74, 0x74, 0x74);

                        Cv2.InRange(src, linesColor, linesColor, binaryMask);

                        using (var masked = new Mat())
                        {
                            src.CopyTo(masked, binaryMask);

                            int linesDilate = 3;
                            using (var element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(linesDilate, linesDilate)))
                            {
                                Cv2.Dilate(masked, masked, element);
                            }

                            try
                            {
                                Cv2.CvtColor(masked, masked, ColorConversionCodes.RGB2GRAY);
                            }
                            catch { }

                            using (var dst = src.EmptyClone())
                            {
                                Cv2.Inpaint(src, masked, dst, 3, InpaintMethod.NS);

                                linesDilate = 2;
                                using (var element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(linesDilate, linesDilate)))
                                {
                                    Cv2.Dilate(dst, dst, element);
                                }

                                Cv2.GaussianBlur(dst, dst, new Size(5, 5), 0);

                                using (var dst2 = dst.BilateralFilter(5, 75, 75))
                                {
                                    try
                                    {
                                        Cv2.CvtColor(dst2, dst2, ColorConversionCodes.RGB2GRAY);
                                    }
                                    catch { }

                                    Cv2.Threshold(dst2, dst2, 255, 255, ThresholdTypes.Otsu);

                                    Cv2.FastNlMeansDenoising(dst2, dst2, 44);

                                    return(dst2.ToBitmap().Clone() as System.Drawing.Bitmap);
                                }
                            }
                        }
                    }
                }
            }
        }
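A note on the Otsu calls in this snippet: when ThresholdTypes.Otsu is set, OpenCV derives the threshold from the image histogram and ignores the thresh argument, so the 255 passed as the threshold value has no effect. A more explicit spelling of the same call, as a sketch:

            Cv2.Threshold(dst2, dst2, 0, 255, ThresholdTypes.Binary | ThresholdTypes.Otsu);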
Example #6
        static void Main(string[] args)
        {
            Mat src    = Cv2.ImRead("card.jpg");
            Mat gray   = new Mat();
            Mat binary = new Mat();
            Mat morp   = new Mat();
            Mat canny  = new Mat();
            Mat dst    = src.Clone();

            Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));

            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
            Cv2.Threshold(gray, binary, 150, 255, ThresholdTypes.Binary);
            Cv2.Dilate(binary, morp, kernel, new Point(-1, -1));
            Cv2.Erode(morp, morp, kernel, new Point(-1, -1), 3);
            Cv2.Dilate(morp, morp, kernel, new Point(-1, -1), 2);
            Cv2.Canny(morp, canny, 0, 0, 3);

            LineSegmentPoint[] lines = Cv2.HoughLinesP(canny, 1, Cv2.PI / 180, 140, 50, 10);

            for (int i = 0; i < lines.Length; i++)
            {
                Cv2.Line(dst, lines[i].P1, lines[i].P2, Scalar.Yellow, 2);
            }

            Cv2.ImShow("dst", dst);
            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
        }
Example #7
        public List <Rect> FindBounds(Mat mask)
        {
            List <Rect> result = new List <Rect>();

            Cv2.Erode(mask, mask, new Mat());
            Cv2.Dilate(mask, mask, new Mat());
            Point[][]        contours;         //vector<vector<Point>> contours;
            HierarchyIndex[] hierarchyIndexes; //vector<Vec4i> hierarchy;
            Cv2.FindContours(
                mask,
                out contours,
                out hierarchyIndexes,
                ContourRetrieval.External,
                ContourChain.ApproxSimple
                );
            var contourIndex = 0;

            while ((contourIndex >= 0) && contours.Length != 0)
            {
                var contour = contours[contourIndex];

                var boundingRect = Cv2.BoundingRect(contour); //Find bounding rect for each contour
                result.Add(boundingRect);
                contourIndex = hierarchyIndexes[contourIndex].Next;
            }
            return(result);
        }
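Erode followed by Dilate with the same (default) kernel, as at the top of FindBounds, is a morphological opening, so the cleanup can also be written as a single MorphologyEx call. A minimal sketch, using an explicit 3x3 rectangle to match the default element:

            Mat element = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));
            Cv2.MorphologyEx(mask, mask, MorphTypes.Open, element);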
Example #8
        public Mat BarcodeRegion(Mat src_)
        {
            //Cv2.Resize(src, src, new Size(src.Size().Width / 2, src.Size().Height / 2));
            Mat src = src_.Clone();

            Cv2.CvtColor(src, src, ColorConversionCodes.RGB2GRAY);
            Cv2.GaussianBlur(src, src, new Size(3, 3), 0);
            Mat img_X = new Mat();
            Mat img_Y = new Mat();

            Cv2.Sobel(src, img_X, MatType.CV_16S, 1, 0);
            Cv2.Sobel(src, img_Y, MatType.CV_16S, 0, 1);

            Cv2.ConvertScaleAbs(img_X, img_X, 1, 0);
            Cv2.ConvertScaleAbs(img_Y, img_Y, 1, 0);

            Mat margin = img_X - img_Y;

            //Cv2.ImShow("img_Y", margin);
            //Cv2.WaitKey();
            Cv2.Resize(margin, margin, new Size(margin.Width * 0.3, margin.Height * 1.5), 0, 0, InterpolationFlags.Area);
            Cv2.Blur(margin, margin, new Size(3, 3));
            Cv2.MedianBlur(margin, margin, 3);

            Mat imgthreshold = new Mat();

            Cv2.Threshold(margin, imgthreshold, 80, 255, ThresholdTypes.Binary);
            //Cv2.AdaptiveThreshold(margin, imgthreshold, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 3, -1);
            Cv2.ImShow("thresh", imgthreshold);
            Cv2.WaitKey();

            // First dilate horizontally to fill the gaps inside the barcode
            Mat element = Cv2.GetStructuringElement(MorphShapes.Cross, new Size(5, 1));

            Cv2.MorphologyEx(imgthreshold, imgthreshold, MorphTypes.Dilate, element);
            // Erode vertically to separate the barcode from the characters
            element = Cv2.GetStructuringElement(MorphShapes.Cross, new Size(1, 5));
            Cv2.MorphologyEx(imgthreshold, imgthreshold, MorphTypes.Erode, element);

            // Remove the characters
            element = Cv2.GetStructuringElement(MorphShapes.Cross, new Size(10, 10));
            Cv2.MorphologyEx(imgthreshold, imgthreshold, MorphTypes.Open, element);
            Cv2.MorphologyEx(imgthreshold, imgthreshold, MorphTypes.Close, element);


            element = Cv2.GetStructuringElement(MorphShapes.Cross, new Size(10, 10));
            Cv2.Erode(imgthreshold, imgthreshold, element);
            Cv2.Erode(imgthreshold, imgthreshold, element);
            Cv2.Dilate(imgthreshold, imgthreshold, element);
            Cv2.Resize(imgthreshold, imgthreshold, new Size(src.Width, src.Height), 0, 0, InterpolationFlags.Area);
            Cv2.ImShow("thresh", imgthreshold);
            Cv2.WaitKey();

            return(imgthreshold);


            // Compute the largest inscribed rectangle of each region, then the black/white area ratio of the image it contains

            //Cv2.Dilate(imgthreshold, imgthreshold, element);
        }
        // Watershed segmentation wrapper
        private Mat waterShed(Mat src, int MEADIANBlUR_KSIZE, Size ELEMENT_SIZE)
        {
            var imageGray   = new Mat();
            var thresh      = new Mat();
            var fg          = new Mat();
            var bgt         = new Mat();
            var bg          = new Mat();
            var marker      = new Mat();
            var marker32    = new Mat();
            var m           = new Mat();
            var res         = new Mat();
            var threshOpen  = new Mat();
            var threshClose = new Mat();

            Cv2.CvtColor(src, imageGray, ColorConversionCodes.BGR2GRAY);
            Cv2.EqualizeHist(imageGray, imageGray);                  // Histogram equalization
            Cv2.MedianBlur(imageGray, imageGray, MEADIANBlUR_KSIZE); // Median blur
            Cv2.Threshold(imageGray, thresh, 0, 255, ThresholdTypes.Otsu);
            Cv2.Erode(thresh, fg, null, null, 2);   // null element = default 3x3 kernel
            Cv2.Dilate(thresh, bgt, null, null, 3);
            Cv2.Threshold(bgt, bg, 1, 128, ThresholdTypes.BinaryInv);
            marker = fg + bg;
            marker.ConvertTo(marker32, MatType.CV_32SC1);
            Cv2.Watershed(src, marker32);
            Cv2.ConvertScaleAbs(marker32, m);
            Cv2.Threshold(m, thresh, 0, 255, ThresholdTypes.Otsu);
            var element = Cv2.GetStructuringElement(MorphShapes.Rect, ELEMENT_SIZE); // Build the custom structuring element

            Cv2.MorphologyEx(thresh, threshOpen, MorphTypes.Open, element);          // Opening
            Cv2.MorphologyEx(threshOpen, threshClose, MorphTypes.Close, element);    // Closing
            Cv2.BitwiseAnd(src, src, res, threshClose);
            return(res);
        }
Example #10
        static void Main(string[] args)
        {
            Mat src   = Cv2.ImRead("colorball.png");
            Mat image = new Mat();
            Mat dst   = src.Clone();

            Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));

            Cv2.CvtColor(src, image, ColorConversionCodes.BGR2GRAY);
            Cv2.Dilate(image, image, kernel, new Point(-1, -1), 3);
            Cv2.GaussianBlur(image, image, new Size(13, 13), 3, 3, BorderTypes.Reflect101);
            Cv2.Erode(image, image, kernel, new Point(-1, -1), 3);

            CircleSegment[] circles = Cv2.HoughCircles(image, HoughMethods.Gradient, 1, 100, 100, 35, 0, 0);

            for (int i = 0; i < circles.Length; i++)
            {
                Point center = new Point(circles[i].Center.X, circles[i].Center.Y);

                Cv2.Circle(dst, center, (int)circles[i].Radius, Scalar.White, 3);
                Cv2.Circle(dst, center, 5, Scalar.AntiqueWhite, Cv2.FILLED);
            }

            Cv2.ImShow("dst", dst);
            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
        }
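For readability, the positional arguments in the HoughCircles call above correspond to dp, minDist, param1 (upper Canny threshold), param2 (accumulator threshold) and the radius bounds. The same call with named arguments, as a sketch (parameter names as exposed by OpenCvSharp's wrapper):

            CircleSegment[] circles = Cv2.HoughCircles(image, HoughMethods.Gradient,
                dp: 1, minDist: 100, param1: 100, param2: 35, minRadius: 0, maxRadius: 0);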
Example #11
        private IEnumerable <IEnumerable <Mat> > SplitImages(Mat image)
        {
            var dilate  = new Mat();
            var element = Cv2.GetStructuringElement(StructuringElementShape.Rect, new Size(image.Width / 40, 1));

            Cv2.Dilate(image.Threshold(127, 255, ThresholdType.BinaryInv), dilate, element);

            HierarchyIndex[] hierarchyIndices;
            Point[][]        contours;
            Cv2.FindContours(dilate, out contours, out hierarchyIndices, ContourRetrieval.External, ContourChain.ApproxNone);

            var contourRows = SortToRows(contours);

            var imageRows = contourRows.Select(r => r.Select(c =>
            {
                var img = new Mat();
                // Crop the source image around this contour and pad it with a white border
                Cv2.CopyMakeBorder(image[Cv2.BoundingRect(c)], img, 10, 10, 10, 10, BorderType.Constant, Scalar.White);

                // using (new Window("image", segment)) { Cv2.WaitKey(); }

                return(img);
            }));

            return(imageRows);
        }
        /// <summary>
        /// First operation for handling the captcha: convert the image to grayscale, remove small noise, and make it black and white.
        /// </summary>
        /// <param name="captchaBmp">Bitmap of captcha</param>
        /// <param name="isGenerateLocalCopies">Turn this on if you want to save the processed captcha files and inspect the steps</param>
        /// <returns>Bitmap of processed captcha</returns>
        private Bitmap ConvertCaptchaToBlackAndWhite(Bitmap captchaBmp, bool isGenerateLocalCopies = false)
        {
            // load the file
            Mat captchaMat = BitmapConverter.ToMat(captchaBmp);

            using (var src = captchaMat)
            {
                using (var binaryMask = new Mat())
                {
                    // lines color is different than text
                    var linesColor = Scalar.FromRgb(0x70, 0x70, 0x70);

                    // build a mask of lines
                    Cv2.InRange(src, linesColor, linesColor, binaryMask);
                    using (var masked = new Mat())
                    {
                        // build the corresponding image
                        // dilate lines a bit because aliasing may have filtered borders too much during masking
                        src.CopyTo(masked, binaryMask);
                        int linesDilate = 3;
                        using (var element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new OpenCvSharp.Size(linesDilate, linesDilate)))
                        {
                            Cv2.Dilate(masked, masked, element);
                        }

                        // convert mask to grayscale
                        Cv2.CvtColor(masked, masked, ColorConversionCodes.BGR2GRAY);
                        using (var dst = src.EmptyClone())
                        {
                            // repaint big lines
                            Cv2.Inpaint(src, masked, dst, 3, InpaintMethod.NS);

                            // destroy small lines
                            linesDilate = 2;
                            using (var element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new OpenCvSharp.Size(linesDilate, linesDilate)))
                            {
                                Cv2.Dilate(dst, dst, element);
                            }

                            Cv2.GaussianBlur(dst, dst, new OpenCvSharp.Size(5, 5), 0);
                            using (var dst2 = dst.BilateralFilter(5, 75, 75))
                            {
                                // basically make it B&W
                                Cv2.CvtColor(dst2, dst2, ColorConversionCodes.BGR2GRAY);
                                Cv2.Threshold(dst2, dst2, 255, 255, ThresholdTypes.Otsu);

                                if (isGenerateLocalCopies)
                                {
                                    dst2.SaveImage(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) + "\\blackandwhite.png");
                                }

                                // save the file
                                return(dst2.ToBitmap());
                            }
                        }
                    }
                }
            }
        }
Example #13
        /// <summary>
        /// Applies dilation to image, iteration times
        /// </summary>
        public static Bitmap DilateImage(Bitmap image, int iteration)
        {
            var mat = new Mat();
            // 3x3 kernel of ones (8-neighborhood); passing null would select the same default kernel
            Mat neighborhood8 = Mat.Ones(3, 3, MatType.CV_8UC1);

            Cv2.Dilate(image.ToMat(), mat, neighborhood8, null, iteration);
            return(mat.ToBitmap());
        }
 /// <summary>
 /// Dilation
 /// </summary>
 /// <param name="image">Image object</param>
 /// <returns></returns>
 public static Image Swell(this Image image)
 {
     using (Mat src = new Bitmap(image).ToMat())
     {
         Cv2.Dilate(src, src, new Mat());
         return(src.ToBitmap());
     }
 }
        private List <Rect> DetectCars(Mat frame, BackgroundSubtractorMOG bgSubtractor, bool groupRectangles, bool isDebug = true)
        {
            Mat fgMask = new Mat();

            //get the mask of what changed.  Everything unchanged will be black
            bgSubtractor.Apply(frame, fgMask);

            //pad detected changes to a minimum size of 10x10
            var kernel  = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(10, 10));
            var closing = new Mat();
            var opening = new Mat();

            Cv2.MorphologyEx(fgMask, closing, MorphTypes.Close, kernel);
            Cv2.MorphologyEx(closing, opening, MorphTypes.Open, kernel);
            Cv2.Dilate(opening, fgMask, kernel);

            var cars      = new List <Rect>();
            var hierarchy = new Mat();

            Cv2.FindContours(fgMask, out Mat[] contours, hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxTC89L1);
            foreach (var contour in contours)
            {
                var boundingRect = Cv2.BoundingRect(contour);
                //TODO:  Add to config
                if (boundingRect.Width < 130 ||
                    boundingRect.Height < 40 ||
                    boundingRect.Bottom < 120 ||
                    boundingRect.Height > 210)
                {
                    continue;
                }

                cars.Add(boundingRect);
            }

            //group rectangles together so you don't get rectangles inside of rectangles
            if (groupRectangles)
            {
                var duplicateCars = new List <Rect>(cars);
                duplicateCars.AddRange(cars);

                Cv2.GroupRectangles(duplicateCars, 1, 1);
                cars = duplicateCars;
            }

            if (isDebug)
            {
                foreach (var boundingRect in cars)
                {
                    Cv2.Rectangle(frame, boundingRect, Scalar.Blue, 5);
                }

                //Cv2.ImShow("Mask", fgMask);
                //Cv2.WaitKey(1);
            }

            return(cars);
        }
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// Tracks the given color and sets the position and rotation of the spraycan
    /// </summary>
    void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);   // get the current camera image in the given pixel format

                if (image != null)
                {
                #if UNITY_EDITOR
                    inputMat = new Mat(image.Height, image.Width, MatType.CV_8UC1, image.Pixels);
                #else
                    inputMat = new Mat(image.Height, image.Width, MatType.CV_8UC3, image.Pixels);   // store the image's pixels in an OpenCV mat
                #endif

                    Cv2.Resize(inputMat, smallMat, new Size(480, 270));                                                                       // resizing for performance reasons (keep aspect ratio!)
                    Cv2.GaussianBlur(smallMat, blurredMat, new Size(11, 11), 0);                                                              // blur image to reduce noise
                    Cv2.CvtColor(blurredMat, hsvMat, ColorConversionCodes.RGB2HSV);                                                           // convert to HSV colors
                    Cv2.InRange(hsvMat, lowerHSVColor, upperHSVColor, thresholdMat);                                                          // filter out all pixels matching the given HSV range

                    Cv2.Erode(thresholdMat, thresholdMat, Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(3, 3)), null, 2);           // shave off pixels from blobs to eliminate small blobs
                    Cv2.Dilate(thresholdMat, thresholdMat, Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(3, 3)), null, 2);          // strengthen the remaining blobs

                    Cv2.FindContours(thresholdMat, out contours, hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple); // detect the blobs and save them as contours

                    if (contours.Length > 0)
                    {
                        Mat contour = contours.Aggregate((i, j) => i.ContourArea() > j.ContourArea() ? i : j);  // find the blob with the biggest ContourArea/Size

                        Point2f point;
                        float   radius;
                        Cv2.MinEnclosingCircle(contour, out point, out radius); // get the radius for passing a final threshold

                        if (radius > 5)
                        {
                            Moments moments = Cv2.Moments(contour); // use moments to calculate the center point of the biggest blob
                            double  area    = moments.M00;
                            double  m01     = moments.M01;
                            double  m10     = moments.M10;

                            double posX = m10 / area;
                            double posY = m01 / area;

                            double rotX = MapValue(posX, 0, 480, -31.5, 31.5);  // map the values to match coordinates usable in Unity
                            double rotY = MapValue(posY, 0, 270, -19.75, 19.75);

                            posX = MapValue(posX, 0, 480, -6, 6);
                            posY = MapValue(posY, 0, 270, 3.5, -3.5);

                            this.transform.localPosition    = new Vector3((float)posX, (float)posY, 10); // apply the changes to position and rotation
                            this.transform.localEulerAngles = new Vector3((float)rotY, (float)rotX, 0);
                        }
                    }
                }
            }
        }
    }
Example #17
        private void loadImage()
        {
            using (var iplImage = new Mat(@"..\..\Images\Penguin.png", ImreadModes.AnyDepth | ImreadModes.AnyColor))
            {
                Cv2.Dilate(iplImage, iplImage, new Mat());

                Image1.Source = iplImage.ToWriteableBitmap(PixelFormats.Bgr24);
            }
        }
Example #18
        private static void PreProcessing()
        {
            //Cv2.GaussianBlur(_mask, _mask, new Size(4, 4), 8, 6);
            Cv2.Erode(_mask, _mask, null);
            //Cv2.Erode(_mask, _mask, null);

            Cv2.Dilate(_mask, _mask, null);
            //Cv2.Dilate(_mask, _mask, null);
        }
Example #19
        // PreProcessing Part
        private Bitmap PreProcessing(Mat img)
        {
            var element = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(5, 5), new OpenCvSharp.Point(0, 0));

            Cv2.Threshold(img, img, 100, 255, ThresholdTypes.Binary);
            Cv2.Erode(img, img, element);
            Cv2.Dilate(img, img, element);

            return(OpenCvSharp.Extensions.BitmapConverter.ToBitmap(img));
        }
        public static void soften(ref Mat input_image)
        {
            Cv2.MedianBlur(input_image, input_image, 7);

            OpenCvSharp.Point morph_point = new OpenCvSharp.Point(1, 1);
            Mat erodeElement  = Cv2.GetStructuringElement(shape: MorphShapes.Rect, new OpenCvSharp.Size(3, 3));
            Mat dilateElement = Cv2.GetStructuringElement(shape: MorphShapes.Rect, new OpenCvSharp.Size(3, 3));

            Cv2.Erode(input_image, input_image, erodeElement, morph_point, 5, borderType: BorderTypes.Reflect);
            Cv2.Dilate(input_image, input_image, dilateElement, morph_point, 5, borderType: BorderTypes.Reflect);
        }
Example #21
        private async Task CaptureLoop(CancellationToken cancelToken)
        {
            byte[] dilateArray =
            {
                1, 1, 1, 1, 1,
                1, 1, 1, 1, 1,
                1, 1, 1, 1, 1,
                1, 1, 1, 1, 1,
                1, 1, 1, 1, 1,
            };

            Texture2D stencilViewTexture = new Texture2D(width, height);
            Texture2D rgbViewTexture     = new Texture2D(width, height);
            Texture2D inpaintViewTexture = new Texture2D(width, height);

            HumanStencil_Image.texture = stencilViewTexture;
            RGB_Image.texture          = rgbViewTexture;
            Inpaint_Image.texture      = inpaintViewTexture;

            while (!cancelToken.IsCancellationRequested)
            {
                await Task.Delay(10);

                if (RGB_Texture == null || Stencil_Texture == null)
                {
                    continue;
                }

                using (Mat stencilMat = OpenCvSharp.Unity.TextureToMat(Stencil_Texture))
                    using (Mat rgbMat = OpenCvSharp.Unity.TextureToMat(RGB_Texture))
                        using (Mat inpaintMat = new Mat())
                        {
                            #region stencil texture
                            Cv2.CvtColor(stencilMat, stencilMat, ColorConversionCodes.BGR2GRAY);
                            Cv2.Dilate(stencilMat, stencilMat, InputArray.Create(dilateArray));
                            Cv2.Resize(stencilMat, stencilMat, new OpenCvSharp.Size(width, height));
                            stencilViewTexture = OpenCvSharp.Unity.MatToTexture(stencilMat, stencilViewTexture);
                            #endregion

                            #region rgb texture
                            Cv2.Resize(rgbMat, rgbMat, new OpenCvSharp.Size(width, height));
                            Cv2.Flip(rgbMat, rgbMat, FlipMode.Y);
                            rgbViewTexture = OpenCvSharp.Unity.MatToTexture(rgbMat, rgbViewTexture);
                            #endregion

                            #region inpaint
                            Cv2.Inpaint(rgbMat, stencilMat, inpaintMat, 3, InpaintMethod.NS);
                            inpaintViewTexture = OpenCvSharp.Unity.MatToTexture(inpaintMat, inpaintViewTexture);
                            #endregion

                            stencilMat.Dispose();
                            rgbMat.Dispose();
                            inpaintMat.Dispose();
                        }
            }
        }
Example #22
        public List <Mat> ProcessPuzzle(Mat unwarpedPuzzle)
        {
            Cv2.Erode(unwarpedPuzzle, horzSubtract, kernelHorz);
            Cv2.Dilate(horzSubtract, horzSubtract, kernelHorz);

            Cv2.Erode(unwarpedPuzzle, vertSubtract, kernelVert);
            Cv2.Dilate(vertSubtract, vertSubtract, kernelVert);

            unwarpedSudoku = unwarpedSudoku - (horzSubtract + vertSubtract);

            return(getIndividualBoxes(unwarpedSudoku));
        }
Example #23
        static void Main(string[] args)
        {
            Mat src = Cv2.ImRead("dandelion.jpg", ImreadModes.Grayscale);
            Mat dst = new Mat();

            Mat kernel = Cv2.GetStructuringElement(MorphShapes.Cross, new Size(7, 7));

            Cv2.Dilate(src, dst, kernel, new Point(-1, -1), 3, BorderTypes.Reflect101, new Scalar(0));

            Cv2.ImShow("dst", dst);
            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
        }
        private Mat morphOps(Mat thresh, int x)
        {
            //create structuring element that will be used to "dilate"
            Mat erodeElement  = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(1, 1));
            Mat dilateElement = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(x, x));

            Cv2.Erode(thresh, thresh, erodeElement);
            Cv2.Dilate(thresh, thresh, dilateElement);
            Cv2.Dilate(thresh, thresh, dilateElement);
            Cv2.Erode(thresh, thresh, dilateElement);
            Cv2.Erode(thresh, thresh, dilateElement);

            return(thresh);
        }
Example #25
        private static Bitmap PrepareImage(Bitmap image)
        {
            //Mat src = new Mat(@"D:\tesseract4\docs\tables\balans_1kv_2013_21.jpg", ImreadModes.GrayScale);
            //var src = Cv2.ImRead(@"D:\tesseract4\docs\tables\balans_1kv_2013_21.jpg");
            var gray = image.ToGrayscaleMat();

            var bw = new Mat();

            Cv2.AdaptiveThreshold(~gray, bw, 256, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 25, -2);

            var horizontal = bw.Clone();
            var vertical   = bw.Clone();
            var scale      = 15;

            var horizontalSize      = horizontal.Cols / scale;
            var verticalSize        = vertical.Rows / scale;
            var horizontalStructure = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(horizontalSize, 1));
            var verticalStructure   = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(1, verticalSize));

            Cv2.Erode(horizontal, horizontal, horizontalStructure, new OpenCvSharp.Point(-1, -1));
            Cv2.Dilate(horizontal, horizontal, horizontalStructure, new OpenCvSharp.Point(-1, -1));

            Cv2.Erode(vertical, vertical, verticalStructure, new OpenCvSharp.Point(-1, -1));
            Cv2.Dilate(vertical, vertical, verticalStructure, new OpenCvSharp.Point(-1, -1));

            //SaveImage(vertical, "vertical");

            //Cv2.Canny(src, dst, 50, 200);

            //using (new Window(horizontal))
            //{
            //  Cv2.WaitKey();
            //}
            var mask = horizontal + vertical;
            //SaveImage(mask, "mask");
            var newMask = new Mat();

            Cv2.AdaptiveThreshold(~mask, newMask, 256, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 7, -2);
            //SaveImage(newMask, "newMask");

            newMask = mask + newMask;

            SaveImage(newMask, "hyperMask");

            //var withOutTable = gray + newMask;

            //SaveImage(withOutTable, "withOutTable");

            return(BitmapConverter.ToBitmap(mask));
        }
Example #26
        private static void MorphOps(Mat thresh)
        {
            //create structuring element that will be used to "dilate" and "erode" image.
            //the element chosen here is a 3px by 3px rectangle
            var erodeElement = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));
            //dilate with larger element to make sure the object is nicely visible
            var dilateElement = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(8, 8));

            Cv2.Erode(thresh, thresh, erodeElement);
            Cv2.Erode(thresh, thresh, erodeElement);

            Cv2.Dilate(thresh, thresh, dilateElement);
            Cv2.Dilate(thresh, thresh, dilateElement);
        }
Example #27
    void CamUpdate()
    {
        CvUtil.GetWebCamMat(webCamTexture, ref mat);

        mog2.Apply(mat, fg, 0.05f);
        Cv2.GaussianBlur(fg, fg, new Size(21, 21), 0);
        Cv2.Threshold(fg, fg, 30, 255, ThresholdTypes.Binary);
        Cv2.Dilate(fg, fg, nm, default(Point?), 2);
        Cv2.CvtColor(fg, fg, ColorConversionCodes.GRAY2BGRA);
        Cv2.Add(mat, fg, fg);

        CvConvert.MatToTexture2D(fg, ref tex);
        rawImage.texture = tex;
    }
Example #28
        private int[] CalculateLabelWidthHeight(string className)
        {
            Mat zeros = Mat.Zeros(new Size(500, 50), MatType.CV_8UC1);

            Cv2.PutText(zeros, className, new Point(50, 50), HersheyFonts.HersheyDuplex, fontScale, white, thickness: 1, lineType: LineTypes.AntiAlias);
            Cv2.Resize(zeros, zeros, new Size(0, 0), fx: 0.5, fy: 0.5);
            Cv2.Dilate(zeros, zeros, Mat.Ones(new Size(2, 3), MatType.CV_8UC1));
            Cv2.Resize(zeros, zeros, new Size(0, 0), fx: 2, fy: 2);
            Cv2.Threshold(zeros, zeros, 1, 255, ThresholdTypes.Binary);
            Point[][] contours = Cv2.FindContoursAsArray(zeros, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            Rect      rect     = Cv2.BoundingRect(contours[0]);

            return(new int[] { rect.Width + 2, rect.Height + 2 });
        }
        private static Point[][] GetContoursForParagraph(Mat imageToProcesing)
        {
            Point[][]        wordsContours;
            HierarchyIndex[] hierarchyIndexes;
            var kernelH = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(1, 1));

            Cv2.Erode(imageToProcesing, imageToProcesing, kernelH, null, 4);
            Cv2.Dilate(imageToProcesing, imageToProcesing, kernelH, null, 4);
            FindContours(imageToProcesing, out wordsContours, out hierarchyIndexes);
            Mat newOutput = new Mat();

            newOutput = CreateMaskParagraph(imageToProcesing);
            Point[][] contoursMask; //vector<vector<Point>> contours;
            FindContours(newOutput, out contoursMask, out hierarchyIndexes);
            return(contoursMask);
        }
        private Mat histMasking(Mat cameraMat)
        {
            Mat hsvMat = new Mat();
            Mat dstMat = new Mat();

            Cv2.CvtColor(cameraMat, hsvMat, ColorConversionCodes.BGR2HSV);
            Cv2.CalcBackProject(new[] { hsvMat }, new[] { 0, 1 }, handHist, dstMat, new[] { new Rangef(0, 180), new Rangef(0, 255) });

            if (filter2D)
            {
                Mat filter2DMat = Cv2.GetStructuringElement(filter2DShape, (new Size(filter2DSize.x, filter2DSize.y)));
                Cv2.Filter2D(dstMat, dstMat, -1, filter2DMat);
            }

            if (thresholdFilter)
            {
                Cv2.Threshold(dstMat, dstMat, thresholdLow, thresholdHigh, ThresholdTypes.Binary);
            }

            if (morphologyTransformation)
            {
                Mat morphologyTransformationMat =
                    Cv2.GetStructuringElement(morphologyTransformationShape, (new Size(morphologyTransformationSize.x, morphologyTransformationSize.y)));
                Cv2.MorphologyEx(dstMat, dstMat, morphologyTransformationType, morphologyTransformationMat, null, morphologyTransformationIterations);
            }

            if (dilatateFilter)
            {
                Mat dilatateShapeMat = Cv2.GetStructuringElement(this.dilatateShape, (new Size(dilatateSize.x, dilatateSize.y)));
                Cv2.Dilate(dstMat, dstMat, dilatateShapeMat, null, dilatateIterations);
            }

            if (erodeFilter)
            {
                Mat erodeShapeMat = Cv2.GetStructuringElement(this.erodeShape, (new Size(erodeSize.x, erodeSize.y)));
                Cv2.Erode(dstMat, dstMat, erodeShapeMat, null, erodeIterations);
            }

            handMask = dstMat;

            if (bitwiseFilter)
            {
                // Cv2.BitwiseAnd(cameraMat, dstMat, dstMat); //TODO: we need it to mask our hand, in this form it return exception
            }

            return(dstMat);
        }
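A possible reason the commented-out BitwiseAnd in the last snippet throws: dstMat is the single-channel back-projection while cameraMat is a 3-channel BGR frame, so the two inputs don't match. The watershed example above (Cv2.BitwiseAnd(src, src, res, threshClose)) sidesteps this by passing the single-channel image through the mask parameter instead. A minimal sketch along the same lines (maskedFrame is a name introduced here):

            Mat maskedFrame = new Mat();
            Cv2.BitwiseAnd(cameraMat, cameraMat, maskedFrame, dstMat); // dstMat acts as an 8-bit mask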