static CellLines DetectHorizontalLines(Mat binary, int hThreshold, int textSize)
        {
            CellLines horizontals = new CellLines();

            using (Mat horizontal = binary.Clone())
                using (Mat hKernel = Cv2.GetStructuringElement(shape: MorphShapes.Rect, ksize: new Size(hThreshold, SIZE_LINE)))
                    using (Mat kernelDilate = Cv2.GetStructuringElement(shape: MorphShapes.Rect, ksize: new Size(SIZE_DILATE, SIZE_DILATE)))
                    {
                        // horizontal
                        Cv2.Erode(src: horizontal, dst: horizontal, element: hKernel, anchor: new Point(-1, -1));
                        Cv2.Dilate(src: horizontal, dst: horizontal, element: hKernel, anchor: new Point(-1, -1));

                        // Dilate the lines so that segments with small gaps in the middle are reconnected.
                        Cv2.Dilate(src: horizontal, dst: horizontal, element: kernelDilate, anchor: new Point(-1, -1));

                        Point[][]        horizontalContours;
                        HierarchyIndex[] horizontalHierarchy;
                        Cv2.FindContours(image: horizontal, contours: out horizontalContours, hierarchy: out horizontalHierarchy, mode: RetrievalModes.External, method: ContourApproximationModes.ApproxSimple, offset: new Point(0, 0));

                        int  startX, startY, endX, endY, index = 0;
                        Rect rect;

                        for (int i = 0; i < horizontalHierarchy.Length; i++)
                        {
                            rect = Cv2.BoundingRect(curve: horizontalContours[i]);

                            startX = rect.X;
                            startY = rect.Y + (int)(rect.Height * 0.5);
                            endX   = rect.X + rect.Width;
                            endY   = startY;

                            if (rect.Width > textSize)
                            {
                                horizontals.Add(new CellLine(index: index++, startX: startX, startY: startY, endX: endX, endY: endY, thickness: rect.Height));
                            }
                        }
                    }

            return(new CellLines(horizontals.OrderBy(line => line.CenterY)));
        }
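A minimal usage sketch for the detector above, assuming the surrounding class also defines SIZE_LINE, SIZE_DILATE and the CellLine/CellLines types; the threshold values below are illustrative, not taken from the original project.

        // Hypothetical caller: binarize a scanned table (lines become white on black)
        // and collect its horizontal ruling lines.
        static CellLines FindTableRows(string path)
        {
            using (Mat src = Cv2.ImRead(path, ImreadModes.Grayscale))
                using (Mat binary = new Mat())
                {
                    Cv2.Threshold(src, binary, 0, 255, ThresholdTypes.BinaryInv | ThresholdTypes.Otsu);

                    int hThreshold = binary.Cols / 20; // minimum run length treated as a line (illustrative)
                    int textSize   = 20;               // ignore segments shorter than a typical glyph (illustrative)
                    return DetectHorizontalLines(binary, hThreshold, textSize);
                }
        }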
        /// <summary>
        /// Detect the eye state from the landmark points.
        /// </summary>
        /// <param name="frame">The current video frame.</param>
        /// <param name="shape">The current landmark points.</param>
        private void DetectEyeState(System.Drawing.Bitmap frame, FullObjectDetection shape)
        {
            // get all landmark points of the left eye
            var leftEye = from i in Enumerable.Range(36, 6)
                          let p = shape.GetPart((uint)i)
                                  select new OpenCvSharp.Point(p.X, p.Y);

            // get all landmark points of the right eye
            var rightEye = from i in Enumerable.Range(42, 6)
                           let p = shape.GetPart((uint)i)
                                   select new OpenCvSharp.Point(p.X, p.Y);

            // draw the eye areas into a new image
            using (var mask = new Mat(new Size(frame.Width, frame.Height), MatType.CV_8UC1))
            {
                mask.SetTo(0);
                Cv2.FillConvexPoly(mask, leftEye, Scalar.White);
                Cv2.FillConvexPoly(mask, rightEye, Scalar.White);

                // calculate surface area of both eyes
                int area = Cv2.CountNonZero(mask);

                // the maximum possible area is 40% of the surface area of both eyeballs
                int    r1             = (shape.GetPart(39).X - shape.GetPart(36).X) / 2;
                int    r2             = (shape.GetPart(45).X - shape.GetPart(42).X) / 2;
                double normalizedArea = 0.4 * Math.PI * r1 * r1 + 0.4 * Math.PI * r2 * r2;

                // calculate fractional area and normalize on a 0-100 scale
                var value = (int)(100 * area / normalizedArea - 20);
                eyeStateValue = value >= 0 && value <= 100 ? value : 0;

                // calculate bounding box around eyes
                var rect = Cv2.BoundingRect(Enumerable.Union(leftEye, rightEye));
                rect.Inflate(30, 30);

                // copy the eye image to the picturebox
                var maskImg = BitmapConverter.ToBitmap(mask.Clone(rect));
                eyeBox.Image = maskImg;
            }
        }
        public List <Rect> DetectLetters(Mat input)
        {
            Mat mask = Mat.Zeros(input.Size(), MatType.CV_8UC1);

            Mat[] contours;
            var   bounds    = new List <Rect>();
            var   hierarchy = new List <Vec4i>();

            input.FindContours(out contours, OutputArray.Create(hierarchy), ContourRetrieval.CComp, ContourChain.ApproxSimple, new Point(0, 0));
            for (var idx = 0; idx >= 0; idx = hierarchy[idx][0])
            {
                var rect = Cv2.BoundingRect(contours[idx]);
                Cv2.DrawContours(mask, contours, idx, new Scalar(255, 255, 255));
                var maskRoi = new Mat(mask, rect);
                var fill    = (double)maskRoi.CountNonZero() / (double)(rect.Width * rect.Height);
                if (fill > _minFill && rect.Height > 8 && rect.Width > 8)
                {
                    bounds.Add(rect);
                }
            }
            return(bounds);
        }
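A possible caller for DetectLetters, sketched with the assumption that _minFill is a field of the same class; the Otsu binarization and closing step are a generic choice rather than the original author's preprocessing, and the enum names follow current OpenCvSharp releases while the snippet above targets an older API.

        // Hypothetical caller: binarize a document photo, merge neighbouring strokes,
        // then let DetectLetters return candidate character boxes.
        public List <Rect> FindLetterBoxes(Mat bgr)
        {
            using (Mat gray = new Mat())
                using (Mat binary = new Mat())
                {
                    Cv2.CvtColor(bgr, gray, ColorConversionCodes.BGR2GRAY);
                    Cv2.Threshold(gray, binary, 0, 255, ThresholdTypes.BinaryInv | ThresholdTypes.Otsu);

                    // Close small gaps so each glyph forms one blob (kernel size is illustrative).
                    Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));
                    Cv2.MorphologyEx(binary, binary, MorphTypes.Close, kernel);

                    return DetectLetters(binary);
                }
        }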
Example #4
    public Point[] getContours(Mat image)
    {
        HierarchyIndex[] hierarchyIndexes;
        Point[][]        contours;
        Cv2.FindContours(
            image,
            out contours,
            out hierarchyIndexes,
            RetrievalModes.External,
            ContourApproximationModes.ApproxNone);

        if (contours.Length == 0)
        {
            return(null);
        }

        // Find biggest contours
        var contourIndex        = 0;
        var previousArea        = 0;
        var biggestContourRect  = Cv2.BoundingRect(contours[0]);
        var biggestContourIndex = 0;

        while ((contourIndex >= 0))
        {
            var contour = contours[contourIndex];

            var boundingRect     = Cv2.BoundingRect(contour);         //Find bounding rect for each contour
            var boundingRectArea = boundingRect.Width * boundingRect.Height;
            if (boundingRectArea > previousArea)
            {
                biggestContourRect  = boundingRect;
                previousArea        = boundingRectArea;
                biggestContourIndex = contourIndex;
            }

            contourIndex = hierarchyIndexes[contourIndex].Next;
        }
        return(contours[biggestContourIndex]);
    }
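A short sketch of how getContours might be driven: threshold a frame, take the largest blob's outline and draw its bounding box. The fixed threshold of 128 is illustrative.

    // Hypothetical caller: outline the dominant bright object in a BGR frame.
    public void DrawLargestObject(Mat frame)
    {
        using (Mat gray = new Mat())
            using (Mat binary = new Mat())
            {
                Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                Cv2.Threshold(gray, binary, 128, 255, ThresholdTypes.Binary);

                Point[] biggest = getContours(binary);
                if (biggest != null)
                {
                    Cv2.Rectangle(frame, Cv2.BoundingRect(biggest), Scalar.Green, 2);
                }
            }
    }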
Example #5
        public static void DrawSquares(Mat image, List <List <Point> > squares)
        {
            for (int i = 0; i < squares.Count; i++)
            {
                Point p     = squares[i][0];
                int   n     = (int)squares[i].Count;
                int   shift = 1;

                Rect r = Cv2.BoundingRect(InputArray.Create <Point>(squares[i]));
                r.X      = r.X + r.Width / 4;
                r.Y      = r.Y + r.Height / 4;
                r.Width  = r.Width / 2;
                r.Height = r.Height / 2;

                Mat    roi   = new Mat(image, r);
                Scalar color = Scalar.Red;// Cv2.Mean(roi);
                Cv2.Polylines(image, squares, true, color, 2, LineTypes.AntiAlias, shift);

                var center = new Point(r.X + r.Width / 2, r.Y + r.Height / 2);
                Cv2.Ellipse(image, center, new Size(r.Width / 2, r.Height / 2), 0, 0, 360, color, 2, LineTypes.AntiAlias);
            }
        }
Example #6
        /**
         * Remove small noise blobs such as decimal points.
         */
        private static Mat DropSmallAreaNoise(Mat ImgBinary)
        {
            var Binary2 = new Mat();

            Cv2.BitwiseNot(ImgBinary, Binary2);
            Point[][] Contours = Cv2.FindContoursAsArray(Binary2, RetrievalModes.External, ContourApproximationModes.ApproxSimple);

            // Find the largest bounding-box area among the contours
            int MaxArea = 0;

            foreach (Point[] Contour in Contours)
            {
                Rect Region = Cv2.BoundingRect(Contour);
                var  Area1  = Region.Width * Region.Height;
                if (Area1 > MaxArea)
                {
                    MaxArea = Area1;
                }
            }

            // Build the image mask
            Mat MaskMat = Mat.Zeros(Binary2.Rows, Binary2.Cols, MatType.CV_8UC1);

            foreach (Point[] Contour in Contours)
            {
                Rect Region = Cv2.BoundingRect(Contour);
                var  Area1  = Region.Width * Region.Height;
                if (Region.Height > Region.Width && (0.0 + Region.Height) / Region.Width < 3 && Area1 * 4 < MaxArea)
                {
                    // Set the region of interest to pure white (white is assumed to be the background color)
                    MaskMat[Region].SetTo(new Scalar(255));
                }
            }

            var Result = new Mat();

            Cv2.BitwiseOr(ImgBinary, MaskMat, Result);
            return(Result);
        }
Example #7
        public static SoftwareBitmap ResizeToActual(SoftwareBitmap Input)
        {
            using (Mat inputMat = Input.SoftwareBitmapToMat())
                using (Mat tempMat = new Mat(inputMat.Rows, inputMat.Cols, MatType.CV_8UC4))
                {
                    Cv2.CvtColor(inputMat, tempMat, ColorConversionCodes.BGRA2GRAY);
                    Cv2.Threshold(tempMat, tempMat, 100, 255, ThresholdTypes.Binary);

                    using (Mat Hie = new Mat(tempMat.Size(), tempMat.Type()))
                    {
                        Cv2.FindContours(tempMat, out Mat[] Result, Hie, RetrievalModes.External, ContourApproximationModes.ApproxNone);

                        try
                        {
                            if (Result.FirstOrDefault() is Mat Contour)
                            {
                                Rect Area = Cv2.BoundingRect(Contour);
                                return(inputMat[Area].Clone().MatToSoftwareBitmap());
                            }
                            else
                            {
                                return(SoftwareBitmap.Copy(Input));
                            }
                        }
                        catch
                        {
                            return(SoftwareBitmap.Copy(Input));
                        }
                        finally
                        {
                            foreach (Mat Item in Result)
                            {
                                Item.Dispose();
                            }
                        }
                    }
                }
        }
Example #8
        private Image <Rgba32> Swap(Mat img1, IList <Point2f> points1, Mat img2, IList <Point2f> points2)
        {
            var img1Warped = img2.Clone();

            img1.ConvertTo(img1, MatType.CV_32F);
            img1Warped.ConvertTo(img1Warped, MatType.CV_32F);

            var rect      = new Rect(0, 0, img1Warped.Cols, img1Warped.Rows);
            var hullIndex = Cv2.ConvexHullIndices(points2);

            var hull1 = hullIndex.Select(i => points1[i]).ToList();
            var hull2 = hullIndex.Select(i => points2[i]).ToList();

            var dt = GetDelaunayTriangulationIndexes(rect, hull2).ToList();

            foreach (var triangleIndexes in dt)
            {
                var t1 = triangleIndexes.Select(i => hull1[i]).ToList();
                var t2 = triangleIndexes.Select(i => hull2[i]).ToList();
                WarpTriangle(img1, img1Warped, t1, t2);
            }

            var hull8U = hull2.Select(p => new Point((int)p.X, (int)p.Y)).ToList();

            using (var mask = Mat.Zeros(img2.Rows, img2.Cols, MatType.CV_8UC3).ToMat())
            {
                Cv2.FillConvexPoly(mask, hull8U, new Scalar(255, 255, 255));
                var r      = Cv2.BoundingRect(hull2).Intersect(rect);
                var center = r.Location + new Point(r.Width / 2, r.Height / 2);
                img1Warped.ConvertTo(img1Warped, MatType.CV_8UC3);
                img2.ConvertTo(img2, MatType.CV_8UC3);
                using (var outputMat = new Mat())
                {
                    Cv2.SeamlessClone(img1Warped, img2, mask, center, outputMat, SeamlessCloneMethods.NormalClone);
                    return(Image.Load(outputMat.ToBytes()));
                }
            }
        }
        private Rect CalculateSquareRect(Mat[] contours)
        {
            if (contours.Length <= 1)
            {
                return(Rect.Empty);
            }

            Array.Sort(contours, (x, y) => y.ContourArea().CompareTo(x.ContourArea()));

            Mat biggestContour = contours[1];

            Mat approx  = new Mat();
            var epsilon = 0.1f * biggestContour.ArcLength(true);

            Cv2.ApproxPolyDP(biggestContour, approx, epsilon, true);

            Rect  approxRect = Cv2.BoundingRect(approx);
            float ar         = approxRect.Width / (float)approxRect.Height;

            Cv2.Rectangle(frame, approxRect, Scalar.Green, 2);

            return(approxRect);
        }
Example #10
 private static IEnumerable <int[]> GetDelaunayTriangulationIndexes(Rect rect, List <Point2f> points)
 {
     rect = Cv2.BoundingRect(points).Union(rect);
     using (var subdivision = new Subdiv2D(rect))
     {
         subdivision.Insert(points);
         var triangulation = subdivision.GetTriangleList();
         foreach (var cell in triangulation)
         {
             var p1 = new Point2f(cell.Item0, cell.Item1);
             var p2 = new Point2f(cell.Item2, cell.Item3);
             var p3 = new Point2f(cell.Item4, cell.Item5);
             if (rect.Contains(p1) && rect.Contains(p2) && rect.Contains(p3))
             {
                 var indexA  = points.IndexOf(p1);
                 var indexB  = points.IndexOf(p2);
                 var indexC  = points.IndexOf(p3);
                 var indexes = new[] { indexA, indexB, indexC };
                 yield return(indexes);
             }
         }
     }
 }
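A tiny sanity check for the triangulation helper above: feed it the four corners of a square and print the index triples it yields. The coordinates and the helper method name PrintSquareTriangulation are illustrative only.

 // Hypothetical check: triangulate a square and list the resulting triangles by point index.
 private static void PrintSquareTriangulation()
 {
     var corners = new List <Point2f>
     {
         new Point2f(0, 0), new Point2f(100, 0), new Point2f(100, 100), new Point2f(0, 100)
     };
     foreach (var triangle in GetDelaunayTriangulationIndexes(new Rect(0, 0, 101, 101), corners))
     {
         Console.WriteLine($"triangle: {triangle[0]}, {triangle[1]}, {triangle[2]}");
     }
 }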
Example #11
        private static List <Point[]> PointsBySizeToOtherRects(List <Point[]> pointSize)
        {
            // Calculate the average height
            var avareageHeight = 0.0;

            foreach (var cont in pointSize)
            {
                Rect rect = Cv2.BoundingRect(cont);
                avareageHeight += rect.Height;
            }
            avareageHeight /= pointSize.Count;
            Console.WriteLine("avareageHeight" + avareageHeight);
            List <Point[]> pointsBySizeToOtherRects = new List <Point[]>();

            foreach (var cont in pointSize)
            {
                Rect rect = Cv2.BoundingRect(cont);
                if (rect.Height > avareageHeight * 0.7)
                {
                    pointsBySizeToOtherRects.Add(cont);
                }
            }
            return(pointsBySizeToOtherRects);
        }
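A sketch of how this filter might be fed, assuming a binary image is already available: collect the external contours as point arrays and keep only those at least roughly 70% as tall as the average. The wrapper name FilterShortContours is hypothetical.

        // Hypothetical caller: discard contours that are much shorter than the average one.
        private static List <Point[]> FilterShortContours(Mat binary)
        {
            Point[][] contours = Cv2.FindContoursAsArray(binary, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            return PointsBySizeToOtherRects(contours.ToList());
        }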
Example #12
        // Locate and display the license plate regions
        private void ProcessAndShowImage(Bitmap image, PlateLocator.ParameterList pl)
        {
            currentTabCount = 0;

            Mat matIn = image.ToMat();

            AddTag("原图", matIn);

            // Convert to an HSV image
            Mat matHsv = matIn.CvtColor(ColorConversionCodes.BGR2HSV);

            AddTag("HSV", matHsv);

            // Equalize the V channel, then merge the channels back
            Mat[] matToHsv = new Mat[3];
            Cv2.Split(matHsv, out matToHsv);
            Cv2.EqualizeHist(matToHsv[2], matToHsv[2]);
            Mat equalizeHistHsv = new Mat();

            Cv2.Merge(matToHsv, equalizeHistHsv);
            AddTag("均衡HSV", equalizeHistHsv);

            // Find the yellow and blue regions in the equalized HSV color space
            Mat    matYellow  = new Mat();
            Mat    matBlue    = new Mat();
            Scalar yellow_low = new Scalar(15, 70, 70);
            Scalar yellow_up  = new Scalar(40, 255, 255);
            Scalar blue_low   = new Scalar(100, 70, 70);
            Scalar blue_up    = new Scalar(140, 255, 255);

            Cv2.InRange(equalizeHistHsv, yellow_low, yellow_up, matYellow);
            Cv2.InRange(equalizeHistHsv, blue_low, blue_up, matBlue);
            Mat matAll = matBlue + matYellow;

            AddTag("黄区", matYellow);
            AddTag("蓝区", matBlue);
            AddTag("黄蓝区", matAll);

            // Clean up the selected color regions with morphological operations
            Mat matAllDilate = new Mat();
            Mat matAllErode  = new Mat();
            Mat element      = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(7, 3));

            Cv2.Dilate(matAll, matAllDilate, element);
            Cv2.Erode(matAllDilate, matAllErode, element);
            AddTag("闭操作", matAllErode);


            // Find the contours
            OpenCvSharp.Point[][] contours;         //vector<vector<Point>> contours;
            HierarchyIndex[]      hierarchyIndexes; //vector<Vec4i> hierarchy;
            Cv2.FindContours(
                matAllErode,
                out contours,
                out hierarchyIndexes,
                mode: RetrievalModes.Tree,
                method: ContourApproximationModes.ApproxSimple); // extract the contours

            Mat matContours = matIn.Clone();

            Cv2.DrawContours(matContours, contours, -1, new Scalar(0, 0, 255), 2); // draw the contour outlines
            AddTag("轮廓", matContours);

            // Compute the bounding rectangle of each contour
            Mat         matRects = matIn.Clone();
            List <Rect> rects    = new List <Rect>();

            foreach (OpenCvSharp.Point[] p in contours)
            {
                Rect rect = Cv2.BoundingRect(p);
                if ((double)rect.Height / rect.Width > pl.HeightDivideWidthLow && (double)rect.Height / rect.Width < pl.HeightDivedeWidthUp &&
                    rect.Height > pl.HeightLow && rect.Height < pl.HeightUp &&
                    rect.Width > pl.WidthLow && rect.Width < pl.WidthUp)
                {
                    rects.Add(rect);
                    Cv2.Rectangle(matRects, rect, new Scalar(0, 0, 255), 3);
                }
            }
            AddTag("外接矩形", matRects);


            ShowSpliteImage(rects, matIn);
        }
Example #13
        private void Holes1(Mat cropped_image)
        {
            try
            {
                Holecount = 0;
                Mat hole_crop = new Mat();
                OpenCvSharp.Point[][] contour;
                HierarchyIndex[]      hier;
                OpenCvSharp.Rect      rect1  = new OpenCvSharp.Rect();
                OpenCvSharp.Size      ksize  = new OpenCvSharp.Size(3, 3);
                OpenCvSharp.Size      kksize = new OpenCvSharp.Size(5, 5);
                Mat element  = Cv2.GetStructuringElement(MorphShapes.Cross, ksize);
                Mat element1 = Cv2.GetStructuringElement(MorphShapes.Cross, kksize);
                cropped_image.CopyTo(hole_crop);
                if (hole_crop.Channels() == 1)
                {
                    Cv2.CvtColor(hole_crop, hole_crop, ColorConversionCodes.GRAY2BGR);
                }

                OpenCvSharp.Rect roi1 = new OpenCvSharp.Rect(47, 320, 180, 236); //47,340,180,220  47, 338, 180, 220
                Cv2.Rectangle(hole_crop, roi1, Scalar.Green, 3);                 //3
                OpenCvSharp.Point pnt1 = new OpenCvSharp.Point(roi1.X, roi1.Y);
                Cv2.PutText(hole_crop, 1.ToString(), pnt1, HersheyFonts.HersheyPlain, 8, Scalar.Red, 5);

                OpenCvSharp.Rect roi2 = new OpenCvSharp.Rect(430, 27, 200, 215); //370,25,200,200    //430,30,200,210
                Cv2.Rectangle(hole_crop, roi2, Scalar.Green, 3);                 //3
                OpenCvSharp.Point pnt2 = new OpenCvSharp.Point(roi2.X - 80, roi2.Y + 80);
                Cv2.PutText(hole_crop, 2.ToString(), pnt2, HersheyFonts.HersheyPlain, 8, Scalar.Red, 5);

                OpenCvSharp.Rect roi3 = new OpenCvSharp.Rect(881, 320, 202, 250); //870,320,200,250    877, 320, 200, 250
                Cv2.Rectangle(hole_crop, roi3, Scalar.Green, 3);                  //3
                OpenCvSharp.Point pnt3 = new OpenCvSharp.Point(roi3.X, roi3.Y);
                Cv2.PutText(hole_crop, 3.ToString(), pnt3, HersheyFonts.HersheyPlain, 8, Scalar.Red, 5);

                OpenCvSharp.Rect roi4 = new OpenCvSharp.Rect(550, 780, 185, 220); //580,800,200,200   550, 790, 170, 210   550, 800, 170, 200
                Cv2.Rectangle(hole_crop, roi4, Scalar.Green, 3);                  //3
                OpenCvSharp.Point pnt4 = new OpenCvSharp.Point(roi4.X, roi4.Y);
                Cv2.PutText(hole_crop, 4.ToString(), pnt4, HersheyFonts.HersheyPlain, 8, Scalar.Red, 5);

                hole_crop.CopyTo(crop1);
                if (crop1.Channels() > 1)
                {
                    Cv2.CvtColor(crop1, crop1, ColorConversionCodes.BGR2GRAY);
                }
                for (int a = 0; a < 4; a++)
                {
                    if (a == 0)
                    {
                        Mat tempcrop = new Mat(crop1, roi1);
                        Cv2.Threshold(tempcrop, tempcrop, 90, 255, ThresholdTypes.Otsu); //110
                        Cv2.Dilate(tempcrop, tempcrop, element1, null, 1);               //1
                        Cv2.Erode(tempcrop, tempcrop, element, null, 4);                 //6
                        Cv2.FindContours(tempcrop, out contour, out hier, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);

                        for (int b = 0; b < contour.Length; b++)
                        {
                            rect1 = Cv2.BoundingRect(contour[b]);
                            if (Cv2.ContourArea(contour[b]) > 800 && Cv2.ContourArea(contour[b]) < 3000)//300,4000
                            {
                                rect1 = Cv2.BoundingRect(contour[b]);
                                Mat    spot_img    = new Mat(tempcrop, rect1);
                                int    white_pix   = Cv2.CountNonZero(spot_img);
                                int    black_pix   = spot_img.Width * spot_img.Height - white_pix;
                                double aspectratio = Convert.ToDouble(rect1.Width) / Convert.ToDouble(rect1.Height);
                                if (aspectratio > 0.83 && aspectratio < 1.65 && black_pix > white_pix)  // 0.83,1.80
                                {
                                    //if (rect1.Height > 25 && rect1.Height < 250 & rect1.Width > 30  && rect1.Width < 200)//10,200,200,10//25,250,30,200
                                    //{
                                    OpenCvSharp.Rect hole_rect = new OpenCvSharp.Rect(rect1.X + roi1.X, rect1.Y + roi1.Y, rect1.Width, rect1.Height);
                                    Cv2.Rectangle(hole_crop, hole_rect, Scalar.LimeGreen, 5);
                                    Cv2.DrawContours(tempcrop, contour, b, Scalar.Blue, 3);
                                    Holecount++;
                                    Hole_absent = true;
                                    break;
                                    //}
                                }
                            }
                        }
                        if (!Hole_absent)
                        {
                            OpenCvSharp.Rect hole_rect = new OpenCvSharp.Rect(47, 320, 180, 236);
                            Cv2.Rectangle(hole_crop, hole_rect, Scalar.Red, 5);
                        }
                        Hole_absent = false;
                        //Cv2.NamedWindow("color", WindowFlags.Normal);
                        //Cv2.ImShow("color", tempcrop);
                    }

                    if (a == 1)
                    {
                        Mat tempcrop1 = new Mat(crop1, roi2);
                        Cv2.AdaptiveThreshold(tempcrop1, tempcrop1, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 89, 58); //89,58
                        Cv2.Erode(tempcrop1, tempcrop1, element1, null, 4);                                                            //6
                        Cv2.FindContours(tempcrop1, out contour, out hier, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);

                        for (int b = 0; b < contour.Length; b++)
                        {
                            if (Cv2.ContourArea(contour[b]) > 500 && Cv2.ContourArea(contour[b]) < 4000)//  //600,12000
                            {
                                rect1 = Cv2.BoundingRect(contour[b]);
                                Mat spot_img = new Mat(tempcrop1, rect1);
                                //Cv2.NamedWindow("con", WindowFlags.Normal);
                                //Cv2.ImShow("con", spot_img);
                                //Cv2.ImWrite("con1.bmp", spot_img);
                                int    white_pix   = Cv2.CountNonZero(spot_img);
                                int    black_pix   = spot_img.Width * spot_img.Height - white_pix + 10;
                                double aspectratio = Convert.ToDouble(rect1.Width) / Convert.ToDouble(rect1.Height);
                                if (aspectratio < 1.55 && aspectratio > 0.80 /*&& black_pix > white_pix*/)             //1.38 0.80  1.45,0.80
                                {
                                    if (rect1.Height > 25 && rect1.Height < 80 && rect1.Width < 80 && rect1.Width > 25) //22,80,80,25
                                    {
                                        OpenCvSharp.Rect hole_rect = new OpenCvSharp.Rect(rect1.X + roi2.X, rect1.Y + roi2.Y, rect1.Width, rect1.Height);
                                        Cv2.Rectangle(hole_crop, hole_rect, Scalar.LimeGreen, 5);
                                        Cv2.DrawContours(tempcrop1, contour, b, Scalar.Blue, 3);
                                        Holecount++;
                                        Hole_absent = true;
                                        break;
                                    }
                                }
                            }
                        }
                        if (!Hole_absent)
                        {
                            OpenCvSharp.Rect hole_rect = new OpenCvSharp.Rect(430, 27, 200, 215);
                            Cv2.Rectangle(hole_crop, hole_rect, Scalar.Red, 5);
                        }
                        Hole_absent = false;
                        //Cv2.NamedWindow("color1", WindowFlags.Normal);
                        //Cv2.ImShow("color1", tempcrop1);
                    }
                    if (a == 2)
                    {
                        Mat tempcrop = new Mat(crop1, roi3);

                        Cv2.AdaptiveThreshold(tempcrop, tempcrop, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 71, 19);
                        Cv2.MorphologyEx(tempcrop, tempcrop, MorphTypes.Close, element);
                        Cv2.FindContours(tempcrop, out contour, out hier, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);

                        for (int b = 0; b < contour.Length; b++)
                        {
                            if (Cv2.ContourArea(contour[b]) > 500 && Cv2.ContourArea(contour[b]) < 10000)//300,4000  300,6000
                            {
                                rect1 = Cv2.BoundingRect(contour[b]);
                                Mat    spot_img    = new Mat(tempcrop, rect1);
                                int    white_pix   = Cv2.CountNonZero(spot_img);
                                int    black_pix   = spot_img.Width * spot_img.Height - white_pix;
                                double aspectratio = Convert.ToDouble(rect1.Width) / Convert.ToDouble(rect1.Height);
                                if (aspectratio < 1.55 && aspectratio > 0.75 && black_pix > white_pix /*|| white_pix < 2000 || black_pix > 500*/)//1.43 0.75
                                {
                                    //if (rect1.Height > 28 && rect1.Height < 200 & rect1.Width < 300 && rect1.Width > 30)//30,200,300,30
                                    //{
                                    OpenCvSharp.Rect hole_rect = new OpenCvSharp.Rect(rect1.X + roi3.X, rect1.Y + roi3.Y, rect1.Width, rect1.Height);
                                    Cv2.Rectangle(hole_crop, hole_rect, Scalar.LimeGreen, 5);
                                    Cv2.DrawContours(tempcrop, contour, b, Scalar.Blue, 3);
                                    Holecount++;
                                    Hole_absent = true;
                                    break;
                                    //}
                                }
                            }
                        }
                        if (!Hole_absent)
                        {
                            OpenCvSharp.Rect hole_rect = new OpenCvSharp.Rect(881, 320, 202, 250);
                            Cv2.Rectangle(hole_crop, hole_rect, Scalar.Red, 5);
                        }
                        Hole_absent = false;
                        //Cv2.NamedWindow("color2", WindowFlags.Normal);
                        //Cv2.ImShow("color2", tempcrop);
                    }
                    if (a == 3)
                    {
                        Mat tempcrop = new Mat(crop1, roi4);
                        Cv2.Threshold(tempcrop, tempcrop, 90, 255, ThresholdTypes.Otsu);                                              //60//98
                        Cv2.Dilate(tempcrop, tempcrop, element1, null, 1);                                                            //1
                        Cv2.AdaptiveThreshold(tempcrop, tempcrop, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 75, 171); //71,171
                        Cv2.Erode(tempcrop, tempcrop, element1, null, 3);                                                             //4
                        Cv2.FindContours(tempcrop, out contour, out hier, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);
                        for (int b = 0; b < contour.Length; b++)
                        {
                            rect1 = Cv2.BoundingRect(contour[b]);
                            if (Cv2.ContourArea(contour[b]) > 1000 && Cv2.ContourArea(contour[b]) < 4000)//1000,4000
                            {
                                rect1 = Cv2.BoundingRect(contour[b]);
                                Mat spot_img = new Mat(tempcrop, rect1);

                                int    white_pix   = Cv2.CountNonZero(spot_img);
                                int    black_pix   = spot_img.Width * spot_img.Height - white_pix;
                                double aspectratio = Convert.ToDouble(rect1.Width) / Convert.ToDouble(rect1.Height);
                                if (aspectratio > 0.40 && aspectratio < 1.50 && black_pix > white_pix)//1.10 0.80 0.40,1.40
                                {
                                    //if (rect1.Height > 30 && rect1.Height < 300 && rect1.Width >20 && rect1.Width < 200)//30,90,90,30  30,300,20,200
                                    //{
                                    OpenCvSharp.Rect hole_rect = new OpenCvSharp.Rect(rect1.X + roi4.X, rect1.Y + roi4.Y, rect1.Width, rect1.Height);
                                    Cv2.Rectangle(hole_crop, hole_rect, Scalar.LimeGreen, 5);
                                    Cv2.DrawContours(tempcrop, contour, b, Scalar.Blue, 3);
                                    Holecount++;
                                    Hole_absent = true;
                                    break;
                                    //}
                                }
                            }
                        }
                        if (!Hole_absent)
                        {
                            OpenCvSharp.Rect hole_rect = new OpenCvSharp.Rect(550, 780, 185, 220);
                            Cv2.Rectangle(hole_crop, hole_rect, Scalar.Red, 5);
                        }
                        Hole_absent = false;
                        //Cv2.NamedWindow("color3", WindowFlags.Normal);
                        //Cv2.ImShow("color3", tempcrop);
                    }
                    hole_crop.CopyTo(finalimg);
                }
            }
            catch (Exception Ex)
            {
                //MessageBox.Show(Ex.Message.ToString());
                log.Error("Error Message: " + Ex.Message.ToString(), Ex);
            }
        }
Example #14
        private void thread(Mat inimg)
        {
            try
            {
                threadcount = 0;
                Mat thread      = new Mat();
                Mat thread_copy = new Mat();
                OpenCvSharp.Point[][] contour1;
                HierarchyIndex[]      hier1;
                OpenCvSharp.Rect      rect3  = new OpenCvSharp.Rect();
                OpenCvSharp.Size      kksize = new OpenCvSharp.Size(1, 1);
                Mat element = Cv2.GetStructuringElement(MorphShapes.Cross, kksize);

                if (crop.Channels() > 1)
                {
                    Cv2.CvtColor(crop, crop, ColorConversionCodes.BGR2GRAY);
                }
                OpenCvSharp.Rect rectan = new OpenCvSharp.Rect(205, 200, 700, 700);
                //Cv2.Rectangle(crop, rectan, Scalar.White, 2);

                Mat thread_crop = new Mat(crop, rectan);
                Cv2.NamedWindow("tempcrop", WindowFlags.Normal);
                Cv2.ImShow("tempcrop", thread_crop);
                template_matching(crop, temp_match);
                //template_matching(thread_crop, temp_match);
                resizeimg.CopyTo(thread);
                //Cv2.NamedWindow("templete", WindowFlags.Normal);
                //Cv2.ImShow("templete", resizeimg);
                if (thread.Channels() > 1)
                {
                    Cv2.CvtColor(thread, thread, ColorConversionCodes.BGR2GRAY);
                }

                OpenCvSharp.Rect thread_roi1 = new OpenCvSharp.Rect(50, 90, 33, 23); //48, 84, 33, 23
                Cv2.Rectangle(thread, thread_roi1, Scalar.Black, -1);
                OpenCvSharp.Rect final_rect = new OpenCvSharp.Rect(20, 23, 105, 80); //20,23,105,80
                Cv2.Rectangle(thread, final_rect, Scalar.Green, 1);
                thread = new Mat(thread, final_rect);
                //Cv2.NamedWindow("mask", WindowFlags.Normal);
                //Cv2.ImShow("mask", thread);
                thread.CopyTo(thread_copy);
                Cv2.Sobel(thread, thread, MatType.CV_8UC1, 0, 1, 1);
                Cv2.Threshold(thread, thread, 120, 255, ThresholdTypes.Otsu);

                //Cv2.NamedWindow("inrange", WindowFlags.Normal);
                //Cv2.ImShow("inrange", thread);
                pixelcount = Cv2.CountNonZero(thread);

                Cv2.FindContours(thread, out contour1, out hier1, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);
                if (thread_copy.Channels() == 1)
                {
                    Cv2.CvtColor(thread_copy, thread_copy, ColorConversionCodes.GRAY2BGR);
                }
                for (int i = 0; i < contour1.Length; i++)
                {
                    rect3 = Cv2.BoundingRect(contour1[i]);
                    if (Cv2.ContourArea(contour1[i]) > 15 && Cv2.ContourArea(contour1[i]) < 1000)         //60,500 20,1000
                    {
                        if (rect3.Width > 2 && rect3.Width < 90 && rect3.Height < 26 && rect3.Height > 1) //20,90,26,3
                        {
                            Cv2.DrawContours(thread_copy, contour1, i, Scalar.LimeGreen, 2);
                            threadcount++;
                        }
                    }
                }

                //Cv2.NamedWindow("templete1", WindowFlags.Normal);
                //Cv2.ImShow("templete1", thread_copy);
                Cv2.ImWrite("thread" + ".bmp", resizeimg);
            }
            catch (Exception Ex)
            {
                //MessageBox.Show(Ex.Message.ToString());
                log.Error("Error Message: " + Ex.Message.ToString(), Ex);
            }
        }
Example #15
        // https://github.com/VahidN/OpenCVSharp-Samples/blob/master/OpenCVSharpSample19/Program.cs
        public static void Gradient(Mat src)
        {
            var gray     = new Mat();
            var channels = src.Channels();

            if (channels > 1)
            {
                Cv2.CvtColor(src, gray, ColorConversionCodes.BGRA2GRAY);
            }
            else
            {
                src.CopyTo(gray);
            }


            // compute the Scharr gradient magnitude representation of the images
            // in both the x and y direction
            var gradX = new Mat();

            Cv2.Sobel(gray, gradX, MatType.CV_32F, xorder: 1, yorder: 0, ksize: -1);
            //Cv2.Scharr(gray, gradX, MatType.CV_32F, xorder: 1, yorder: 0);
            Cv2.ImShow("gradX", gradX);

            var gradY = new Mat();

            Cv2.Sobel(gray, gradY, MatType.CV_32F, xorder: 0, yorder: 1, ksize: -1);
            //Cv2.Scharr(gray, gradY, MatType.CV_32F, xorder: 0, yorder: 1);
            Cv2.ImShow("gradY", gradY);

            // subtract the y-gradient from the x-gradient
            var gradient = new Mat();

            Cv2.Subtract(gradX, gradY, gradient);
            Cv2.ConvertScaleAbs(gradient, gradient);

            Cv2.ImShow("Gradient", gradient);


            // blur and threshold the image
            var blurred = new Mat();

            Cv2.Blur(gradient, blurred, new Size(9, 9));

            double thresh      = 127.0;
            var    threshImage = new Mat();

            Cv2.Threshold(blurred, threshImage, thresh, 255, ThresholdTypes.Binary);


            bool debug = true;

            if (debug)
            {
                Cv2.ImShow("Thresh", threshImage);
                Cv2.WaitKey(1); // do events
            }


            // construct a closing kernel and apply it to the thresholded image
            var kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(21, 7));
            var closed = new Mat();

            Cv2.MorphologyEx(threshImage, closed, MorphTypes.Close, kernel);

            if (debug)
            {
                Cv2.ImShow("Closed", closed);
                Cv2.WaitKey(1); // do events
            }


            // perform a series of erosions and dilations
            Cv2.Erode(closed, closed, null, iterations: 4);
            Cv2.Dilate(closed, closed, null, iterations: 4);

            if (debug)
            {
                Cv2.ImShow("Erode & Dilate", closed);
                Cv2.WaitKey(1); // do events
            }


            //find the contours in the thresholded image, then sort the contours
            //by their area, keeping only the largest one

            Point[][]        contours;
            HierarchyIndex[] hierarchyIndexes;
            Cv2.FindContours(
                closed,
                out contours,
                out hierarchyIndexes,
                mode: RetrievalModes.CComp,
                method: ContourApproximationModes.ApproxSimple);

            if (contours.Length == 0)
            {
                throw new NotSupportedException("Couldn't find any object in the image.");
            }

            var contourIndex       = 0;
            var previousArea       = 0;
            var biggestContourRect = Cv2.BoundingRect(contours[0]);

            while ((contourIndex >= 0))
            {
                var contour = contours[contourIndex];

                var boundingRect     = Cv2.BoundingRect(contour); //Find bounding rect for each contour
                var boundingRectArea = boundingRect.Width * boundingRect.Height;
                if (boundingRectArea > previousArea)
                {
                    biggestContourRect = boundingRect;
                    previousArea       = boundingRectArea;
                }

                contourIndex = hierarchyIndexes[contourIndex].Next;
            }
        }
Example #16
        /**
         * Main recognition pipeline.
         * Returns the recognized digit string.
         * needSave: whether to write the intermediate images to disk.
         */
        public static string Process(string Img, bool needSave = false)
        {
            // Image path
            // const string Img = @"D:\work\sharp\DigitalTube1\data\1873.jpg";
            string ImgPath = Path.GetDirectoryName(Img);
            string ImgName = Path.GetFileNameWithoutExtension(Img);
            string ImgPref = Path.Combine(ImgPath, ImgName);

            // Load the original image
            var OriginImg = Cv2.ImRead(Img);
            // ------------------- debug start
            //Cv2.NamedWindow("OriginImg", WindowMode.Normal);
            //Cv2.ImShow("OriginImg", OriginImg);
            // ------------------- debug end

            // Convert to grayscale
            var Grayscale = Cv2.ImRead(Img, ImreadModes.Grayscale);
            // ------------------- debug start
            //Cv2.NamedWindow("Grayscale", WindowMode.Normal);
            //Cv2.ImShow("Grayscale", Grayscale);
            // ------------------- debug end

            // Pad the image by 10 pixels on each side
            var CopyBordered = new Mat();

            Cv2.CopyMakeBorder(Grayscale, CopyBordered, 10, 10, 10, 10, BorderTypes.Constant, new Scalar(255));
            // ------------------- debug start
            //Cv2.NamedWindow("CopyBordered", WindowMode.Normal);
            //Cv2.ImShow("CopyBordered", CopyBordered);
            // ------------------- debug end

            // Apply Gaussian blur (denoising)
            var Blured = new Mat();

            Cv2.GaussianBlur(CopyBordered, Blured, new Size(15, 15), 0);
            // ------------------- debug start
            //Cv2.NamedWindow("Blured", WindowMode.Normal);
            //Cv2.ImShow("Blured", Blured);
            // ------------------- debug end

            // Convert to a binary image (pure black and white, no gray)
            var Binary = new Mat();

            Cv2.Threshold(Blured, Binary, 128, 255, ThresholdTypes.Binary);
            // ------------------- debug start
            //Cv2.NamedWindow("Binary", WindowMode.Normal);
            //Cv2.ImShow("Binary", Binary);
            // ------------------- debug end

            // Remove decimal points (small-area noise)
            var DropNoise = DropSmallAreaNoise(Binary);
            // ------------------- debug start
            //Cv2.NamedWindow("DropNoise", WindowMode.Normal);
            //Cv2.ImShow("DropNoise", DropNoise);
            // ------------------- debug end

            // Morphological opening (erosion followed by dilation)
            var Eroded     = new Mat();
            Mat KernelOpen = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(4, 4));

            Cv2.MorphologyEx(DropNoise, Eroded, MorphTypes.Open, KernelOpen, new Point(-1, -1), 7);
            // ------------------- debug start
            //Cv2.NamedWindow("Eroded", WindowMode.Normal);
            //Cv2.ImShow("Eroded", Eroded);
            // ------------------- debug end

            // Morphological closing (dilation followed by erosion)
            var Dilated     = new Mat();
            Mat KernelClose = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));

            Cv2.MorphologyEx(Eroded, Dilated, MorphTypes.Close, KernelClose, new Point(-1, -1), 5);
            // ------------------- debug start
            //Cv2.NamedWindow("Dilated", WindowMode.Normal);
            //Cv2.ImShow("Dilated", Dilated);
            // ------------------- debug end

            // Invert the image (swap black and white)
            var Binary2 = new Mat();

            Cv2.BitwiseNot(Dilated, Binary2);
            // ------------------- debug start
            //Cv2.NamedWindow("Binary2", WindowMode.Normal);
            //Cv2.ImShow("Binary2", Binary2);
            // ------------------- debug end

            var Morphologyed = Binary2.Clone();

            // Detect the contours (used below to split the image into digits)
            var Rects = new List <Rect>();

            // Find the contours
            Point[][] Contours = Cv2.FindContoursAsArray(Binary2, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            Console.WriteLine("Contours Count = " + Contours.Length);
            foreach (Point[] Contour in Contours)
            {
                Rect Region = Cv2.BoundingRect(Contour);
                Cv2.Rectangle(Morphologyed, Region, new Scalar(193, 0, 0), 4);
                Rects.Add(Region);
            }
            // ------------------- debug start
            //Cv2.NamedWindow("Contours", WindowMode.Normal);
            //Cv2.ImShow("Contours", Morphologyed);
            // ------------------- debug end
            if (needSave)
            {
                Cv2.ImWrite(ImgPref + "__Contours.png", Morphologyed);
            }

            // Sort the contours along the X axis (digits are written horizontally)
            Rects.Sort((a, b) => (a.X - b.X));

            // Process each contour region
            var ImgParts = new Mat[Rects.Count];

            for (var i = 0; i < Rects.Count; i++)
            {
                // Pad the borders of the sub-image
                var ImgPart = Binary2[Rects[i]].Clone();
                ImgParts[i] = CopyBorder(ImgPart);
                // ------------------- debug start
                //Cv2.NamedWindow("Number" + (i + 1), WindowMode.Normal);
                //Cv2.ImShow("Number" + (i + 1), ImgParts[i]);
                // ------------------- debug end
                if (needSave)
                {
                    Cv2.ImWrite(ImgPref + "__Contours(" + (i + 1) + ").png", ImgParts[i]);
                }
            }

            // Recognize each sub-image as a digit with the line-crossing (segment scan) method
            var Numbers = new char[Rects.Count];

            // If a digit cannot be recognized, it is marked as 'e'
            for (var i = 0; i < Rects.Count; i++)
            {
                Numbers[i] = myIdentification(ImgParts[i]);
                // Console.WriteLine("Number" + (i + 1) + " = " + Numbers[i]);
            }

            // Keep the windows open so the result can be inspected
            //Cv2.WaitKey();

            return(new string(Numbers));
        }
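A minimal call into the pipeline above; the file path echoes the one mentioned in the comment at the top of Process, and needSave: true writes the intermediate contour images next to the input.

        // Hypothetical entry point for the seven-segment digit reader.
        static void Main()
        {
            string digits = Process(@"D:\work\sharp\DigitalTube1\data\1873.jpg", needSave: true);
            Console.WriteLine("Recognized: " + digits);
        }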
Example #17
        public Window1()
        {
            InitializeComponent();

            Mat src = new Mat(@"./carp.jpg");

            src = src.Resize(new Size(src.Width / 2, src.Height / 2));
            Cv2.ImShow("src", src);
            Mat gray   = new Mat();
            Mat binary = new Mat();

            Cv2.CvtColor(src, gray, ColorConversionCodes.RGB2GRAY);
            gray = gray.GaussianBlur(new Size(5, 5), 0);
            Cv2.Threshold(gray, binary, 50, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
            var element = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));

            binary = binary.Erode(element);
            binary = binary.MorphologyEx(MorphTypes.Close, element);
            Cv2.ImShow("bin", binary);
            //var line = binary.Canny(100, 200);
            //Cv2.ImShow("line", line);
            //Cv2.WaitKey();
            // Arrays that receive the contours
            Point[][]        contours;
            HierarchyIndex[] hierarchy;
            Cv2.FindContours(binary, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxNone);
            // Arrays for the minimum-area (rotated) bounding rectangles
            MessageBox.Show(contours.Length.ToString());
            RotatedRect[] rotateRect    = new RotatedRect[contours.Length];
            Point[][]     contours_poly = new Point[contours.Length][];
            for (int i = 0; i < contours.Length; i++)
            {
                contours_poly[i] = Cv2.ApproxPolyDP(contours[i], 30, true); // polygon approximation; edges shorter than epsilon = 30 are merged

                rotateRect[i] = Cv2.MinAreaRect(contours_poly[i]);          // minimum-area rotated rectangle

                Point2f[] pot = new Point2f[4];                             // receives the four corner points

                //for (int i = 0; i < rotateRect.Length; i++)
                //{
                var angle = rotateRect[i].Angle; // rectangle angle
                pot = rotateRect[i].Points();    // the four corners of the rectangle
                var line1 = Math.Sqrt((pot[0].X - pot[1].X) * (pot[0].X - pot[1].X) + (pot[0].Y - pot[1].Y) * (pot[0].Y - pot[1].Y));
                var line2 = Math.Sqrt((pot[0].X - pot[3].X) * (pot[0].X - pot[3].X) + (pot[0].Y - pot[3].Y) * (pot[0].Y - pot[3].Y));
                //if (line1 * line2 < 1000) // filter: skip rectangles that are too small
                //{
                //    continue;
                //}
                if (line1 > line2) // decide based on the actual orientation
                {
                    angle += 90;
                }

                Mat Roi = new Mat(src.Size(), MatType.CV_8UC3);
                Roi.SetTo(0);                                             // all black
                Cv2.DrawContours(binary, contours, -1, Scalar.White, -1); // fill the contour regions white in the binary image
                Cv2.ImShow("bin", binary);
                src.CopyTo(Roi, binary);                                  // copy the source into Roi through the mask
                Cv2.ImShow("Roi", Roi);
                Mat afterRotato = new Mat(src.Size(), MatType.CV_8UC3);
                afterRotato.SetTo(0);
                Point2f center = rotateRect[i].Center;
                Mat     M      = Cv2.GetRotationMatrix2D(center, angle, 1);                                          // compute the rotation matrix
                Cv2.WarpAffine(Roi, afterRotato, M, Roi.Size(), InterpolationFlags.Linear, BorderTypes.Transparent); // warp to obtain the deskewed image, discarding everything else
                Cv2.ImShow("旋转后", afterRotato);

                Mat bin2 = new Mat();
                Cv2.ImShow("after", afterRotato);
                Cv2.CvtColor(afterRotato, bin2, ColorConversionCodes.RGB2GRAY);
                Cv2.Threshold(bin2, bin2, 20, 255, ThresholdTypes.Binary);
                Point[][]        con;
                HierarchyIndex[] temp; // receives the contours after deskewing
                Cv2.FindContours(bin2, out con, out temp, RetrievalModes.External, ContourApproximationModes.ApproxNone);
                for (int j = 0; j < con.Length; j++)
                {
                    Rect rect = Cv2.BoundingRect(con[j]); // the upright bounding rect is enough once the image is deskewed
                    if (rect.Height * rect.Width < 8000)  // filter out small, irrelevant regions
                    {
                        continue;
                    }
                    Mat dstImg = new Mat(afterRotato, rect);

                    Cv2.ImShow("dst", dstImg);
                    dstImg.SaveImage("dst.jpg");
                    ////string name = "dst" + i; // depends on how many results appear while debugging
                    //dstImg = dstImg.CvtColor(ColorConversionCodes.RGB2GRAY);
                    //dstImg = dstImg.Threshold(10, 255, ThresholdTypes.Otsu);
                    //Cv2.ImShow("chan", dstImg.Canny(100, 200));

                    //dstImg.FindContours(out var con1, out var hie1, RetrievalModes.External,
                    //    ContourApproximationModes.ApproxNone);
                    //dstImg.DrawContours(con1, -1, Scalar.Green, 3);
                    //Cv2.ImShow("dst2", dstImg);
                }
            }
            Cv2.WaitKey();
            Console.ReadLine();
        }
Example #18
        public Mat TrackAll(Mat source)
        {
            Mat hsv = new Mat();

            Cv2.CvtColor(source, hsv, ColorConversion.BgrToHsv);
            Mat mask  = new Mat();
            Mat mask2 = new Mat();

            // Cv2.InRange(hsv, new Scalar(35, 43, 46), new Scalar(77, 255, 255), mask);
            Cv2.InRange(hsv, new Scalar(0, 70, 50), new Scalar(10, 255, 255), mask);
            Cv2.InRange(hsv, new Scalar(170, 70, 50), new Scalar(180, 255, 255), mask2);
            mask = mask | mask2;
            Cv2.Erode(mask, mask, new Mat());
            Cv2.Dilate(mask, mask, new Mat());
            Mat res = source;

            // Cv2.BitwiseAnd(source, source, res, mask);

            Point[][]        contours;         //vector<vector<Point>> contours;
            HierarchyIndex[] hierarchyIndexes; //vector<Vec4i> hierarchy;
            Cv2.FindContours(
                mask,
                out contours,
                out hierarchyIndexes,
                ContourRetrieval.External,
                ContourChain.ApproxSimple
                );
            var contourIndex = 0;

            while ((contourIndex >= 0) && contours.Length != 0)
            {
                var contour = contours[contourIndex];

                var boundingRect = Cv2.BoundingRect(contour); //Find bounding rect for each contour
                Cv2.Rectangle(res,
                              new Point(boundingRect.X, boundingRect.Y),
                              new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                              new Scalar(0, 0, 255),
                              2);
                contourIndex = hierarchyIndexes[contourIndex].Next;
            }


            Cv2.InRange(hsv, new Scalar(0, 0, 221), new Scalar(180, 30, 255), mask);
            //Cv2.InRange(hsv, new Scalar(35, 43, 46), new Scalar(77, 255, 255), mask);
            Cv2.Erode(mask, mask, new Mat());
            Cv2.Dilate(mask, mask, new Mat());
            Cv2.FindContours(
                mask,
                out contours,
                out hierarchyIndexes,
                ContourRetrieval.External,
                ContourChain.ApproxSimple
                );
            contourIndex = 0;

            while ((contourIndex >= 0) && contours.Length != 0)
            {
                var contour = contours[contourIndex];

                var boundingRect = Cv2.BoundingRect(contour); //Find bounding rect for each contour
                Cv2.Rectangle(res,
                              new Point(boundingRect.X, boundingRect.Y),
                              new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                              new Scalar(0, 0, 255),
                              2);
                contourIndex = hierarchyIndexes[contourIndex].Next;
            }
            return(res);
        }
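A possible driver loop for TrackAll, assuming a webcam at index 0; each annotated frame is shown until any key is pressed. The method name RunTracking and the 30 ms wait are illustrative.

        // Hypothetical usage: annotate red and white regions on live camera frames.
        public void RunTracking()
        {
            using (var capture = new VideoCapture(0))
                using (var frame = new Mat())
                {
                    while (true)
                    {
                        capture.Read(frame);
                        if (frame.Empty())
                        {
                            break;
                        }
                        Cv2.ImShow("tracked", TrackAll(frame));
                        if (Cv2.WaitKey(30) >= 0)
                        {
                            break;
                        }
                    }
                }
        }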
        static void Main(string[] args)
        {
            Mat flow, cflow, gray, prevgray, img_bgr;

            Point[][]        contours;
            HierarchyIndex[] hierarchy;
            prevgray = new Mat();

            VideoCapture cap = new VideoCapture();

            cap.Open(0);
            int sleepTime = (int)Math.Round(1000 / cap.Fps);

            using (Window window = new Window("capture"))
                using (Mat frame = new Mat()) // Frame image buffer
                {
                    while (true)
                    {
                        cap.Read(frame);
                        if (frame.Empty())
                        {
                            break;
                        }
                        gray    = new Mat();
                        flow    = new Mat();
                        cflow   = new Mat();
                        img_bgr = new Mat();
                        Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                        if (prevgray.Empty())
                        {
                            prevgray = gray;
                        }
                        else
                        {
                            Cv2.CalcOpticalFlowFarneback(prevgray, gray, flow, 0.5, 5, 16, 3, 5, 1.2, OpticalFlowFlags.FarnebackGaussian);
                            Cv2.CvtColor(prevgray, cflow, ColorConversionCodes.GRAY2BGR);
                            drawOptFlowMap(ref flow, ref cflow, 1.5, 16, new Scalar(0, 0, 255));
                            drawHsv(flow, out img_bgr);
                            Mat gray_bgr = new Mat();
                            gray_bgr = Mat.Zeros(frame.Rows, frame.Cols, MatType.CV_8UC1);
                            Cv2.CvtColor(img_bgr, gray_bgr, ColorConversionCodes.BGR2GRAY);
                            Cv2.Normalize(gray_bgr, gray_bgr, 0, 255, NormTypes.MinMax, MatType.CV_8UC1);
                            Cv2.Blur(gray_bgr, gray_bgr, new Size(3, 3));

                            // Detect edges using Threshold
                            Mat img_thresh = new Mat();
                            img_thresh = Mat.Zeros(frame.Rows, frame.Cols, MatType.CV_8UC1);
                            Cv2.Threshold(gray_bgr, img_thresh, 155, 255, ThresholdTypes.BinaryInv);
                            Cv2.FindContours(img_thresh, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);

                            if (contours.Length == 0)
                            {
                                throw new NotSupportedException("Couldn't find any object in the image.");
                            }

                            for (int i = 0; i < contours.Length; i++)
                            {
                                Rect box = Cv2.BoundingRect(contours[i]);
                                if (box.Width > 50 && box.Height > 50 && box.Width < 900 && box.Height < 680)
                                {
                                    Cv2.Rectangle(frame,
                                                  box.TopLeft, box.BottomRight,
                                                  new Scalar(0, 255, 0), 4);
                                }
                            }
                            window.Image = frame;
                            Char c = (Char)Cv2.WaitKey(1);
                            if (c == 27)
                            {
                                break;
                            }
                            Swap <Mat>(ref gray, ref prevgray);
                        }
                    }
                }
        }
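        // Main above relies on Swap<T> (and the drawing helpers drawOptFlowMap/drawHsv) defined
        // elsewhere in this sample. For reference, a minimal sketch of the generic swap helper,
        // an assumption rather than the original implementation, could look like this:
        static void Swap<T>(ref T a, ref T b)
        {
            T tmp = a;
            a = b;
            b = tmp;
        }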
Example #20
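        /// <summary>
        /// Split a captcha image into its individual glyphs: threshold the decoded image, find
        /// the contours, crop each glyph and center it on a larger black canvas, order the crops
        /// left to right and, for six-glyph captchas, apply per-position rotation/skew
        /// corrections before re-encoding every glyph as JPEG.
        /// </summary>
        /// <param name="data">The encoded source image.</param>
        /// <returns>The processed glyph images, JPEG-encoded and ordered left to right.</returns>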
        public List <byte[]> Execute(byte[] data)
        {
            var src = Cv2.ImDecode(data, ImreadModes.Color);

            var gray = new Mat();

            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY); // ImDecode with ImreadModes.Color yields a 3-channel BGR image

            var threshImage = new Mat();

            Cv2.Threshold(gray, threshImage, 30, ThresholdMaxVal, ThresholdTypes.BinaryInv); // Threshold to find contour

            Cv2.FindContours(
                threshImage,
                out var contours,
                out var hierarchyIndexes,
                RetrievalModes.CComp,
                ContourApproximationModes.ApproxSimple);

            if (contours.Length == 0)
            {
                throw new NotSupportedException("Couldn't find any object in the image.");
            }

            var results = new List <OrderedMatResult>();

            var contourIndex = 0;

            while (contourIndex >= 0)
            {
                var contour   = contours[contourIndex];
                var leftmostX = contour.Min(c => c.X);

                var boundingRect = Cv2.BoundingRect(contour); //Find bounding rect for each contour

                //Cv2.Rectangle(src,
                //    new Point(boundingRect.X, boundingRect.Y),
                //    new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                //    new Scalar(0, 0, 0),
                //    2);

                var roi = new Mat(threshImage, boundingRect); //Crop the image
                Cv2.Threshold(roi, roi, Thresh, ThresholdMaxVal, ThresholdTypes.Binary);

                const int marginFactor = 3;
                var       paddedWidth  = roi.Width * marginFactor;
                var       paddedHeight = roi.Height * marginFactor;

                var roiPadded = new Mat(paddedHeight, paddedWidth, roi.Type()); // Mat(rows, cols, type): rows = height, cols = width
                roiPadded.SetTo(new Scalar(0, 0, 0));

                var shiftWidth  = (roiPadded.Width - roi.Width) / 2;
                var shiftHeight = (roiPadded.Height - roi.Height) / 2;

                var destRoi = new Rect(shiftWidth, shiftHeight - 2, roi.Width, roi.Height);

                roi.CopyTo(roiPadded.SubMat(destRoi));

                results.Add(new OrderedMatResult
                {
                    Data  = roiPadded,
                    Order = leftmostX
                });

                contourIndex = hierarchyIndexes[contourIndex].Next;
            }

            var ordered = results
                          .OrderBy(x => x.Order)
                          .Select(x => x.Data)
                          .ToList();

            // rotations
            if (ordered.Count == 6)
            {
                for (var i = 0; i < ordered.Count; i++)
                {
                    var current = ordered[i];

                    switch (i)
                    {
                    case 0:
                        new RotateCaptcha().Execute(current, 10);
                        break;

                    case 1:
                        new RotateCaptcha().Execute(current, -15);
                        break;

                    case 2:
                        new RotateCaptcha().Execute(current, 15);

                        // Skew
                        var w = current.Cols; // image width
                        var h = current.Rows; // image height

                        var wMid = w / 2;
                        var hMid = h / 2;

                        var srcTri = new[]
                        {
                            new Point2f(0, hMid),
                            new Point2f(w, hMid),
                            new Point2f(0, h)
                            //new Point2f(w, h)
                        };

                        var dstTri = new[]
                        {
                            new Point2f(0, hMid),
                            new Point2f(w, hMid),
                            new Point2f(-w / 10f, h)
                            //new Point2f(w / 20f, h)
                        };

                        // Get the Affine Transform
                        var warpMat = Cv2.GetAffineTransform(srcTri, dstTri);

                        Cv2.WarpAffine(current, current, warpMat, current.Size(), InterpolationFlags.Area);

                        //Cv2.ImShow("sharpen", current);
                        //Cv2.WaitKey();

                        break;

                    case 3:
                        new RotateCaptcha().Execute(current, -15);
                        break;

                    case 4:
                        new RotateCaptcha().Execute(current, 5);
                        break;

                    case 5:
                        new RotateCaptcha().Execute(current, -15);
                        break;
                    }


                    Cv2.GaussianBlur(current, current, new Size(), 0.001);
                    Cv2.AddWeighted(current, 1, current, 1, 0, current);

                    Cv2.Threshold(current, current, 10, ThresholdMaxVal, ThresholdTypes.BinaryInv);

                    Cv2.Resize(current, current, new Size(300, 300), 0, 0, InterpolationFlags.Cubic);

                    //Cv2.ImShow("sharpen", current);
                    //Cv2.WaitKey();
                }
            }

            var datas = ordered
                        .Select(x => x.ImEncode(".jpg"))
                        .ToList();

            return(datas);
        }
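        // Execute keeps each glyph together with its horizontal position in a small
        // OrderedMatResult container until the final sort. Its definition is not shown here;
        // a minimal sketch, under that assumption, might be:
        private class OrderedMatResult
        {
            public Mat Data  { get; set; } // the cropped, padded glyph image
            public int Order { get; set; } // leftmost X of the contour, used for left-to-right sorting
        }
        /// <summary>
        /// Locate the barcode region in an image (gradient, blur, threshold and morphological
        /// closing, then the largest contour), crop it and try to decode it with getBarcodeText,
        /// re-thresholding the crop once if the first decoding attempt returns nothing.
        /// </summary>
        /// <param name="fileName">Path of the image to process.</param>
        /// <param name="thresh">Threshold applied to the blurred gradient image.</param>
        /// <param name="debug">When true, the intermediate images are shown in windows.</param>
        /// <param name="rotation">Optional rotation applied to the source image before processing.</param>
        /// <returns>The decoded barcode text.</returns>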
        private static string detectBarcode(string fileName, double thresh, bool debug = false, double rotation = 0)
        {
            Console.WriteLine("\nProcessing: {0}", fileName);

            // load the image and convert it to grayscale
            var image = new Mat(fileName);

            if (rotation != 0)
            {
                rotateImage(image, image, rotation, 1);
            }

            if (debug)
            {
                Cv2.ImShow("Source", image);
                Cv2.WaitKey(1); // do events
            }

            var gray     = new Mat();
            var channels = image.Channels();

            if (channels > 1)
            {
                Cv2.CvtColor(image, gray, ColorConversion.BgrToGray);
            }
            else
            {
                image.CopyTo(gray);
            }


            // compute the Scharr gradient magnitude representation of the images
            // in both the x and y direction
            var gradX = new Mat();

            Cv2.Sobel(gray, gradX, MatType.CV_32F, xorder: 1, yorder: 0, ksize: -1);
            //Cv2.Scharr(gray, gradX, MatType.CV_32F, xorder: 1, yorder: 0);

            var gradY = new Mat();

            Cv2.Sobel(gray, gradY, MatType.CV_32F, xorder: 0, yorder: 1, ksize: -1);
            //Cv2.Scharr(gray, gradY, MatType.CV_32F, xorder: 0, yorder: 1);

            // subtract the y-gradient from the x-gradient
            var gradient = new Mat();

            Cv2.Subtract(gradX, gradY, gradient);
            Cv2.ConvertScaleAbs(gradient, gradient);

            if (debug)
            {
                Cv2.ImShow("Gradient", gradient);
                Cv2.WaitKey(1); // do events
            }


            // blur and threshold the image
            var blurred = new Mat();

            Cv2.Blur(gradient, blurred, new Size(9, 9));

            var threshImage = new Mat();

            Cv2.Threshold(blurred, threshImage, thresh, 255, ThresholdType.Binary);

            if (debug)
            {
                Cv2.ImShow("Thresh", threshImage);
                Cv2.WaitKey(1); // do events
            }


            // construct a closing kernel and apply it to the thresholded image
            var kernel = Cv2.GetStructuringElement(StructuringElementShape.Rect, new Size(21, 7));
            var closed = new Mat();

            Cv2.MorphologyEx(threshImage, closed, MorphologyOperation.Close, kernel);

            if (debug)
            {
                Cv2.ImShow("Closed", closed);
                Cv2.WaitKey(1); // do events
            }


            // perform a series of erosions and dilations
            Cv2.Erode(closed, closed, null, iterations: 4);
            Cv2.Dilate(closed, closed, null, iterations: 4);

            if (debug)
            {
                Cv2.ImShow("Erode & Dilate", closed);
                Cv2.WaitKey(1); // do events
            }


            //find the contours in the thresholded image, then sort the contours
            //by their area, keeping only the largest one

            Point[][]       contours;
            HierarchyIndex[] hierarchyIndexes;
            Cv2.FindContours(
                closed,
                out contours,
                out hierarchyIndexes,
                mode: ContourRetrieval.CComp,
                method: ContourChain.ApproxSimple);

            if (contours.Length == 0)
            {
                throw new NotSupportedException("Couldn't find any object in the image.");
            }

            var contourIndex       = 0;
            var previousArea       = 0;
            var biggestContourRect = Cv2.BoundingRect(contours[0]);

            while ((contourIndex >= 0))
            {
                var contour = contours[contourIndex];

                var boundingRect     = Cv2.BoundingRect(contour); //Find bounding rect for each contour
                var boundingRectArea = boundingRect.Width * boundingRect.Height;
                if (boundingRectArea > previousArea)
                {
                    biggestContourRect = boundingRect;
                    previousArea       = boundingRectArea;
                }

                contourIndex = hierarchyIndexes[contourIndex].Next;
            }


            /*biggestContourRect.Width += 10;
             * biggestContourRect.Height += 10;
             * biggestContourRect.Left -= 5;
             * biggestContourRect.Top -= 5;*/


            var barcode = new Mat(image, biggestContourRect); //Crop the image

            Cv2.CvtColor(barcode, barcode, ColorConversion.BgrToGray);

            Cv2.ImShow("Barcode", barcode);
            Cv2.WaitKey(1); // do events

            var barcodeClone = barcode.Clone();
            var barcodeText  = getBarcodeText(barcodeClone);

            if (string.IsNullOrWhiteSpace(barcodeText))
            {
                Console.WriteLine("Enhancing the barcode...");
                //Cv2.AdaptiveThreshold(barcode, barcode, 255,
                //AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 9, 1);
                //var th = 119;
                var th = 100;
                Cv2.Threshold(barcode, barcode, th, 255, ThresholdType.ToZero);
                Cv2.Threshold(barcode, barcode, th, 255, ThresholdType.Binary);
                barcodeText = getBarcodeText(barcode);
            }

            Cv2.Rectangle(image,
                          new Point(biggestContourRect.X, biggestContourRect.Y),
                          new Point(biggestContourRect.X + biggestContourRect.Width, biggestContourRect.Y + biggestContourRect.Height),
                          new Scalar(0, 255, 0),
                          2);

            if (debug)
            {
                Cv2.ImShow("Segmented Source", image);
                Cv2.WaitKey(1); // do events
            }

            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();

            return(barcodeText);
        }
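        // getBarcodeText and rotateImage are helpers defined elsewhere in this sample. Purely as
        // an illustration, and assuming the ZXing.Net library plus OpenCvSharp.Extensions are
        // available, a decoder wrapper along these lines could back getBarcodeText:
        private static string getBarcodeTextSketch(Mat barcode)
        {
            // hand the crop to ZXing as a System.Drawing.Bitmap
            using (var bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(barcode))
            {
                var reader = new ZXing.BarcodeReader {
                    AutoRotate = true
                };
                var result = reader.Decode(bitmap);
                return result == null ? string.Empty : result.Text;
            }
        }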
        /// <summary>
        /// Apply the given warps to a specified image and return the warped result.
        /// </summary>
        /// <param name="source">The source image to warp</param>
        /// <param name="width">The width of the destination image</param>
        /// <param name="height">The height of the destination image</param>
        /// <param name="warps">The warps to apply</param>
        /// <returns>The warped image</returns>
        public static Mat ApplyWarps(this Mat source, int width, int height, IEnumerable <Warp> warps)
        {
            // set up opencv images for the replacement image and the output
            var destination = new Mat(height, width, MatType.CV_8UC3);

            destination.SetTo(0);

            // process all warps
            foreach (var warp in warps)
            {
                var t1 = warp.Source.ToPoint2f();
                var t2 = warp.Destination.ToPoint2f();

                // get bounding rects around source and destination triangles
                var r1 = Cv2.BoundingRect(t1);
                var r2 = Cv2.BoundingRect(t2);

                // crop the input image to r1
                using (var img1Cropped = new Mat(r1.Size, source.Type()))
                {
                    var temp = new Mat(source, r1);
                    temp.CopyTo(img1Cropped);

                    // adjust triangles to local coordinates within their bounding box
                    for (int i = 0; i < t1.Length; i++)
                    {
                        t1[i].X -= r1.Left;
                        t1[i].Y -= r1.Top;
                        t2[i].X -= r2.Left;
                        t2[i].Y -= r2.Top;
                    }

                    // get the transformation matrix to warp t1 to t2
                    using (var transform = Cv2.GetAffineTransform(t1, t2))
                    {
                        // warp triangle
                        using (var img2Cropped = new Mat(r2.Height, r2.Width, img1Cropped.Type()))
                        {
                            Cv2.WarpAffine(img1Cropped, img2Cropped, transform, img2Cropped.Size());

                            // create a mask in the shape of the t2 triangle
                            var hull = t2.Select(p => new Point(p.X, p.Y));
                            using (var mask = new Mat(r2.Height, r2.Width, MatType.CV_8UC3))
                            {
                                mask.SetTo(0);
                                Cv2.FillConvexPoly(mask, hull, new Scalar(1, 1, 1), LineTypes.Link8, 0);

                                // alpha-blend the t2 triangle - this sets all pixels outside the triangle to zero
                                Cv2.Multiply(img2Cropped, mask, img2Cropped);

                                // cut the t2 triangle out of the destination image
                                using (var target = new Mat(destination, r2))
                                {
                                    Cv2.Multiply(target, new Scalar(1, 1, 1) - mask, target);

                                    // insert the t2 triangle into the destination image
                                    Cv2.Add(target, img2Cropped, target);
                                }
                            }
                        }
                    }
                }
            }

            // return the destination image
            return(destination);
        }
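        /// <summary>
        /// Detect contours for one class of map object: convert the bitmap to grayscale, extract
        /// edges with either a Sobel filter (for "tight" object classes) or Canny, find the
        /// contours, store them in the object dictionary and annotate each bounding box with its
        /// scaled area (km2) and perimeter (km).
        /// </summary>
        /// <param name="originalMap">The source map bitmap.</param>
        /// <param name="objectName">The name of the map object class being processed.</param>
        /// <param name="minArea">Minimum bounding-box area; currently unused because the filter is commented out.</param>
        /// <param name="maxArea">Maximum bounding-box area; currently unused.</param>
        /// <returns>The annotated image.</returns>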
        private Mat FindContoursAndDraw(Bitmap originalMap, string objectName, int minArea = 500, int maxArea = 10000)
        {
            //var houghBitmap = HoughTransform(originalMap);
            //var invertedHoughBitmap = InvertImage(houghBitmap);
            Mat originalMat = BitmapConverter.ToMat(originalMap);
            //Mat invertedHoughMat = BitmapConverter.ToMat(invertedHoughBitmap);
            Mat blackWhiteMat = new Mat();
            Mat edgesMat      = new Mat();

            Cv2.CvtColor(originalMat, blackWhiteMat, ColorConversionCodes.BGRA2GRAY);
            if (MapObjectsColors.GetInstance().Tight.Contains(objectName))
            {
                Bitmap edgesMap = BitmapConverter.ToBitmap(blackWhiteMat);
                edgesMap = ImageFilter.SobelFilter(edgesMap, grayscale: true);
                edgesMat = BitmapConverter.ToMat(edgesMap);
                Cv2.CvtColor(edgesMat, edgesMat, ColorConversionCodes.BGRA2GRAY);
            }
            else
            {
                Cv2.Canny(blackWhiteMat, edgesMat, 50, 100);
            }


            OpenCvSharp.Point[][] contours;
            HierarchyIndex[]      hierarchyIndexes;
            Cv2.FindContours(
                edgesMat,
                out contours,
                out hierarchyIndexes,
                mode: RetrievalModes.CComp,
                method: ContourApproximationModes.ApproxSimple);



            var componentCount = 0;
            var contourIndex   = 0;
            var objectDict     = mapObjects.getObjectDictionary();

            if (contours.Length != 0)
            {
                if (objectDict.ContainsKey(objectName))
                {
                    objectDict[objectName] = contours;
                }
                else
                {
                    objectDict.Add(objectName, contours);
                }
                while ((contourIndex >= 0))
                {
                    var contour          = contours[contourIndex];
                    var boundingRect     = Cv2.BoundingRect(contour);
                    var boundingRectArea = boundingRect.Width * boundingRect.Height;
                    var ca  = Cv2.ContourArea(contour) * Convert.ToDouble(scaleBox.SelectedItem) / 100;
                    var cal = Cv2.ArcLength(contour, closed: true) * Convert.ToDouble(scaleBox.SelectedItem) / 100;

                    //if (boundingRectArea > minArea)
                    //{

                    Cv2.PutText(originalMat, $"A:{ca.ToString("#.##")} km2", new OpenCvSharp.Point(boundingRect.X, boundingRect.Y + 10), HersheyFonts.HersheyPlain, 1, Scalar.White, 1);
                    Cv2.PutText(originalMat, $"L:{cal.ToString("#.##")} km", new OpenCvSharp.Point(boundingRect.X, boundingRect.Y + 25), HersheyFonts.HersheyPlain, 1, Scalar.White, 1);


                    //}


                    //Cv2.DrawContours(
                    //    originalMat,
                    //    contours,
                    //    contourIndex,
                    //    color: Scalar.All(componentCount + 1),
                    //    thickness: -1,
                    //    lineType: LineTypes.Link8,
                    //    hierarchy: hierarchyIndexes,
                    //    maxLevel: int.MaxValue);

                    componentCount++;


                    contourIndex = hierarchyIndexes[contourIndex].Next;
                }
            }

            return(originalMat);
        }
Example #24
        // process the plate and display the segmented characters
        private void ProcessAndShowChars(Bitmap plate)
        {
            currentTabCount = 0;

            Mat matIn = plate.ToMat();

            AddTag("原图", matIn);

            // matIn = Utilities.GammaTransform(matIn);
            //AddTag("gamma增强", matIn);

            //matIn = Utilities.IndexTransform(matIn);
            //AddTag("指数增强", matIn);

            //matIn = Utilities.LogTransform(matIn);
            //AddTag("指数增强", matIn);

            //matIn = Utilities.LaplaceTransform(matIn);
            //AddTag("拉普拉斯",matIn);

            Mat matGray = matIn.CvtColor(ColorConversionCodes.RGB2GRAY);

            AddTag("灰度图", matGray);

            PlateColor plateColor = CharSegement.GetPlateColor(matIn);
            Mat        matClear   = CharSegement.ClearMaodingAndBorder(matGray, plateColor);

            AddTag("去边框与铆钉", matClear);

            // find the contours
            OpenCvSharp.Point[][] contours         = null;
            HierarchyIndex[]      hierarchyIndices = null;
            matClear.FindContours(out contours, out hierarchyIndices, RetrievalModes.External,
                                  ContourApproximationModes.ApproxNone);

            // compute the bounding rectangle of each contour
            List <Rect> rects       = new List <Rect>();
            Mat         matContours = matIn.Clone();

            for (int index = 0; index < contours.Length; index++)
            {
                Rect rect = Cv2.BoundingRect(contours[index]);
                if (CharSegement.VerifyRect(rect) &&
                    CharSegement.NotOnBorder(rect, matIn.Size()))
                {
                    rects.Add(rect);
                    Cv2.Rectangle(matContours, rect, new Scalar(0, 0, 255), 1);
                }
            }
            AddTag("外接矩形", matContours);

            // remove rectangles nested inside other rectangles
            Mat matInner = matIn.Clone();

            rects = CharSegement.RejectInnerRectFromRects(rects);
            for (int index = 0; index < rects.Count; index++)
            {
                Cv2.Rectangle(matInner, rects[index], new Scalar(0, 0, 255), 1);
            }
            AddTag("去内部矩形", matInner);



            // adjust the rectangle sizes
            Mat matAdjust = matIn.Clone();

            rects = CharSegement.AdjustRects(rects);
            for (int index = 0; index < rects.Count; index++)
            {
                Cv2.Rectangle(matAdjust, rects[index], new Scalar(0, 0, 255), 1);
            }
            AddTag("调整大小", matAdjust);

            // get safe rectangles (clamped to the plate image)
            rects = CharSegement.GetSafeRects(matIn, rects);

            // display the segmentation result
            ShowSpliteImage(rects, matIn);
        }
Example #25
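        /// <summary>
        /// Morph two faces: interpolate the two landmark sets by alpha, triangulate the averaged
        /// landmarks with Delaunay triangulation, then warp the matching source and destination
        /// patches of every triangle into the averaged shape and alpha-blend them into morphImage.
        /// </summary>
        /// <param name="srcFace">Landmark points of the source face.</param>
        /// <param name="dstFace">Landmark points of the destination face.</param>
        /// <param name="alpha">Blend factor: 0 keeps the source face, 1 the destination face.</param>
        /// <param name="morphImage">The output image that receives the blended triangles.</param>
        /// <param name="srcImg">The source face image.</param>
        /// <param name="dstImg">The destination face image.</param>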
        private static void Morph(List <Point2f> srcFace, List <Point2f> dstFace, float alpha, Mat morphImage, Mat srcImg,
                                  Mat dstImg)
        {
            var averageFace = srcFace
                              .Zip(dstFace, (p1, p2) => new Point2f(
                                       (1 - alpha) * p1.X + alpha * p2.X,
                                       (1 - alpha) * p1.Y + alpha * p2.Y
                                       ))
                              .ToList();
            var rect        = new Rect(0, 0, morphImage.Cols, morphImage.Rows);
            var indexesList = GetDelaunayTriangulationIndexes(rect, averageFace);

            foreach (var indexes in indexesList)
            {
                var srcTri  = indexes.Select(i => srcFace[i]).ToList();
                var dstTri  = indexes.Select(i => dstFace[i]).ToList();
                var avgTri  = indexes.Select(i => averageFace[i]).ToList();
                var srcRect = Cv2.BoundingRect(srcTri).Intersect(new Rect(0, 0, srcImg.Width, srcImg.Height));
                var dstRect = Cv2.BoundingRect(dstTri).Intersect(new Rect(0, 0, dstImg.Width, dstImg.Height));
                var avgRect = Cv2.BoundingRect(avgTri).Intersect(new Rect(0, 0, morphImage.Width, morphImage.Height));

                var srcOffsetRect    = new List <Point2f>();
                var dstOffsetRect    = new List <Point2f>();
                var avgOffsetRect    = new List <Point2f>();
                var avgOffsetRectInt = new List <Point>();
                for (var i = 0; i < 3; i++)
                {
                    srcOffsetRect.Add(new Point2f(srcTri[i].X - srcRect.X, srcTri[i].Y - srcRect.Y));
                    dstOffsetRect.Add(new Point2f(dstTri[i].X - dstRect.X, dstTri[i].Y - dstRect.Y));
                    avgOffsetRect.Add(new Point2f(avgTri[i].X - avgRect.X, avgTri[i].Y - avgRect.Y));
                    avgOffsetRectInt.Add(new Point(avgTri[i].X - avgRect.X, avgTri[i].Y - avgRect.Y));
                }

                using (var mask = Mat.Zeros(avgRect.Height, avgRect.Width, MatType.CV_32FC3).ToMat())
                {
                    var scalar = new Scalar(1, 1, 1);
                    Cv2.FillConvexPoly(mask, avgOffsetRectInt, scalar, LineTypes.AntiAlias);
                    var srcImgRect = new Mat();
                    var dstImgRect = new Mat();
                    srcImg[srcRect].CopyTo(srcImgRect);
                    dstImg[dstRect].CopyTo(dstImgRect);

                    void ApplyAffineTransform(Mat warpImage, Mat imgRect, List <Point2f> offsetRect)
                    {
                        using (var warpMat = Cv2.GetAffineTransform(offsetRect, avgOffsetRect))
                        {
                            Cv2.WarpAffine(imgRect, warpImage, warpMat, warpImage.Size(),
                                           InterpolationFlags.Linear,
                                           BorderTypes.Reflect101);
                        }
                    }

                    using (var warpImage1 = Mat.Zeros(avgRect.Height, avgRect.Width, srcImgRect.Type()).ToMat())
                        using (var warpImage2 = Mat.Zeros(avgRect.Height, avgRect.Width, dstImgRect.Type()).ToMat())
                        {
                            ApplyAffineTransform(warpImage1, srcImgRect, srcOffsetRect);
                            ApplyAffineTransform(warpImage2, dstImgRect, dstOffsetRect);

                            using (var avgImgRect = ((1.0 - alpha) * warpImage1 + alpha * warpImage2).ToMat())
                            {
                                Cv2.Multiply(avgImgRect, mask, avgImgRect);
                                Cv2.Multiply(morphImage[avgRect], scalar - mask, morphImage[avgRect]);
                                morphImage[avgRect] = morphImage[avgRect] + avgImgRect;
                            }
                        }
                }
            }
        }
Example #26
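        /// <summary>
        /// Extract the digit strip from a blood-card image: scale and binarize the image, retrying
        /// a few thresholds around s1 until 20 digit-sized boxes are found in the region of
        /// interest, sort the boxes left to right, merge the 28x28 digit crops into one strip and
        /// run OCR on it.
        /// </summary>
        /// <param name="img">The source card image.</param>
        /// <param name="x">Left edge of the region of interest.</param>
        /// <param name="y">Top edge of the region of interest.</param>
        /// <param name="w">Width of the region of interest.</param>
        /// <param name="h">Height of the region of interest.</param>
        /// <param name="s1">Base binarization threshold.</param>
        /// <param name="s2">Brightness scale factor (divided by 1000).</param>
        /// <param name="index">Index passed through to the OCR routine.</param>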
        public static BloodCardOrcNumber GetOrcNumber(Mat img, int x, int y, int w, int h, int s1, int s2, int index)
        {
            List <OpenCvSharp.Rect> rects_list   = new List <OpenCvSharp.Rect>();
            BloodCardOrcNumber      bloodcardorc = new BloodCardOrcNumber();

            img = img.Clone() * (s2 / 1000.0); // divide as double so the scale factor is not truncated to an integer
            img = img.CvtColor(ColorConversionCodes.BGR2GRAY);
            Mat img_gary = null;

            for (int k = 0; k < 5; k++)
            {
                rects_list.Clear();
                img_gary = img.Threshold(s1 + (k - 2) * 3, 255, ThresholdTypes.Binary);
                img_gary = img_gary.SubMat(y, y + h, x, x + w);
                bloodcardorc.full_img = img_gary.Clone();
                OpenCvSharp.Point[][] contours;
                HierarchyIndex[]      hierarchyIndexes;
                img_gary.FindContours(out contours, out hierarchyIndexes, RetrievalModes.CComp, ContourApproximationModes.ApproxSimple);
                for (int i = 0; i < contours.Length; i++)
                {
                    var rect = Cv2.BoundingRect(contours[i]);
                    if (rect.Width > 10 && rect.Width < 50 && rect.Height > 10 && rect.Height < 50)
                    {
                        var numberblock = img_gary.SubMat(rect.Top, rect.Bottom, rect.Left, rect.Right);
                        numberblock = numberblock.Resize(new OpenCvSharp.Size(28, 28));
                        var pixmean = numberblock.Mean();
                        if (pixmean.Val0 < 180)
                        {
                            rects_list.Add(rect);
                        }
                    }
                }
                if (rects_list.Count() == 20)
                {
                    break;
                }
            }


            // sort the digit blocks left to right
            List <Mat> number_list = new List <Mat>();
            Mat        mergeimg    = new Mat();

            rects_list.Sort((a, b) => a.Left.CompareTo(b.Left));
            for (int i = 0; i < rects_list.Count; i++)
            {
                var rect        = rects_list[i];
                var numberblock = img_gary.SubMat(rect.Top, rect.Bottom, rect.Left, rect.Right);
                numberblock = numberblock.Resize(new OpenCvSharp.Size(28, 28));
                var pixmean = numberblock.Mean();
                if (pixmean.Val0 < 180)
                {
                    number_list.Add(numberblock);
                    mergeimg = Tools.MergeImage(mergeimg, numberblock);
                }
            }
            if (mergeimg.Width != 0 && mergeimg.Height != 0)
            {
                bloodcardorc.number_img = mergeimg.Clone();
            }
            else
            {
                bloodcardorc.number_img = null;
            }
            bloodcardorc.numberstr = Tools.CheckOrc(number_list, index);
            return(bloodcardorc);
        }
Example #27
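        /// <summary>
        /// Count the colored regions of a curtain-wall drawing: convert the image to HSV,
        /// threshold each color range (white / gray / blue), clean the mask with an elliptical
        /// opening, find the contours and label every sufficiently large region with its color
        /// name. Note that OpenCV's 8-bit HSV image uses H in 0-180 and S/V in 0-255, so the
        /// fractional upper bound used for "white" below looks like a normalized value that may
        /// need rescaling.
        /// </summary>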
        public static void type2()
        {
            Mat src = Cv2.ImRead(@"D:\OpenCV\ING\curtainwall\aaaaa.png");
            //src=Findarea(src);
            //Window.WaitKey();
            Mat hsv = new Mat(), threshold = new Mat();
            Mat dst = new Mat(), dst2 = new Mat();
            //Scalar HsvRedLow = new Scalar(0, 40, 40);
            //Scalar HsvRedHigh = new Scalar(40, 255, 255);
            //Scalar HsvGreenLow = new Scalar(41, 40, 40);
            //Scalar HsvGreenHigh = new Scalar(90, 255, 255);
            //Scalar HsvBlueLow = new Scalar(100, 40, 40);
            //Scalar HsvBlueHigh = new Scalar(140, 255, 255);
            Scalar HsvRedLow    = new Scalar(0, 0, 1);     // white
            Scalar HsvRedHigh   = new Scalar(0.1156, 0.2480, 0.9804);
            Scalar HsvGreenLow  = new Scalar(0, 0, 46);    // gray
            Scalar HsvGreenHigh = new Scalar(180, 43, 220);
            Scalar HsvBlueLow   = new Scalar(100, 43, 46); // blue
            Scalar HsvBlueHigh  = new Scalar(124, 255, 255);

            Cv2.CvtColor(src, hsv, ColorConversionCodes.BGR2HSV);
            Cv2.NamedWindow("hsv", 0);
            Cv2.ResizeWindow("hsv", 500, 500);
            Cv2.ImShow("hsv", hsv);
            Scalar[]         HsvLow    = { HsvRedLow, HsvGreenLow, HsvBlueLow };
            Scalar[]         HsvHigh   = { HsvRedHigh, HsvGreenHigh, HsvBlueHigh };
            string[]         textcolor = { "White", "Gray", "Blue" };
            Point[][]        contours;  //= Findarea(src);
            HierarchyIndex[] hierarchy; // = hierarchys;
            for (int color = 0; color < 3; color++)
            {
                Cv2.InRange(hsv, HsvLow[color], HsvHigh[color], threshold);
                Cv2.Threshold(threshold, threshold, 1, 255, ThresholdTypes.Binary);
                Cv2.CopyMakeBorder(threshold, dst, 1, 1, 1, 1, BorderTypes.Constant, 0);

                Mat a = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(5, 5));//5.23
                Cv2.MorphologyEx(dst, dst, MorphTypes.Open, a);
                //dst = threshold;

                //Cv2.CvtColor(src,dst2,ColorConversionCodes.BGR2GRAY);
                //Cv2.Threshold(dst2,dst2,100,255,ThresholdTypes.Binary);
                //Cv2.FindContours(dst2, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);
                //Cv2.ImShow("ss",dst2);
                Cv2.NamedWindow(textcolor[color] + "轮廓", 0);
                Cv2.ResizeWindow(textcolor[color] + "轮廓", 500, 500);
                Cv2.ImShow(textcolor[color] + "轮廓", dst);
                Cv2.FindContours(dst, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);
                //Console.WriteLine(contours.Length);
                int count = 0;
                for (int i = 0; i < contours.Length; i++)
                {
                    if (contours[i].Length < 200)
                    {
                        continue;
                    }
                    Rect bound = Cv2.BoundingRect(contours[i]);
                    //Cv2.DrawContours(src, contours, i, Scalar.Black, 1, LineTypes.Link8, hierarchy, 0, new Point(0, 0));
                    Point center = new Point(bound.X + bound.Width / 2, bound.Y + bound.Height / 2);

                    //int[] bc = { center.X, center.Y };
                    //char x = threshold.At<char>(bc);
                    //Console.WriteLine(x);
                    if (true)
                    {
                        count++;
                        Cv2.PutText(src, textcolor[color], center, HersheyFonts.HersheyComplex, 1, Scalar.Black, 1, LineTypes.Link8);
                        Cv2.Circle(src, center, 10, Scalar.Gray);
                    }
                }
                Console.WriteLine(textcolor[color] + "区域数量:" + count);
            }
            Cv2.NamedWindow("result", 0);
            Cv2.ResizeWindow("result", 500, 500);
            Cv2.ImShow("result", src);
            Window.WaitKey();
        }
Example #28
        // TODO: supports left-to-right image
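        /// <summary>
        /// Detect the three fold positions on the back-side scan: adaptively threshold the
        /// grayscale image, collapse it to a single row to expose the dark vertical fold bands,
        /// evaluate every combination of four candidate bands by how symmetrically it splits the
        /// page, and return the x coordinates of the three folds of the best-scoring set.
        /// </summary>
        /// <param name="backFilename">Path of the back-side image.</param>
        /// <param name="score">Receives the symmetry distance of the chosen fold set.</param>
        /// <returns>The x coordinates of the three detected folds.</returns>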
        public static int[] DetectFolds(string backFilename, out double score)
        {
            score = -1;

            var matSrc = new Mat(backFilename);

            int blur = matSrc.Width / BlurWidthDivider;

            if (blur % 2 == 0)
            {
                blur++;
            }

            var mat = matSrc.CvtColor(ColorConversion.BgrToGray)
                      .AdaptiveThreshold(255, AdaptiveThresholdType.MeanC, ThresholdType.Binary, blur, 1);

            mat = ~mat;

            Cv2.Resize(mat, mat, new Size(matSrc.Width, 1), interpolation: Interpolation.Area);
            mat = mat.MedianBlur(blur).Threshold(0, 255, OpenCvSharp.ThresholdType.Otsu);
            Cv2.Resize(mat, mat, new Size(matSrc.Width, matSrc.Height), interpolation: Interpolation.Cubic);

            var rects = mat
                        .FindContoursAsArray(ContourRetrieval.External, ContourChain.ApproxSimple)
                        .Select(x => Cv2.BoundingRect(x))
                        .OrderBy(x => x.X)
                        .ToArray();

            if (rects.Length < 4)
            {
                throw new FoldDetectException(string.Format("Too few folds: {0}", rects.Length));
            }

            var combinations = getCombinations(rects, 4).ToArray();

            var rectSets = combinations.Select(_rectCands =>
            {
                var rectCands = _rectCands.OrderBy(x => x.X).ToArray();

                var imageWidth  = (double)matSrc.Width;
                var widthRatios = rectCands.Select(r => new {
                    Left  = r.X / imageWidth,
                    Right = (r.X + r.Width) / imageWidth,
                    Width = r.Width / imageWidth
                }).ToArray();

                // TODO: zero division check
                double leftThin  = widthRatios[0].Left;
                double rightThin = 1 - widthRatios[3].Right;
                double thinRatio = leftThin / (leftThin + rightThin);

                double leftFat  = widthRatios[1].Left - widthRatios[0].Right;
                double rightFat = widthRatios[3].Left - widthRatios[2].Right;
                double fatRatio = leftFat / (leftFat + rightFat);

                double thinFatDistance = Math.Sqrt(Math.Pow(fatRatio - 0.5, 2) + Math.Pow(thinRatio - 0.5, 2));
                return(new { Rects = rectCands, WidthRatios = widthRatios, ThinFatDistance = thinFatDistance, ThinRatio = thinRatio, FatRatio = fatRatio, });
            });

            rectSets = rectSets.Where(r => r.WidthRatios.All(x => x.Width < FoldWidthThreshold)).ToArray();
            if (!rectSets.Any())
            {
                throw new FoldDetectException("All fold rects are too fat.");
            }

            var bestMatchRectSet = rectSets.OrderBy(x => x.ThinFatDistance).First();

            if (bestMatchRectSet.ThinFatDistance > PermissiveRatioDistance)
            {
                throw new FoldDetectException(string.Format("Matched rect is not found (score: {0})", bestMatchRectSet.ThinFatDistance));
            }
            score = bestMatchRectSet.ThinFatDistance;

            int fold0 = bestMatchRectSet.Rects[0].Right;
            int fold1 = bestMatchRectSet.Rects[1].Right;
            int fold2 = bestMatchRectSet.Rects[3].Right;

            return(new int[] { fold0, fold1, fold2 });
        }
Example #29
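        /// <summary>
        /// Find candidate click targets in a cropped screenshot: grayscale the crop, run Canny
        /// edge detection and dilation, find the contours, discard contours that look like thin
        /// lines or are too large, merge overlapping boxes with non-maximum suppression and
        /// translate the survivors back into screen coordinates.
        /// </summary>
        /// <param name="crop">The cropped screenshot around the gaze region.</param>
        /// <returns>The candidate click-target rectangles in screen coordinates.</returns>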
        private List <OpenCvSharp.Rect> do_cv(Bitmap crop)
        {
            // convert the given image to an openCV mat(erial)
            var image = OpenCvSharp.Extensions.BitmapConverter.ToMat(crop);
            //new CvWindowEx(image);

            // apply some image filters to the given image to improve CV operations
            var gray = new Mat();

            Cv2.CvtColor(image, gray, ColorConversionCodes.BGR2GRAY);
            var gray2 = new Mat();

            Cv2.BilateralFilter(gray, gray2, 10, 17, 17);
            var edged = new Mat();

            Cv2.Canny(gray2, edged, 80, 200); // run Canny on the bilateral-filtered image (gray2 was otherwise unused)
            //new CvWindowEx(edged);

            // dilate / thicken the shapes in the given image
            var dilated = new Mat();

            Cv2.Dilate(edged, dilated, null);
            //new CvWindowEx(edged);

            //
            // use openCV to compute 'contours' (or boxes) around 'features' in
            // in the area of interest (the cropped screenshot)
            //

            OpenCvSharp.Point[][]        contours;
            OpenCvSharp.HierarchyIndex[] hierarchy;
            Cv2.FindContours(dilated, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);

            //----------------------------------------------------------------
            //          OBJECT CONTOUR / CLICK TARGET FILTRATION
            //----------------------------------------------------------------

            var filteredContours = new List <OpenCvSharp.Rect>();

            foreach (var c in contours)
            {
                var    contourRect = Cv2.BoundingRect(c);
                double aspectRatio = (double)contourRect.Width / contourRect.Height; // avoid integer division

                // discard contours that look like 'lines' such as the edge of a window
                if ((aspectRatio > 3 || aspectRatio < 0.2) && (contourRect.Height < 8 || contourRect.Width < 8))
                {
                    //Cv2.Rectangle(image, contourRect, new Scalar(0, 0, 255, 255));
                    Console.WriteLine("Bad ratio... " + aspectRatio);
                    continue;
                }

                // discard contours deemed 'too large'
                if (contourRect.Width * contourRect.Height > 6000)
                {
                    //Cv2.Rectangle(image, contourRect, new Scalar(255, 0, 0, 255));
                    Console.WriteLine("Bad size... " + contourRect.Width);
                    continue;
                }

                filteredContours.Add(contourRect);
                //Cv2.Rectangle(image, contourRect, new Scalar(255, 255, 0, 255));
            }
            //new CvWindowEx(image);

            //----------------------------------------------------------------
            //          CLICK TARGET COORDINATE TRANSLATION
            //----------------------------------------------------------------

            // TODO we should probably move this outside of this function

            var transRects = new List <Rect>();
            var goodRects  = NonMaxSuppression(filteredContours, 0.3f);

            Console.WriteLine(goodRects.Count);

            foreach (var rect in goodRects)
            {
                Cv2.Rectangle(image, rect, new Scalar(0, 255, 0, 255));
                transRects.Add(new Rect(ScreenGazeSnapshot.m_GazeRect.X + rect.X, ScreenGazeSnapshot.m_GazeRect.Y + rect.Y, rect.Width, rect.Height));
            }
            //new CvWindowEx(image);

            // save the 'rendered' cv results incase we want to dump it to disk later
            m_LastScreenCrop   = crop;
            m_LastScreenCropCV = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(image);

            // return the results
            return(transRects);
        }
Example #30
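        /// <summary>
        /// Trim a stitched image: pad it with a black border, threshold it to isolate the
        /// non-black content, shrink an inscribed rectangle by repeated erosion until it no
        /// longer covers any black pixels, crop the original image to that rectangle, write it
        /// to savepath as result.jpg and show it in the picture box.
        /// </summary>
        /// <param name="path">Path of the stitched image to trim.</param>
        /// <param name="savepath">Directory in which result.jpg will be written.</param>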
        private void dealimage(String path, String savepath)
        {
            Mat    result = Cv2.ImRead(path);
            Scalar color  = new Scalar(0, 0, 0);

            Cv2.CopyMakeBorder(result, result, 10, 10, 10, 10, BorderTypes.Constant, color);

            Mat outp = new Mat();

            Cv2.CvtColor(result, outp, ColorConversionCodes.BGR2GRAY);

            Mat thresh = new Mat();

            Cv2.Threshold(outp, thresh, 0, 255, ThresholdTypes.Binary);

            /* Cv2.ImShow("2", thresh);
             * Cv2.WaitKey(-1);*/
            OpenCvSharp.Point[][] counts;
            HierarchyIndex[]      hierarchyIndices;
            Cv2.FindContours(thresh.Clone(), out counts, out hierarchyIndices, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            double max = 0;

            OpenCvSharp.Point[] point = null;
            foreach (var count in counts)
            {
                if (max < Cv2.ContourArea(count))
                {
                    point = count;
                    max   = Cv2.ContourArea(count);
                }
            }
            Console.WriteLine(thresh.Rows);
            Console.WriteLine(thresh.Cols);
            /*int** mask = new int[][];*/
            Rect rect = Cv2.BoundingRect(point);
            Mat  mat  = Mat.Zeros(thresh.Rows, thresh.Cols, thresh.Type());

            Cv2.Rectangle(mat, rect.TopLeft, rect.BottomRight, 255, -1);
            Mat minRect = mat.Clone();
            Mat sub     = mat.Clone();

            while (Cv2.CountNonZero(sub) > 0)
            {
                Cv2.Erode(minRect, minRect, null);
                Cv2.Subtract(minRect, thresh, sub);
            }
            Cv2.FindContours(minRect.Clone(), out counts, out hierarchyIndices, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            max = 0;
            foreach (var count in counts)
            {
                if (max < Cv2.ContourArea(count))
                {
                    point = count;
                    max   = Cv2.ContourArea(count);
                }
            }
            rect     = Cv2.BoundingRect(point);
            result   = new Mat(result, rect);
            savepath = savepath + "/" + "result.jpg";
            Cv2.ImWrite(savepath, result);
            try
            {
                pictureBox1.Image = Image.FromFile(savepath);
            }
            catch (Exception)
            {
                // ignore preview failures; the cropped result has already been written to savepath
            }
            MessageBox.Show("拼接成功");
        }