Пример #1
3
        /// <summary>
        /// Segments individual tiles in <paramref name="image"/> via an
        /// edge-sharpen / distance-transform / watershed pipeline, filters the
        /// resulting contours down to square-ish tile shapes, draws their
        /// outlines onto <paramref name="image"/>, and reports elapsed time and
        /// tile count in the data text box. Intermediate stages are shown in
        /// debug windows ("0" through "6").
        /// </summary>
        /// <param name="image">Source BGR image; detected tile outlines are drawn onto it.</param>
        /// <param name="modifiedMat">Not used in the visible body — presumably a leftover parameter; confirm against callers.</param>
        public void FilterTiles(Mat image, Mat modifiedMat)
        {
            CvInvoke.Imshow("0", image);
            
            // Time the whole detection pass for the report at the end.
            Stopwatch sw1 = new Stopwatch();
            sw1.Start();
            // Greyscale copies: greyResult is the running pipeline image,
            // greySource keeps the (possibly canny-subtracted) source for later stages.
            Mat laplaced = new Mat();
            CvInvoke.CvtColor(image, laplaced, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
            Mat greyResult = laplaced.Clone();
            Mat greySource = laplaced.Clone();

            Mat cannySrc = new Mat(); // NOTE(review): allocated but never used below

            //if not half inch, do canny and subtract to separate tiles better. Basically "sharpens" the edge
            if (scan.TileSettings.CannyEdges)
            {
                //create canny image, these parameters could be adjusted probably?
                CvInvoke.Canny(greySource, greyResult, 50, 150);
                //dilate then erode (morphological close) to join broken canny edge fragments

                CvInvoke.Dilate(greyResult, greyResult, null, new System.Drawing.Point(1, 1), scan.TileSettings.CannyDilate, BorderType.Default, CvInvoke.MorphologyDefaultBorderValue);
                CvInvoke.Erode(greyResult, greyResult, null, new System.Drawing.Point(1, 1), scan.TileSettings.CannyDilate, BorderType.Default, CvInvoke.MorphologyDefaultBorderValue);
                
                CvInvoke.Imshow("1a", greyResult);

                //subtract dilated canny from source to get separation
                CvInvoke.Subtract(greySource, greyResult, greyResult);
                greySource = greyResult.Clone();
                CvInvoke.Imshow("1b", greyResult);
            }

            // Optionally knock out bright regions above the trackbar threshold.
            if (scan.TileSettings.ThresholdEdges)
            {
                Mat edges = new Mat();
                CvInvoke.Threshold(greyResult, edges, (float)thresholdTrackbar.Value, 0, ThresholdType.ToZero);
                CvInvoke.Subtract(greySource, edges, greyResult);
                CvInvoke.Erode(greyResult, greyResult, null, new System.Drawing.Point(1, 1), 2, BorderType.Default, CvInvoke.MorphologyDefaultBorderValue);
                CvInvoke.Imshow("pres-1c", greyResult);
             }
            //perform distance transform
            CvInvoke.DistanceTransform(greyResult, greyResult, null, DistType.L2, 5);
            //normalize the image to bring out the peaks
            CvInvoke.Normalize(greyResult, greyResult, 0, 1, NormType.MinMax);
            CvInvoke.Imshow("2", greyResult);

            //threshold the image, different thresholds for different tiles

            CvInvoke.Threshold(greyResult, greyResult, scan.TileSettings.ThresholdVal, 1, ThresholdType.Binary);

            CvInvoke.Imshow("3", greyResult);

            //erode to split the blobs
            CvInvoke.Erode(greyResult, greyResult, null, new System.Drawing.Point(-1, -1), scan.TileSettings.ThresholdErode, BorderType.Default, CvInvoke.MorphologyDefaultBorderValue);

            //convert to 8 bit unsigned needed for canny
            greyResult.ConvertTo(greyResult, DepthType.Cv8U);

            VectorOfVectorOfPoint markers = new VectorOfVectorOfPoint();

            //create 32bit, single channel image for result of markers
            Mat markerImage = new Mat(greyResult.Size, DepthType.Cv32S, 1);

            //set image to 0
            markerImage.SetTo(new MCvScalar(0, 0, 0));

            //find the contours
            CvInvoke.FindContours(greyResult, markers, null, RetrType.External, ChainApproxMethod.LinkRuns);

            //label the markers from 1 -> n, the rest of the image should remain 0
            for (int i = 0; i < markers.Size; i++)
                CvInvoke.DrawContours(markerImage, markers, i, new MCvScalar(i + 1, i + 1, i + 1), -1);

            // Scale the tiny label values up by a large constant purely so they
            // are visible in the debug window.
            ScalarArray mult = new ScalarArray(5000);
            Mat markerVisual = new Mat();

            CvInvoke.Multiply(markerImage, mult, markerVisual);

            CvInvoke.Imshow("4", markerVisual);

            //draw the background marker (small dot near the corner labels the background region for watershed)
            CvInvoke.Circle(markerImage,
                new System.Drawing.Point(5, 5),
                3,
                new MCvScalar(255, 255, 255),
                -1);

            //convert to 3 channel (Watershed requires a BGR input image)
            Mat convertedOriginal = new Mat();
            
            //use canny modified if 3/4", or use the gray image for others

            CvInvoke.CvtColor(greySource, convertedOriginal, ColorConversion.Gray2Bgr);

            //watershed!!
            CvInvoke.Watershed(convertedOriginal, markerImage);
            //visualize
            CvInvoke.Multiply(markerImage, mult, markerVisual);
            CvInvoke.Imshow("5", markerVisual);

            //get contours to get the actual tiles now that they are separate...
            VectorOfVectorOfPoint tilesContours = new VectorOfVectorOfPoint();

            markerVisual.ConvertTo(markerVisual, DepthType.Cv8U);
          
            // Invert so watershed regions become white blobs on black, then dilate
            // slightly before extracting each tile's contour.
            CvInvoke.BitwiseNot(markerVisual, markerVisual);
            CvInvoke.Imshow("6", markerVisual);
            CvInvoke.Dilate(markerVisual, markerVisual, null, new System.Drawing.Point(1, 1), 2, BorderType.Default, CvInvoke.MorphologyDefaultBorderValue);

            CvInvoke.FindContours(markerVisual, tilesContours, null, RetrType.External, ChainApproxMethod.LinkRuns);
            
            List<System.Drawing.Point> tiles = new List<System.Drawing.Point>();
            for (int i = 0; i < tilesContours.Size; i++)
            {
                using(VectorOfPoint c = tilesContours[i])
                using (VectorOfPoint approx = new VectorOfPoint())
                {
                    //epsilon = arclength * .05 to get rid of convex areas
                    CvInvoke.ApproxPolyDP(c, approx, CvInvoke.ArcLength(c, true) * .05, true);
                    double area = CvInvoke.ContourArea(approx);
                  
                    //filter out the small contours...
                    if (area > scan.TileSettings.MinArea && area < scan.TileSettings.MaxArea)
                    {
                        //match the shape against the _square template (lower ratio = closer match)
                        double ratio = CvInvoke.MatchShapes(_square, approx, Emgu.CV.CvEnum.ContoursMatchType.I3);

                        if (ratio < .05)
                        {
                            // Centroid of the contour from its spatial moments.
                            var M = CvInvoke.Moments(c);
                            int cx = (int)(M.M10 / M.M00);
                            int cy = (int)(M.M01 / M.M00);

                            //filter out any whose center is within 50px of an already-accepted tile
                            if (!tiles.Any(x => Math.Abs(x.X - cx) < 50 && Math.Abs(x.Y - cy) < 50))
                            {
                                tiles.Add(new System.Drawing.Point(cx, cy));
                                // Outline the approximated polygon, wrapping from the
                                // last vertex back to the first.
                                for (int j = 0; j < approx.Size; j++)
                                {
                                    int second = j+1 == approx.Size ? 0 : j + 1;

                                    //do some detection for upsidedown/right side up here....

                                    CvInvoke.Line(image,
                                        new System.Drawing.Point(approx[j].X, approx[j].Y),
                                        new System.Drawing.Point(approx[second].X, approx[second].Y),
                                        new MCvScalar(255, 255, 255,255), 4);
                                }
                            }
                        }
                    }
                }
            }
            sw1.Stop();

            dataTextBox.AppendText(String.Format("Took {0} ms to detect {1} tiles{2}", sw1.ElapsedMilliseconds, tiles.Count, Environment.NewLine));

       //     dataTextBox.AppendText(String.Format("Found {0} tiles{1}", tiles.Count, Environment.NewLine));
         
            this.originalBox.Image = image;
            resultBox.Image = markerVisual;
        }
Пример #2
1
        /// <summary>
        /// Finds the contour with the largest area in <paramref name="cannyEdges"/>,
        /// draws it onto <paramref name="result"/> in red, and returns a copy of it.
        /// </summary>
        /// <param name="cannyEdges">Edge image to search (consumed by FindContours).</param>
        /// <param name="result">Image the largest contour is drawn on.</param>
        /// <returns>
        /// A copy of the largest contour's points, or an empty vector if no
        /// contour was found.
        /// </returns>
        public static VectorOfPoint FindLargestContour(IInputOutputArray cannyEdges, IInputOutputArray result)
        {
            int largestContourIndex = 0;
            double largestArea = 0;

            using (Mat hierarchy = new Mat())
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(cannyEdges, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);

                // Guard: no contours at all — return an empty vector instead of
                // indexing out of range below.
                if (contours.Size == 0)
                    return new VectorOfPoint();

                // Scan for the contour with the largest area. (The original also
                // redrew the running-largest contour on every iteration and kept
                // unused locals `hirarchy` and `color`; all removed — the final
                // thickness-3 draw below overdrew the in-loop draws anyway.)
                for (int i = 0; i < contours.Size; i++)
                {
                    double area = CvInvoke.ContourArea(contours[i], false);
                    if (area > largestArea)
                    {
                        largestArea = area;
                        largestContourIndex = i;
                    }
                }

                // Draw only the winner, once.
                CvInvoke.DrawContours(result, contours, largestContourIndex, new MCvScalar(0, 0, 255), 3, LineType.EightConnected, hierarchy);

                // Copy the points out so the caller owns them after `contours` is disposed.
                return new VectorOfPoint(contours[largestContourIndex].ToArray());
            }
        }
Пример #3
0
 /// <summary>
 /// Finds all contours in the instance's <c>matImage</c> as a flat list
 /// (no hierarchy), keeping every contour point.
 /// </summary>
 /// <returns>The contours found; the caller owns (and should dispose) the vector.</returns>
 public VectorOfVectorOfPoint FindContours()
 {
     VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
     // The hierarchy output is never used by this method's callers; dispose it
     // here instead of leaking the native Mat (the original never disposed it).
     using (Mat hierarchy = new Mat())
     {
         CvInvoke.FindContours(matImage, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);
     }
     return contours;
 }
Пример #4
0
        /// <summary>
        /// Finds all contours in <paramref name="cannyEdges"/>, draws each
        /// contour's min-area bounding rectangle onto <paramref name="result"/>,
        /// and tests which polygon approximations look like rectangles
        /// (all interior angles in [80, 100] degrees).
        /// </summary>
        /// <param name="cannyEdges">Edge image to search (consumed by FindContours).</param>
        /// <param name="result">Image the candidate rectangles are drawn on.</param>
        /// <param name="areaSize">Minimum approximated-contour area to consider.</param>
        /// <returns>All contours found; the caller owns (and should dispose) the vector.</returns>
        public static VectorOfVectorOfPoint FindRectangle(IInputOutputArray cannyEdges, IInputOutputArray result, int areaSize = 250)
        {
            // BUG FIX: the contours vector is returned to the caller, so it must
            // NOT be created in a using block — the original disposed it on the
            // way out and handed the caller a freed native vector.
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                var rect = CvInvoke.MinAreaRect(contours[i]).MinAreaRect();
                CvInvoke.Rectangle(result, rect, new MCvScalar(0, 0, 255), 3);

                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    // epsilon = 5% of the arc length
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);

                    if (CvInvoke.ContourArea(approxContour, false) > areaSize) //only consider contours with area greater than areaSize
                    {
                        if (approxContour.Size >= 4) //The contour has at least 4 vertices.
                        {
                            #region determine if all the angles in the contour are within [80, 100] degree
                            bool isRectangle = true;
                            Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(
                                   edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }
                            #endregion

                            //if (isRectangle)
                            //{
                            //    var rect = CvInvoke.MinAreaRect(approxContour).MinAreaRect();
                            //    CvInvoke.Rectangle(result, rect, new MCvScalar(0, 0, 255), 3);
                            //    //boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            //}
                        }
                    }
                }
            }

            return contours;
        }
Пример #5
0
      /// <summary>
      /// Draws the planar subdivision of a random point set: each voronoi facet
      /// is filled with a random dark colour and outlined in black, its
      /// generating point is marked in red, and the delaunay triangulation is
      /// overlaid in white.
      /// </summary>
      /// <param name="maxValue">The points contain values in [0, maxValue)</param>
      /// <param name="pointCount">The total number of points</param>
      /// <returns>An image representing the planar subdivision of the points</returns>
      public static Mat Draw(float maxValue, int pointCount)
      {
         Triangle2DF[] triangles;
         VoronoiFacet[] facets;
         Random rng = new Random((int)(DateTime.Now.Ticks & 0x0000ffff));

         CreateSubdivision(maxValue, pointCount, out triangles, out facets);

         // Canvas for the visualisation, sized to cover the point range.
         Mat canvas = new Mat((int)maxValue, (int)maxValue, DepthType.Cv8U, 3);

         // Pass 1: voronoi facets.
         foreach (VoronoiFacet facet in facets)
         {
#if NETFX_CORE
            Point[] corners = Extensions.ConvertAll<PointF, Point>(facet.Vertices, Point.Round);
#else
            Point[] corners = Array.ConvertAll<PointF, Point>(facet.Vertices, Point.Round);
#endif
            using (VectorOfPoint cornerVector = new VectorOfPoint(corners))
            using (VectorOfVectorOfPoint polygonList = new VectorOfVectorOfPoint(cornerVector))
            {
               // Fill the facet with a random dark colour (B, G, R each < 120).
               MCvScalar fill = new Bgr(rng.NextDouble() * 120, rng.NextDouble() * 120, rng.NextDouble() * 120).MCvScalar;
               CvInvoke.FillPoly(canvas, polygonList, fill);

               // Outline the facet in black.
               CvInvoke.Polylines(canvas, cornerVector, true, new Bgr(0, 0, 0).MCvScalar, 2);
            }

            // Mark the point that generated this facet in red.
            CvInvoke.Circle(canvas, Point.Round(facet.Point), 5, new Bgr(0, 0, 255).MCvScalar, -1);
         }

         // Pass 2: delaunay triangulation in white.
         foreach (Triangle2DF triangle in triangles)
         {
#if NETFX_CORE
            Point[] corners = Extensions.ConvertAll<PointF, Point>(triangle.GetVertices(), Point.Round);
#else
            Point[] corners = Array.ConvertAll<PointF, Point>(triangle.GetVertices(), Point.Round);
#endif
            using (VectorOfPoint triangleVector = new VectorOfPoint(corners))
            {
               CvInvoke.Polylines(canvas, triangleVector, true, new Bgr(255, 255, 255).MCvScalar);
            }
         }

         return canvas;
      }
Пример #6
0
        /// <summary>
        /// Filters a candidate license-plate image: inverse-binarises it, masks
        /// out any contour whose bounding box is taller than half the plate
        /// (plate frame / border artifacts rather than characters), then applies
        /// a morphological open to remove small specks.
        /// </summary>
        /// <param name="plate">The greyscale candidate plate image.</param>
        /// <returns>The binarised, filtered plate image.</returns>
        private static UMat FilterPlate(UMat plate)
        {
            UMat thresh = new UMat();
            CvInvoke.Threshold(plate, thresh, 120, 255, ThresholdType.BinaryInv);

            Size plateSize = plate.Size;
            using (Mat plateMask = new Mat(plateSize.Height, plateSize.Width, DepthType.Cv8U, 1))
            using (Mat plateCanny = new Mat())
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                // Start with an all-white mask; tall contours get stamped out black below.
                plateMask.SetTo(new MCvScalar(255.0));
                CvInvoke.Canny(plate, plateCanny, 100, 50);
                CvInvoke.FindContours(plateCanny, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                int count = contours.Size;
                // BUG FIX: start at 0 — the original started at 1 and silently
                // skipped the first contour.
                for (int i = 0; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                    {
                        Rectangle rect = CvInvoke.BoundingRectangle(contour);
                        // Anything taller than half the plate is border, not a character.
                        if (rect.Height > (plateSize.Height >> 1))
                        {
                            // Grow the rect by one pixel on each side, clamped to the image.
                            rect.Inflate(1, 1);
                            rect.Intersect(new Rectangle(Point.Empty, plate.Size));
                            CvInvoke.Rectangle(plateMask, rect, new MCvScalar(), -1);
                        }
                    }
                }

                // Zero out the masked (non-character) regions of the thresholded plate.
                thresh.SetTo(new MCvScalar(), plateMask);
            }

            // Morphological open (erode then dilate) to drop remaining noise.
            CvInvoke.Erode(thresh, thresh, null, new Point(-1, -1), 1, BorderType.Constant,
                CvInvoke.MorphologyDefaultBorderValue);
            CvInvoke.Dilate(thresh, thresh, null, new Point(-1, -1), 1, BorderType.Constant,
                CvInvoke.MorphologyDefaultBorderValue);

            return thresh;
        }
Пример #7
0
        /// <summary>
        /// Detect license plate from the given image
        /// </summary>
        /// <param name="img">The image to search license plate from</param>
        /// <param name="licensePlateImagesList">A list of images where the detected license plate regions are stored</param>
        /// <param name="filteredLicensePlateImagesList">A list of images where the detected license plate regions (with noise removed) are stored</param>
        /// <param name="detectedLicensePlateRegionList">A list where the regions of license plate (defined by an MCvBox2D) are stored</param>
        /// <returns>The list of words for each license plate</returns>
        public List<String> DetectLicensePlate(
            IInputArray img,
            List<IInputOutputArray> licensePlateImagesList,
            List<IInputOutputArray> filteredLicensePlateImagesList,
            List<RotatedRect> detectedLicensePlateRegionList)
        {
            var licenses = new List<String>();

            using (Mat grayImage = new Mat())
            using (Mat cannyEdges = new Mat())
            using (VectorOfVectorOfPoint contourCandidates = new VectorOfVectorOfPoint())
            {
                // Grey-scale + canny, then build the full contour tree so the
                // recursive search can walk parent/child relationships.
                CvInvoke.CvtColor(img, grayImage, ColorConversion.Bgr2Gray);
                CvInvoke.Canny(grayImage, cannyEdges, 100, 50, 3, false);
                int[,] contourHierarchy = CvInvoke.FindContourTree(cannyEdges, contourCandidates, ChainApproxMethod.ChainApproxSimple);

                // Recurse from the root contour (index 0), accumulating results
                // into the caller-supplied lists.
                FindLicensePlate(contourCandidates, contourHierarchy, 0, grayImage, cannyEdges, licensePlateImagesList, filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
            }

            return licenses;
        }
Пример #8
0
    /// <summary>
    /// Converts <paramref name="src"/> to HSV in place, thresholds it against the
    /// configured min/max HSV bounds, erodes + cannys the mask, draws all found
    /// contours onto a fresh single-channel image, and hands that image to the
    /// frame callback (if any).
    /// </summary>
    /// <param name="src">Input BGR frame; converted to HSV in place.</param>
    public void ApplyFilter(Mat src)
    {
        CvInvoke.CvtColor(src, src, ColorConversion.Bgr2Hsv);

        // FIX: the original leaked threshold, element, hierarchy and one
        // VectorOfPoint per contour — all native resources now disposed.
        using (Mat threshold = new Mat(src.Height, src.Width, src.Depth, src.NumberOfChannels))
        using (ScalarArray lower = new ScalarArray(new MCvScalar(m_hmin, m_smin, m_vmin)))
        using (ScalarArray upper = new ScalarArray(new MCvScalar(m_hmax, m_smax, m_vmax)))
        {
            CvInvoke.InRange(src, lower, upper, threshold);

            using (Mat element = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), Point.Empty))
            {
                CvInvoke.Erode(threshold, threshold, element, Point.Empty, 1, BorderType.Constant, new MCvScalar(1.0f));
            }
            CvInvoke.Canny(threshold, threshold, 100, 255);

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            using (Mat hierarchy = new Mat())
            {
                CvInvoke.FindContours(threshold, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxSimple, Point.Empty);

                // NOTE: draw is handed to the frame callback, so it is
                // intentionally not disposed here.
                Mat draw = new Mat(src.Height, src.Width, src.Depth, 1);
                draw.SetTo(new MCvScalar(0.0));

                //Debug.Log("CONTOURS");

                for (int i = 0; i < contours.Size; i++)
                {
                    CvInvoke.DrawContours(draw, contours, i, new MCvScalar(255.0), 1, LineType.EightConnected, null, int.MaxValue, Point.Empty);

                    using (VectorOfPoint contour = contours[i])
                    {
                        double a = CvInvoke.ContourArea(contour);
                        //Debug.Log("Contour: " + a);
                    }
                }

                //Emgu.CV.UI.ImageViewer.Show(draw, "test");

                if (m_onFrame != null) m_onFrame.Invoke(draw);
            }
        }
    }
Пример #9
0
      /// <summary>
      /// Exercises the FindContours API two ways: once requesting the full
      /// hierarchy tree (results intentionally discarded), then in flat-list
      /// mode, drawing every contour as a polyline onto a white copy of the
      /// canny'd source image.
      /// </summary>
      public void TestContour()
      {
         Image<Gray, Byte> img = EmguAssert.LoadImage<Gray, byte>("stuff.jpg");
         // NOTE(review): the return value is discarded here, so if SmoothGaussian
         // returns a new image rather than blurring in place the blur has no
         // effect — confirm against the Emgu Image<> API.
         img.SmoothGaussian(3);
         img = img.Canny(80, 50);
         // res: blank white canvas the contours get drawn onto.
         Image<Gray, Byte> res = img.CopyBlank();
         res.SetValue(255);

         // Pass 1: tree retrieval with an explicit hierarchy output. The results
         // are unused; this only checks the call succeeds.
         using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
         using (Mat hierachy = new Mat())
         {
            CvInvoke.FindContours(img, contours, hierachy, RetrType.Tree, ChainApproxMethod.ChainApproxSimple);
            
         }

         // Pass 2: flat-list retrieval (no hierarchy), drawing each contour.
         using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
         //using (VectorOfVectorOfInt hierarchy = new VectorOfVectorOfInt())
         {
            CvInvoke.FindContours(img, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            for (int i = 0; i < contours.Size; i++)
            {
               using (VectorOfPoint contour = contours[i])
               {
                  // pts is extracted but never used; Polylines takes the vector directly.
                  Point[] pts = contour.ToArray();
                  CvInvoke.Polylines(res, contour, true, new MCvScalar());
               }
            }
         }
         /*
         Contour<Point> contour = img.FindContours();

         while (contour != null)
         {
            Contour<Point> approx = contour.ApproxPoly(contour.Perimeter * 0.05);

            if (approx.Convex && approx.Area > 20.0)
            {
               Point[] vertices = approx.ToArray();

               LineSegment2D[] edges = PointCollection.PolyLine(vertices, true);

               res.DrawPolyline(vertices, true, new Gray(200), 1);
            }
            contour = contour.HNext;
         }*/
         //Emgu.CV.UI.ImageViewer.Show(res);
      }
Пример #10
0
        public void PerformShapeDetection()
        {
            if (fileNameTextBox.Text != String.Empty)
            {
                Stopwatch watch = Stopwatch.StartNew();
                watch.Start();
                StringBuilder msgBuilder = new StringBuilder("Performance: ");

                #region get image

                img = new Image<Bgr, byte>(fileNameTextBox.Text);
                img = img.Resize(0.5, Inter.Linear).SmoothMedian(5);
                #endregion

                #region HSV magic
                //min.Hue = MinHueTB.Value; min.Satuation = MinSatTB.Value; min.Value = MinValTB.Value;
                //max.Hue = MaxHueTB.Value; max.Satuation = MaxSatTB.Value; max.Value = MaxValTB.Value;

                HsvMagic(img, maskHsvBlack, maskHsvBlue);

                circleImageBox.Image = maskHsvBlack;
                originalImageBox.Image = img;

                img.ToBitmap().Save("C:\\Emgu\\Dump\\Img.png",System.Drawing.Imaging.ImageFormat.Png);
                maskHsvBlack.ToBitmap().Save("C:\\Emgu\\Dump\\maskHsvBlack.png",  System.Drawing.Imaging.ImageFormat.Png);
                maskHsvBlue.ToBitmap().Save("C:\\Emgu\\Dump\\maskHsvBlue.png",  System.Drawing.Imaging.ImageFormat.Png);
                #endregion

                #region Canny and edge detection

                double cannyThreshold = 1.0;
                double cannyThresholdLinking = 500.0;

                Image<Gray, Byte> cannyBlue = maskHsvBlue.Canny(cannyThreshold, cannyThresholdLinking);
                Image<Gray, Byte> cannyBlack = maskHsvBlack.Canny(cannyThreshold, cannyThresholdLinking);

                watch.Stop();
                msgBuilder.Append(String.Format("Hsv and Canny - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion
                cannyBlue.ToBitmap().Save("C:\\Emgu\\Dump\\cannyBlue.png", System.Drawing.Imaging.ImageFormat.Png);
                cannyBlack.ToBitmap().Save("C:\\Emgu\\Dump\\cannyBlack.png", System.Drawing.Imaging.ImageFormat.Png);

                #region Find  rectangles

                #region detect black borders
                VectorOfVectorOfPoint blackborders = new VectorOfVectorOfPoint();//list of black borders
                List<RotatedRect> Black_boxList = new List<RotatedRect>(); //a box is a rotated rectangle
                VectorOfVectorOfPoint othercontours_black = new VectorOfVectorOfPoint();
                getBlackContours(cannyBlack, blackborders, Black_boxList, othercontours_black);
                resultImg = cannyBlack.Convert<Bgr, Byte>();
                #endregion

                #region blue borders

                VectorOfVectorOfPoint blueborders = new VectorOfVectorOfPoint();//list of blue borders
                List<RotatedRect> Blue_boxList = new List<RotatedRect>(); //a box is a rotated rectangle
                VectorOfVectorOfPoint othercontours_blue = new VectorOfVectorOfPoint();
                getBlueContours(cannyBlue, blueborders, Blue_boxList, othercontours_blue);

                #endregion

              #region clear duplicate boxes

                List<RotatedRect> fltrBlue_boxList = new List<RotatedRect>();
                SizeF TMP_SizeF = new SizeF(0,0);
                PointF TMP_PointF = new PointF(0, 0);
                float TMP_Angle = 0;

                if (Blue_boxList.Count >= 2)
                {
                  for (int i = 1; i < Blue_boxList.Count; i++)
                  {
                    if (Blue_boxList[i - 1].Size.Width * Blue_boxList[i - 1].Size.Height > 750)
                    {
                      if (Math.Abs(Blue_boxList[i - 1].Angle - Blue_boxList[i].Angle) < 1)
                      {
                        if (Math.Abs(Blue_boxList[i - 1].Center.X - Blue_boxList[i].Center.X) < 1 && Math.Abs(Blue_boxList[i - 1].Center.Y - Blue_boxList[i].Center.Y) < 1)
                          if (Math.Abs(Blue_boxList[i - 1].Size.Width - Blue_boxList[i].Size.Width) < 1 && Math.Abs(Blue_boxList[i - 1].Size.Height - Blue_boxList[i].Size.Height) < 1)
                          {
                            TMP_PointF.X = (float)(0.5 * (Blue_boxList[i - 1].Center.X + Blue_boxList[i].Center.X));
                            TMP_PointF.Y = (float)(0.5 * (Blue_boxList[i - 1].Center.Y + Blue_boxList[i].Center.Y));
                            TMP_SizeF.Width = (float)(0.5 * (Blue_boxList[i - 1].Size.Width + Blue_boxList[i].Size.Width));
                            TMP_SizeF.Height = (float)(0.5 * (Blue_boxList[i - 1].Size.Height + Blue_boxList[i].Size.Height));
                            TMP_Angle = (float)(0.5 * (Blue_boxList[i - 1].Angle + Blue_boxList[i].Angle));
                            fltrBlue_boxList.Add(new RotatedRect(TMP_PointF, TMP_SizeF, TMP_Angle));

                          }
                      }
                      else fltrBlue_boxList.Add(Blue_boxList[i]);
                    }
                  }
                }
                else { fltrBlue_boxList = Blue_boxList; } //Blue_boxList.Clear(); }

                List<RotatedRect> fltrBlack_boxList = new List<RotatedRect>();
              VectorOfVectorOfPoint fltr_blackborders = new VectorOfVectorOfPoint();
                TMP_SizeF.Width = 0;
                TMP_SizeF.Height = 0;
                TMP_PointF.X = 0;
                TMP_PointF.Y = 0;
                TMP_Angle = 0;

                if (Black_boxList.Count >= 2)
                {
                  for (int i = 1; i < Black_boxList.Count; i++)
                  {
                    if (Black_boxList[i - 1].Size.Width * Black_boxList[i - 1].Size.Height > 10)
                    {
                      if (Math.Abs(Black_boxList[i - 1].Angle - Black_boxList[i].Angle) < 1)
                      {
                        if (Math.Abs(Black_boxList[i - 1].Center.X - Black_boxList[i].Center.X) < 1 && Math.Abs(Black_boxList[i - 1].Center.Y - Black_boxList[i].Center.Y) < 1)
                          if (Math.Abs(Black_boxList[i - 1].Size.Width - Black_boxList[i].Size.Width) < 1 && Math.Abs(Black_boxList[i - 1].Size.Height - Black_boxList[i].Size.Height) < 1)
                          {
                            TMP_PointF.X = (float)(0.5 * (Black_boxList[i - 1].Center.X + Black_boxList[i].Center.X));
                            TMP_PointF.Y = (float)(0.5 * (Black_boxList[i - 1].Center.Y + Black_boxList[i].Center.Y));
                            TMP_SizeF.Width = (float)(0.5 * (Black_boxList[i - 1].Size.Width + Black_boxList[i].Size.Width));
                            TMP_SizeF.Height = (float)(0.5 * (Black_boxList[i - 1].Size.Height + Black_boxList[i].Size.Height));
                            TMP_Angle = (float)(0.5 * (Black_boxList[i - 1].Angle + Black_boxList[i].Angle));
                            fltrBlack_boxList.Add(new RotatedRect(TMP_PointF, TMP_SizeF, TMP_Angle));
                            //fltr_blackborders.Push();
                          }
                      }
                      else fltrBlack_boxList.Add(Black_boxList[i]);
                    }
                  }
                }
                else { fltrBlack_boxList = Black_boxList; }//Black_boxList.Clear(); }
                #endregion

              //////////
                circleImageBox.Image = maskHsvBlack;
              ////////////

                CvInvoke.DrawContours(resultImg, blackborders, -1, new Bgr(Color.Green).MCvScalar);
                CvInvoke.DrawContours(resultImg, othercontours_black, -1, new Bgr(Color.Red).MCvScalar);
                CvInvoke.DrawContours(resultImg, blueborders, -1, new Bgr(Color.Blue).MCvScalar);

                foreach (RotatedRect box in fltrBlack_boxList)
                {
                    CvInvoke.Polylines(resultImg, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.Aqua).MCvScalar, 1);
                }
                foreach (RotatedRect box in Black_boxList)
                {
                  CvInvoke.Polylines(img, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.Pink).MCvScalar, 1);
                }
                foreach (RotatedRect box in Blue_boxList)
                {
                  CvInvoke.Polylines(img, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.DarkViolet).MCvScalar, 1);
                }
                foreach (RotatedRect box in fltrBlue_boxList)
                {
                  CvInvoke.Polylines(resultImg, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.Yellow).MCvScalar, 1);
                }
                triangleRectangleImageBox.Image = resultImg;
                originalImageBox.Image = img;

                #region save to files
                Image<Bgr, Byte> TMPImageforSaving = new Image<Bgr, byte>(maskHsvBlack.Width, maskHsvBlack.Height, new Bgr(Color.Black));
                CvInvoke.DrawContours(TMPImageforSaving, blackborders, -1, new Bgr(Color.Green).MCvScalar);
                CvInvoke.DrawContours(TMPImageforSaving, othercontours_black, -1, new Bgr(Color.Red).MCvScalar);

                foreach (RotatedRect box in Black_boxList)
                {
                  CvInvoke.Polylines(TMPImageforSaving, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.Pink).MCvScalar, 1);
                }
                TMPImageforSaving.ToBitmap().Save("C:\\Emgu\\Dump\\NonFltrBlack.png", System.Drawing.Imaging.ImageFormat.Png);

                TMPImageforSaving = new Image<Bgr, byte>(TMPImageforSaving.Width, TMPImageforSaving.Height, new Bgr(Color.Black));
                CvInvoke.DrawContours(TMPImageforSaving, blackborders, -1, new Bgr(Color.Green).MCvScalar);
                CvInvoke.DrawContours(TMPImageforSaving, othercontours_black, -1, new Bgr(Color.Red).MCvScalar);
                foreach (RotatedRect box in Blue_boxList)
                {
                  CvInvoke.Polylines(TMPImageforSaving, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.DarkViolet).MCvScalar, 1);
                }
                TMPImageforSaving.ToBitmap().Save("C:\\Emgu\\Dump\\NonFltrBlue.png", System.Drawing.Imaging.ImageFormat.Png);
                TMPImageforSaving = new Image<Bgr, byte>(maskHsvBlack.Width, maskHsvBlack.Height, new Bgr(Color.Black));
                CvInvoke.DrawContours(TMPImageforSaving, blackborders, -1, new Bgr(Color.Green).MCvScalar);
                CvInvoke.DrawContours(TMPImageforSaving, othercontours_black, -1, new Bgr(Color.Red).MCvScalar);

                foreach (RotatedRect box in fltrBlack_boxList)
                {
                  CvInvoke.Polylines(TMPImageforSaving, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.Aqua).MCvScalar, 1);
                }
                TMPImageforSaving.ToBitmap().Save("C:\\Emgu\\Dump\\FltrBlack.png", System.Drawing.Imaging.ImageFormat.Png);

                TMPImageforSaving = new Image<Bgr, byte>(TMPImageforSaving.Width, TMPImageforSaving.Height, new Bgr(Color.Black));
                CvInvoke.DrawContours(TMPImageforSaving, blackborders, -1, new Bgr(Color.Green).MCvScalar);
                CvInvoke.DrawContours(TMPImageforSaving, othercontours_black, -1, new Bgr(Color.Red).MCvScalar);
                foreach (RotatedRect box in fltrBlue_boxList)
                {
                  CvInvoke.Polylines(TMPImageforSaving, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.Yellow).MCvScalar, 1);
                }
                TMPImageforSaving.ToBitmap().Save("C:\\Emgu\\Dump\\FltrBlue.png", System.Drawing.Imaging.ImageFormat.Png);
                #endregion

              /*
                List<VectorOfPoint> contours_for_work = new List<VectorOfPoint>();
                using (VectorOfVectorOfPoint contours = blackborders)
                {
                  for (int i = 0; i < contours.Size; i++)
                  {
                    contours_for_work.Add(contours[i]);
                  }
                }
                contours_for_work.Sort((VectorOfPoint cont1, VectorOfPoint cont2) =>
                 (bool) (CvInvoke.ContourArea(cont1) > CvInvoke.ContourArea(cont1)) );
              */

                VectorOfVectorOfPoint Big = new VectorOfVectorOfPoint();
                bool ready = false;
                using (VectorOfVectorOfPoint contours = blackborders)
                {
                    for (int i = 0; i < contours.Size && !ready; i++)
                    {

                        VectorOfPoint contourI = contours[i];
                        for (int j = i + 1; j < contours.Size && !ready; j++)
                        {
                            if (0.38 * CvInvoke.ContourArea(contours[j]) > CvInvoke.ContourArea(contourI) && 0.26 * CvInvoke.ContourArea(contours[j]) < CvInvoke.ContourArea(contourI))
                            {
                                Big.Push(contours[j]);
                                Big.Push(contours[i]);
                                ready = !ready;
                            }
                        }
                    }
                }

                TMPImageforSaving = new Image<Bgr, Byte>(resultImg.Width, resultImg.Height, new Bgr(Color.Black));
                CvInvoke.DrawContours(TMPImageforSaving, Big, -1, new Bgr(Color.White).MCvScalar);
                TMPImageforSaving.ToBitmap().Save("C:\\Emgu\\Dump\\DetectedContours.png", System.Drawing.Imaging.ImageFormat.Png);
              imgHsv[0].ToBitmap().Save("C:\\Emgu\\Dump\\ImgHsv - Hue.png", System.Drawing.Imaging.ImageFormat.Png);
              imgHsv[1].ToBitmap().Save("C:\\Emgu\\Dump\\ImgHsv - Sat.png", System.Drawing.Imaging.ImageFormat.Png);
              imgHsv[2].ToBitmap().Save("C:\\Emgu\\Dump\\ImgHsv - Val.png", System.Drawing.Imaging.ImageFormat.Png);
              Image<Hls, byte> HlsImg = img.Convert<Hls, Byte>();

              HlsImg[0].ToBitmap().Save("C:\\Emgu\\Dump\\Img HLS - Hue.png", System.Drawing.Imaging.ImageFormat.Png);
              HlsImg[1].ToBitmap().Save("C:\\Emgu\\Dump\\Img HLS - Light.png", System.Drawing.Imaging.ImageFormat.Png);
              HlsImg[2].ToBitmap().Save("C:\\Emgu\\Dump\\Img HLS - Sat.png", System.Drawing.Imaging.ImageFormat.Png);

                lineImageBox.Image = TMPImageforSaving;

                watch.Stop();
                msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion
                /*

                  lineImageBox.Image = resultImg;
                  originalImageBox.Image = img;
                  this.Text = msgBuilder.ToString();

                  #region draw and rectangles
                  Mat triangleRectangleImage = new Mat(img.Size, DepthType.Cv8U, 3);
                  triangleRectangleImage.SetTo(new MCvScalar(0));

                  foreach (RotatedRect box in boxList)
                  {
                      CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.DarkOrange).MCvScalar, 2);
                  }

                  triangleRectangleImageBox.Image = triangleRectangleImage;
                  #endregion

                  #region draw lines
                  /*Mat lineImage = new Mat(img.Size, DepthType.Cv8U, 3);
                  lineImage.SetTo(new MCvScalar(0));
                 foreach (LineSegment2D line in lines)
                   CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(Color.Green).MCvScalar, 2);

                  lineImageBox.Image = lineImage;
                  #endregion
              }
              }

              #region draw
              //foreach (LineSegment2D line in lines)
              //CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(Color.Green).MCvScalar, 2);

              #endregion
               * */
            }
        }
Пример #11
0
        /// <summary>
        /// Finds quadrilateral (4-vertex) contours in a binary mask and records them as "blue" borders.
        /// </summary>
        /// <param name="src">Binary (grayscale) mask to search; FindContours may modify it.</param>
        /// <param name="blueborders">Output: raw contours whose polygonal approximation has exactly 4 vertices.</param>
        /// <param name="Blue_boxList">Output: minimum-area rotated rectangles fitted to the 4-vertex contours.</param>
        /// <param name="othercontours_blue">Output: large contours that are not quadrilaterals.</param>
        public void getBlueContours(Image<Gray, Byte> src, VectorOfVectorOfPoint blueborders, List<RotatedRect> Blue_boxList, VectorOfVectorOfPoint othercontours_blue)
        {
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                // Hierarchy is not needed, so pass null.
                CvInvoke.FindContours(src, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

                for (int i = 0; i < contours.Size; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                    using (VectorOfPoint approxContour = new VectorOfPoint())
                    {
                        // Simplify with 5% of the perimeter as tolerance; closed polygon.
                        CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);

                        // Compute the bounding rectangle once (the original queried it twice per contour).
                        System.Drawing.Rectangle bounding = CvInvoke.BoundingRectangle(approxContour);

                        // Only consider contours with area greater than 250 and a bounding box larger than 1000 px².
                        if (CvInvoke.ContourArea(approxContour, false) > 250 && bounding.Width * bounding.Height > 1000)
                        {
                            if (approxContour.Size == 4)
                            {
                                Blue_boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                blueborders.Push(contour);
                            }
                            else
                            {
                                othercontours_blue.Push(contour);
                            }
                        }
                    }
                }
            }
        }
Пример #12
0
        /// <summary>
        /// Finds quadrilateral contours in a binary mask, recording them as "black" borders,
        /// and saves two debug overlays: raw contour points (red) vs. approximated corners (green).
        /// </summary>
        /// <param name="src">Binary (grayscale) mask to search; FindContours may modify it.</param>
        /// <param name="blackborders">Output: raw contours whose approximation has exactly 4 vertices.</param>
        /// <param name="Black_boxList">Output: minimum-area rotated rectangles fitted to the 4-vertex contours.</param>
        /// <param name="othercontours_black">Output: large contours that are not quadrilaterals.</param>
        public void getBlackContours(Image<Gray, Byte> src, VectorOfVectorOfPoint blackborders, List<RotatedRect> Black_boxList, VectorOfVectorOfPoint othercontours_black)
        {
            // GDI objects (bitmaps, graphics, brushes) hold native handles — dispose them
            // deterministically instead of leaking (the original never disposed any of these).
            using (Bitmap srcBitmap = src.ToBitmap())
            using (Bitmap TMPGood = new Bitmap(srcBitmap, src.Width, src.Height))
            using (Bitmap TMPBad = new Bitmap(srcBitmap, src.Width, src.Height))
            using (Graphics gGood = Graphics.FromImage(TMPGood))
            using (Graphics gBad = Graphics.FromImage(TMPBad))
            using (Brush RedBrush = new SolidBrush(Color.Red))
            using (Brush GreenBrush = new SolidBrush(Color.Green))
            {
                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(src, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                    for (int i = 0; i < contours.Size; i++)
                    {
                        using (VectorOfPoint contour = contours[i])
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            // Mark every raw contour point on the "BAD" debug image.
                            Point[] ptsContour = contour.ToArray();
                            for (int k = 0; k < ptsContour.Length; k++)
                            {
                                gBad.FillEllipse(RedBrush, ptsContour[k].X, ptsContour[k].Y, 6, 6);
                            }

                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                            if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                            {
                                // Mark the simplified corners on the "GOOD" debug image.
                                Point[] ptsApprox = approxContour.ToArray();
                                for (int k = 0; k < ptsApprox.Length; k++)
                                {
                                    gGood.FillEllipse(GreenBrush, ptsApprox[k].X, ptsApprox[k].Y, 6, 6);
                                }

                                // Area was already verified above; only the vertex count matters here
                                // (the original repeated the area test redundantly).
                                if (approxContour.Size == 4)
                                {
                                    Black_boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                    blackborders.Push(contour);
                                }
                                else
                                {
                                    othercontours_black.Push(contour);
                                }
                            }
                        }
                    }
                }
                TMPGood.Save("C:\\Emgu\\Dump\\Black contour corners GOOD.png", System.Drawing.Imaging.ImageFormat.Png);
                TMPBad.Save("C:\\Emgu\\Dump\\Black contour corners BAD.png", System.Drawing.Imaging.ImageFormat.Png);
            }
        }
Пример #13
0
 /// <summary>
 /// Intended to filter the detected black borders/boxes.
 /// NOTE(review): currently an empty stub — no filtering is performed. Confirm whether the
 /// implementation is still planned or whether call sites should be removed.
 /// </summary>
 public void FilterBlackBorders(VectorOfVectorOfPoint blackborders, List<RotatedRect> Black_boxlist, VectorOfVectorOfPoint othercontours_black)
 {
 }
Пример #14
0
      /// <summary>
      /// Walks every contour found in the grayscale conversion of "lena.jpg", simplifies each one,
      /// computes its convex hull and extracts the convexity defects.
      /// NOTE(review): despite the local name "canny", no Canny edge detection is applied —
      /// contours are found directly on the grayscale image; confirm intent.
      /// </summary>
      public void TestConvecityDefect()
      {
         Mat frame = EmguAssert.LoadMat("lena.jpg");
         using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
         using (Image<Gray, byte> canny = frame.ToImage<Gray, byte>())
         {
            // No hierarchy output is needed.
            IOutputArray hierarchy = null;
            CvInvoke.FindContours(canny, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxSimple);

            for (int i = 0; i < contours.Size; i++)
            {
               // NOTE(review): contours[i] is used as both source and destination; whether the
               // simplified polygon is actually written back depends on the indexer's memory-sharing
               // semantics — verify against the Emgu CV version in use.
               CvInvoke.ApproxPolyDP(contours[i], contours[i], 5, false);
               using (VectorOfInt hull = new VectorOfInt())
               using (Mat defects = new Mat())
               using (VectorOfPoint c = contours[i])
               {
                  CvInvoke.ConvexHull(c, hull, false, false);
                  CvInvoke.ConvexityDefects(c, hull, defects);
                  if (!defects.IsEmpty)
                  {
                     // Defects arrive as a 4-channel Mat; copy into a Matrix<int> to read the data.
                     using (Matrix<int> value = new Matrix<int>(defects.Rows, defects.Cols, defects.NumberOfChannels))
                     {
                        defects.CopyTo(value);
                        //you can iterate through the defect here:
                        for (int j = 0; j < value.Rows; j++)
                        {
                           // Per row: start index, end index, farthest-point index into the contour,
                           // and the fixed-point defect depth scaled by 256.
                           int startIdx = value.Data[j, 0];
                           int endIdx = value.Data[j, 1];
                           int farthestPtIdx = value.Data[j, 2];
                           double fixPtDepth = value.Data[j, 3]/256.0;
                           
                        }
                     }
                  }
               }
            }
         }
      }
        /// <summary>
        /// Loads an image, blacks out the polygon given by <paramref name="pointCollection"/>
        /// (scaled to the image size) and returns the bitmap cropped to the polygon's bounding box.
        /// Also stores the rasterized polygon in the <c>polygonPoints</c> field.
        /// </summary>
        /// <param name="imagePath">Path of the image file to load.</param>
        /// <param name="pointCollection">Polygon vertices in the caller's coordinate space.</param>
        /// <returns>The masked image cropped to the polygon's axis-aligned bounding box.</returns>
        private Drawing.Bitmap GetMaskedBitmap(string imagePath, IList<Point> pointCollection)
        {
            List<Point> scaledPoints;
            Image<Bgr, byte> image;

            // The original leaked both native buffers; scope them so they are released deterministically.
            using (Mat matrix = new Mat(imagePath, LoadImageType.AnyColor))
            using (UMat uMatrix = matrix.ToUMat(AccessType.ReadWrite))
            {
                // Scale Polygon
                scaledPoints = GetScaledPoints(pointCollection, uMatrix.Rows, uMatrix.Cols);

                polygonPoints = GetPolygonPoints(scaledPoints, uMatrix.Rows, uMatrix.Cols);

                // Apply Polygon
                using (VectorOfPoint vPoint = new VectorOfPoint(polygonPoints.ToArray()))
                using (VectorOfVectorOfPoint vvPoint = new VectorOfVectorOfPoint(vPoint))
                {
                    CvInvoke.FillPoly(uMatrix, vvPoint, new Bgr(0, 0, 0).MCvScalar);
                }

                // Copy into a managed image before the native buffers are disposed.
                image = new Image<Bgr, byte>(uMatrix.Bitmap);
            }

            // Crop Bitmap
            int left = (int)scaledPoints.Min(p => p.X);
            int top = (int)scaledPoints.Min(p => p.Y);
            int width = (int)scaledPoints.Max(p => p.X) - left;
            int height = (int)scaledPoints.Max(p => p.Y) - top;

            image.ROI = new Drawing.Rectangle(left, top, width, height);

            return image.Bitmap;
        }
Пример #16
0
      /// <summary>
      /// Demonstrates convexity-defect detection on a concave polyline: draws the contour,
      /// computes its convex hull, then connects each defect's start and end points with a
      /// thin red line.
      /// </summary>
      public void TestConvexityDefacts()
      {
         Point[] polyline = new Point[] {
            new Point(10, 10),
            new Point(10, 250),
            new Point(100, 100),
            new Point(250, 250),
            new Point(250, 10)};
         // The image and the defect matrix are disposable; the original leaked both.
         using (Image<Bgr, Byte> image = new Image<Bgr, byte>(300, 300))
         using (VectorOfPoint vp = new VectorOfPoint(polyline))
         using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint(vp))
         using (VectorOfInt convexHull = new VectorOfInt())
         using (Mat convexityDefect = new Mat())
         {
            //Draw the contour in white thick line
            CvInvoke.DrawContours(image, contours, -1, new MCvScalar(255, 255, 255), 3);
            CvInvoke.ConvexHull(vp, convexHull);
            CvInvoke.ConvexityDefects(vp, convexHull, convexityDefect);

            //convexity defect is a four channel mat, with k rows and 1 col, where k = the number of convexity defects.
            if (!convexityDefect.IsEmpty)
            {
               //Data from Mat are not directly readable so we convert it to Matrix<>
               using (Matrix<int> m = new Matrix<int>(convexityDefect.Rows, convexityDefect.Cols,
                  convexityDefect.NumberOfChannels))
               {
                  convexityDefect.CopyTo(m);

                  for (int i = 0; i < m.Rows; i++)
                  {
                     // Channels 0 and 1 hold start/end indices into the polyline.
                     int startIdx = m.Data[i, 0];
                     int endIdx = m.Data[i, 1];
                     Point startPoint = polyline[startIdx];
                     Point endPoint = polyline[endIdx];
                     //draw a line connecting the convexity defect start point and end point in thin red line
                     CvInvoke.Line(image, startPoint, endPoint, new MCvScalar(0, 0, 255));
                  }
               }
            }

            //Emgu.CV.UI.ImageViewer.Show(image);
         }
      }
Пример #17
0
        /// <summary>
        /// Processes a single camera frame: resizes it (optionally applying the perspective
        /// warp), thresholds on hue, dilates the mask, and collects the bounding boxes of
        /// blobs at least <c>_minPixels</c> large, sorted by ascending area.
        /// </summary>
        /// <param name="frame">Raw camera frame (BGR).</param>
        /// <returns>Result bundling the camera bitmap, threshold bitmap, blob rectangles and elapsed time.</returns>
        public LaserTrackerResult UpdateFromFrame(Mat frame)
        {
            _timer.Reset();
            _timer.Start();

            Bitmap camBitmap;
            Bitmap threshBitmap;
            var detectedRects = new List<Rectangle>();

            using (var threshFrame = new Mat())
            {
                using (var hsvFrame = new Mat())
                {
                    using (var resizedFrame = new Mat())
                    {
                        var targetSize = new Size(_width, _height);
                        CvInvoke.Resize(frame, resizedFrame, targetSize);

                        // Optionally correct perspective with the precomputed homography.
                        if (_warp)
                        {
                            using (var warpedFrame = new Mat())
                            {
                                CvInvoke.WarpPerspective(resizedFrame, warpedFrame, _homographyMat, targetSize);
                                warpedFrame.CopyTo(resizedFrame);
                            }
                        }

                        CvInvoke.CvtColor(resizedFrame, hsvFrame, ColorConversion.Bgr2Hsv);
                        camBitmap = resizedFrame.Bitmap.Clone(new Rectangle(0, 0, _width, _height), PixelFormat.Format32bppArgb);
                    }

                    // Keep pixels whose hue lies within the configured band around the center.
                    float hueMin = _hueCenter - _hueWidth;
                    float hueMax = _hueCenter + _hueWidth;
                    HueThreshold(hueMin, hueMax, hsvFrame, threshFrame);

                    if (_dilate > 0)
                    {
                        CvInvoke.Dilate(threshFrame, threshFrame, null, new Point(-1, -1), _dilate, BorderType.Default, new MCvScalar());
                    }
                }

                threshBitmap = threshFrame.Bitmap.Clone(new Rectangle(0, 0, _width, _height), PixelFormat.Format32bppArgb);

                // FindContours mutates its input, so run it on a scratch copy of the mask.
                using (var scratchFrame = threshFrame.Clone())
                {
                    using (var contours = new VectorOfVectorOfPoint())
                    {
                        CvInvoke.FindContours(scratchFrame, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
                        for (var i = 0; i < contours.Size; i++)
                        {
                            var boundingBox = CvInvoke.BoundingRectangle(contours[i]);
                            if (boundingBox.Width * boundingBox.Height >= _minPixels)
                            {
                                detectedRects.Add(boundingBox);
                            }
                        }
                    }
                }
            }

            // Smallest blobs first.
            detectedRects.Sort((a, b) => (a.Width * a.Height).CompareTo(b.Width * b.Height));

            return new LaserTrackerResult(camBitmap, threshBitmap, detectedRects, _timer.Elapsed);
        }
Пример #18
0
        /// <summary>
        /// Kinect AllFramesReady handler: slices the depth frame to the slider-selected depth
        /// range, counts blobs whose contour area lies between the min/max size sliders (drawing
        /// a red box around each), and mirrors the raw color frame into <c>colorBitmap</c>.
        /// </summary>
        private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            BitmapSource depthBmp = null;
            blobCount = 0;

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                {
                    if (depthFrame != null)
                    {
                        depthBmp = depthFrame.SliceDepthImage((int)sliderMin.Value, (int)sliderMax.Value);

                        // The intermediate Emgu images wrap native memory — dispose them once the
                        // blob boxes have been rendered (the original leaked both every frame).
                        using (Image<Bgr, Byte> openCVImg = new Image<Bgr, byte>(depthBmp.ToBitmap()))
                        using (Image<Gray, byte> gray_image = openCVImg.Convert<Gray, byte>())
                        {
                            //Find contours
                            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                            {
                                // Hierarchy is unused, so pass null instead of allocating a Mat that leaks.
                                CvInvoke.FindContours(gray_image, contours, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

                                for (int i = 0; i < contours.Size; i++)
                                {
                                    VectorOfPoint contour = contours[i];
                                    double area = CvInvoke.ContourArea(contour, false);

                                    // Area thresholds are the squares of the slider values.
                                    if ((area > Math.Pow(sliderMinSize.Value, 2)) && (area < Math.Pow(sliderMaxSize.Value, 2)))
                                    {
                                        System.Drawing.Rectangle box = CvInvoke.BoundingRectangle(contour);
                                        openCVImg.Draw(box, new Bgr(System.Drawing.Color.Red), 2);
                                        blobCount++;
                                    }
                                }
                            }

                            this.outImg.Source = ImageHelpers.ToBitmapSource(openCVImg);
                        }
                        txtBlobCount.Text = blobCount.ToString();
                    }
                }

                if (colorFrame != null)
                {
                    colorFrame.CopyPixelDataTo(this.colorPixels);
                    this.colorBitmap.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                        this.colorPixels,
                        this.colorBitmap.PixelWidth * sizeof(int),
                        0);
                }
            }
        }
Пример #19
0
      /// <summary>
      /// Recursively scans the contour hierarchy for candidate license-plate regions:
      /// a contour qualifies if it has at least 3 children (characters), area &gt; 400,
      /// and a width/height ratio between 3 and 10 after angle normalization. Each
      /// accepted region is deskewed, resized, filtered, and run through Tesseract OCR.
      /// </summary>
      /// <param name="contours">All contours found on the edge image.</param>
      /// <param name="hierachy">Contour hierarchy: [idx,0] = next sibling, [idx,2] = first child.</param>
      /// <param name="idx">Index of the first contour to examine at this hierarchy level.</param>
      /// <param name="gray">Grayscale source image the plates are extracted from.</param>
      /// <param name="canny">Edge image (unused here beyond being forwarded on recursion).</param>
      /// <param name="licensePlateImagesList">Output: deskewed plate images.</param>
      /// <param name="filteredLicensePlateImagesList">Output: filtered plate images fed to OCR.</param>
      /// <param name="detectedLicensePlateRegionList">Output: rotated rectangles of detected plates.</param>
      /// <param name="licenses">Output: recognized plate strings.</param>
      private void FindLicensePlate(
         VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
         List<IInputOutputArray> licensePlateImagesList, List<IInputOutputArray> filteredLicensePlateImagesList, List<RotatedRect> detectedLicensePlateRegionList,
         List<String> licenses)
      {
         // Iterate over the siblings at this hierarchy level.
         for (; idx >= 0;  idx = hierachy[idx,0])
         {
            int numberOfChildren = GetNumberOfChildren(hierachy, idx);      
            //if it does not contain any children (characters), it is not a license plate region
            if (numberOfChildren == 0) continue;

            using (VectorOfPoint contour = contours[idx])
            {
               if (CvInvoke.ContourArea(contour) > 400)
               {
                  if (numberOfChildren < 3)
                  {
                     //If the contour has less than 3 children, it is not a license plate (assuming a license plate has at least 3 characters)
                     //However we should search the children of this contour to see if any of them is a license plate
                     FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                        filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                     continue;
                  }

                  // Normalize the box angle to (-45, 45] by swapping width/height so that the
                  // width/height ratio test below is orientation-independent.
                  RotatedRect box = CvInvoke.MinAreaRect(contour);
                  if (box.Angle < -45.0)
                  {
                     float tmp = box.Size.Width;
                     box.Size.Width = box.Size.Height;
                     box.Size.Height = tmp;
                     box.Angle += 90.0f;
                  }
                  else if (box.Angle > 45.0)
                  {
                     float tmp = box.Size.Width;
                     box.Size.Width = box.Size.Height;
                     box.Size.Height = tmp;
                     box.Angle -= 90.0f;
                  }

                  double whRatio = (double) box.Size.Width/box.Size.Height;
                  if (!(3.0 < whRatio && whRatio < 10.0))
                     //if (!(1.0 < whRatio && whRatio < 2.0))
                  {
                     //if the width height ratio is not in the specific range, it is not a license plate 
                     //However we should search the children of this contour to see if any of them is a license plate
                     //Contour<Point> child = contours.VNext;
                     if (hierachy[idx, 2] > 0)
                        FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                           filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                     continue;
                  }

                  using (UMat tmp1 = new UMat())
                  using (UMat tmp2 = new UMat())
                  {
                     // Deskew: map the rotated box corners onto an axis-aligned rectangle.
                     PointF[] srcCorners = box.GetVertices();
                     
                     PointF[] destCorners = new PointF[] {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0), 
                        new PointF(box.Size.Width - 1, box.Size.Height - 1)};
                     
                     using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                     {
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));           
                     }

                     //resize the license plate such that the font is ~ 10-12. This size of font results in better accuracy from tesseract
                     Size approxSize = new Size(240, 180);
                     double scale = Math.Min(approxSize.Width/box.Size.Width, approxSize.Height/box.Size.Height);
                     Size newSize = new Size( (int)Math.Round(box.Size.Width*scale),(int) Math.Round(box.Size.Height*scale));
                     CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);

                     //removes some pixels from the edge
                     int edgePixelSize = 2;
                     Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                        tmp2.Size - new Size(2*edgePixelSize, 2*edgePixelSize));
                     UMat plate = new UMat(tmp2, newRoi);

                     UMat filteredPlate = FilterPlate(plate);

                     Tesseract.Character[] words;
                     StringBuilder strBuilder = new StringBuilder();
                     using (UMat tmp = filteredPlate.Clone())
                     {
                        _ocr.Recognize(tmp);
                        words = _ocr.GetCharacters();

                        // No characters recognized — not a usable plate; skip without recording it.
                        if (words.Length == 0) continue;

                        for (int i = 0; i < words.Length; i++)
                        {
                           strBuilder.Append(words[i].Text);
                        }
                     }

                     licenses.Add(strBuilder.ToString());
                     licensePlateImagesList.Add(plate);
                     filteredLicensePlateImagesList.Add(filteredPlate);
                     detectedLicensePlateRegionList.Add(box);

                  }
               }
            }
         }
      }
Пример #20
0
        // get all of the valid contour maps; valid means area >= Constants.MIN_AREA
        // this was not in their code, I added this feature, but I used their logic
        /// <summary>
        /// Extracts every sufficiently large contour from <paramref name="input"/> as a
        /// <see cref="ColorfulContourMap"/> (raw contour points plus a polygonal
        /// approximation, each point tagged with the source pixel color).
        /// </summary>
        /// <param name="input">Source image.</param>
        /// <param name="index">Index stored on every produced contour map.</param>
        /// <param name="mode">0 = black background (threshold 245), otherwise white background (threshold 100).</param>
        /// <returns>One map per contour whose area is at least <c>Constants.MIN_AREA</c>; empty list if none.</returns>
        public static List<ColorfulContourMap> getAllContourMap(Mat input, int index, int mode = 0)
        {
            List<ColorfulContourMap> result = new List<ColorfulContourMap>();

            // Convert to grayscale.
            MatImage m1 = new MatImage(input);
            m1.Convert();
            Mat gray = m1.Out();

            // Smooth and binarize; the inversion threshold depends on the expected background.
            if (mode == 0)
            {
                // use for black background
                MatImage m2 = new MatImage(gray);
                m2.SmoothGaussian(3);
                m2.ThresholdBinaryInv(245, 255);
                gray = m2.Out();
            }
            else
            {
                // use for white background
                MatImage m2 = new MatImage(gray);
                m2.SmoothGaussian(3);
                m2.ThresholdBinaryInv(100, 255);
                gray = m2.Out();
            }

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                // Hierarchy is unused — pass null instead of allocating a Mat that leaks.
                CvInvoke.FindContours(gray, contours, null, RetrType.List, ChainApproxMethod.ChainApproxNone);

                // The original unconditionally read contours[0] and crashed when no
                // contours were found; the loop below simply produces an empty result.
                for (int i = 0; i < contours.Size; i++)
                {
                    double area = Math.Abs(CvInvoke.ContourArea(contours[i], false)); //  Find the area of contour
                    if (area >= Constants.MIN_AREA)
                    {
                        VectorOfPoint contour = contours[i];
                        using (VectorOfPoint poly = new VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, poly, 1.0, true);

                            // Tag every contour and polygon point with its source color.
                            List<ColorfulPoint> cps = new List<ColorfulPoint>();
                            List<ColorfulPoint> pcps = new List<ColorfulPoint>();
                            foreach (Point p in contour.ToArray())
                            {
                                cps.Add(new ColorfulPoint { X = p.X, Y = p.Y, color = extractPointColor(p, input) });
                            }
                            foreach (Point p in poly.ToArray())
                            {
                                pcps.Add(new ColorfulPoint { X = p.X, Y = p.Y, color = extractPointColor(p, input) });
                            }
                            result.Add(new ColorfulContourMap(cps, pcps, index));
                        }
                    }
                }
            }

            return result;
        }
Пример #21
0
      /// <summary>
      /// Draws a filled rectangle on a blank image, finds its contour and asserts that the
      /// contour is convex; then checks the marshaled size relationship between MCvSeq and
      /// MCvContour. The large commented region is legacy MemStorage-based contour API usage.
      /// </summary>
      public void TestContour()
      {
         //Application.EnableVisualStyles();
         //Application.SetCompatibleTextRenderingDefault(false);
         using (Image<Gray, Byte> img = new Image<Gray, Byte>(100, 100, new Gray()))
         {
            // A filled 70x40 rectangle at (10,10); its contour must come back convex.
            Rectangle rect = new Rectangle(10, 10, 80 - 10, 50 - 10);
            img.Draw(rect, new Gray(255.0), -1);
            //ImageViewer.Show(img);
            // Probe points inside/outside the rectangle (only used by the legacy code below).
            PointF pIn = new PointF(60, 40);
            PointF pOut = new PointF(80, 100);

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint() )
            
            {
               CvInvoke.FindContours(img, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
               using (VectorOfPoint firstContour = contours[0])
               {
                  EmguAssert.IsTrue( CvInvoke.IsContourConvex(firstContour )  );
               }
            }
            /*
            using (MemStorage stor = new MemStorage())
            {
               //Contour<Point> cs = img.FindContours(CvEnum.ChainApproxMethod.ChainApproxSimple, CvEnum.RetrType.List, stor);
               //EmguAssert.IsTrue(cs.MCvContour.elem_size == Marshal.SizeOf(typeof(Point)));
               //EmguAssert.IsTrue(rect.Width * rect.Height == cs.Area);

               //EmguAssert.IsTrue(cs.Convex);
               //EmguAssert.IsTrue(rect.Width * 2 + rect.Height * 2 == cs.Perimeter);
               Rectangle rect2 = cs.BoundingRectangle;
               rect2.Width -= 1;
               rect2.Height -= 1;
               //rect2.Center.X -= 0.5;
               //rect2.Center.Y -= 0.5;
               //EmguAssert.IsTrue(rect2.Equals(rect));
               EmguAssert.IsTrue(cs.InContour(pIn) > 0);
               EmguAssert.IsTrue(cs.InContour(pOut) < 0);
               //EmguAssert.IsTrue(cs.Distance(pIn) == 10);
               //EmguAssert.IsTrue(cs.Distance(pOut) == -50);
               img.Draw(cs, new Gray(100), new Gray(100), 0, 1);

               MCvPoint2D64f rectangleCenter = new MCvPoint2D64f(rect.X + rect.Width / 2.0, rect.Y + rect.Height / 2.0);

               using (VectorOfPoint vp = new VectorOfPoint(cs.ToArray()))
               {    
                  MCvMoments moment = CvInvoke.Moments(vp, false);
                  MCvPoint2D64f center = moment.GravityCenter;
                  //EmguAssert.IsTrue(center.Equals(rectangleCenter));
               }
               
            }

            using (MemStorage stor = new MemStorage())
            {
               Image<Gray, Byte> img2 = new Image<Gray, byte>(300, 200);
               Contour<Point> c = img2.FindContours(Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple, Emgu.CV.CvEnum.RetrType.List, stor);
               EmguAssert.IsTrue(c == null);
            }*/
         }

         // MCvContour is expected to be an MCvSeq plus a Rectangle plus four ints.
         int s1 = Marshal.SizeOf(typeof(MCvSeq));
         int s2 = Marshal.SizeOf(typeof(MCvContour));
         int sizeRect = Marshal.SizeOf(typeof(Rectangle));
         EmguAssert.IsTrue(s1 + sizeRect + 4 * Marshal.SizeOf(typeof(int)) == s2);
      }
Пример #22
0
        /// <summary>
        /// Finds the contours of <paramref name="image"/>, approximates each one with
        /// <see cref="CvInvoke.ApproxPolyDP"/>, and returns those whose area exceeds
        /// <paramref name="minimumArea"/>. The caller owns (and must dispose) the returned vectors.
        /// </summary>
        /// <param name="image">Single-channel source image (left unmodified; a copy is processed).</param>
        /// <param name="apxMethod">Contour approximation method passed to FindContours.</param>
        /// <param name="retrievalType">Contour retrieval mode passed to FindContours.</param>
        /// <param name="accuracy">Absolute epsilon for ApproxPolyDP.</param>
        /// <param name="minimumArea">Contours with an approximated area at or below this are discarded.</param>
        /// <returns>List of approximated contours that passed the area filter.</returns>
        public static List<VectorOfPoint> GetContours(Image<Gray, Byte> image, ChainApproxMethod apxMethod = ChainApproxMethod.ChainApproxSimple, RetrType retrievalType = RetrType.List, double accuracy = 0.001d, double minimumArea = 10)
        {
            List<VectorOfPoint> convertedContours = new List<VectorOfPoint>();

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                // FindContours modifies its input, so run it on a throwaway copy.
                using (Image<Gray, Byte> tempImage = image.Copy())
                {
                    CvInvoke.FindContours(tempImage, contours, null, retrievalType, apxMethod);
                }

                int count = contours.Size;
                for (int i = 0; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                    {
                        VectorOfPoint approxContour = new VectorOfPoint();
                        CvInvoke.ApproxPolyDP(contour, approxContour, accuracy, false);
                        if (CvInvoke.ContourArea(approxContour, false) > minimumArea)
                        {
                            convertedContours.Add(approxContour);
                        }
                        else
                        {
                            // BUG FIX: rejected contours were previously leaked; VectorOfPoint
                            // wraps native memory and must be disposed when not handed to the caller.
                            approxContour.Dispose();
                        }
                    }
                }
            }

            return convertedContours;
        }
        /// <summary>
        /// Processes the current camera frame end-to-end: background subtraction, foreground
        /// (hand) extraction, skin filtering with configurable morphological and smoothing
        /// filters, contour / convex-hull / convexity-defect analysis to locate a finger
        /// control point, and finally optional mouse-cursor movement derived from that point.
        /// Publishes a FrameProcessedMessage with the intermediate images when done.
        /// </summary>
        private void ProcessFrame()
        {
            try
            {
                #region Background/Foreground
                // Difference between the current frame and the frame history buffer
                // (presumably a background model — confirm in BackgroundSubstractionOptions).
                Image<Bgr, byte> difference = BackgroundSubstractionOptions.Substract(_currentFrame, _frameHistoryBuffer);

                Rectangle? handArea = ForegoundExtractionOptions.HighlightForeground(difference);
                Image<Bgr, byte> skinDetectionFrame = _currentFrame.Copy();

                // Blank out everything outside the detected hand rectangle before skin filtering.
                if (handArea.HasValue)
                    ForegoundExtractionOptions.CutBackground(skinDetectionFrame, handArea.Value);
                #endregion

                #region Skin filtering / Morphological / Smooth filtering
                Image<Gray, byte> skinDetectionFrameGray = SkinFilteringOptions.ActiveItem.FilterFrame(skinDetectionFrame);

                // The user-configured morphological operation stack is shared with the UI thread,
                // so hold the read lock while iterating it.
                MorphologicalFilteringOptions.StackSync.EnterReadLock();
                foreach (var operation in MorphologicalFilteringOptions.OperationStack)
                {
                    if (operation.FilterType == Model.Enums.MorphologicalFilterType.Dilatation)
                    {
                        CvInvoke.Dilate(skinDetectionFrameGray, skinDetectionFrameGray, operation.GetKernel(),
                            new Point(operation.KernelAnchorX, operation.KernelAnchorY), operation.Intensity, operation.KernelBorderType,
                            new MCvScalar(operation.KernelBorderThickness));
                    }
                    else
                    {
                        CvInvoke.Erode(skinDetectionFrameGray, skinDetectionFrameGray, operation.GetKernel(),
                            new Point(operation.KernelAnchorX, operation.KernelAnchorY), operation.Intensity, operation.KernelBorderType,
                            new MCvScalar(operation.KernelBorderThickness));
                    }
                }
                // NOTE(review): if Dilate/Erode throws, the read lock is never released —
                // consider a try/finally around the loop.
                MorphologicalFilteringOptions.StackSync.ExitReadLock();

                skinDetectionFrameGray = SmoothFilteringOptions.FilterFrame(skinDetectionFrameGray);
                #endregion

                #region Contours / ConvexHull / ConvexityDefects
                Image<Bgr, byte> fingerTrackerFrame = _currentFrame.Copy();

                // Candidate fingertip points collected from the convexity defects below.
                List<Point> fingers = new List<Point>();

                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(skinDetectionFrameGray.Copy(), contours, null, RetrType.List, FingerTrackingOptions.ApproxMethod);

                    if (contours.Size > 0)
                    {
                        // Pick the contour with the largest area — assumed to be the hand.
                        // NOTE(review): each contours[i] accessor returns a new native VectorOfPoint
                        // that is never disposed here (only the winner is, via the using below).
                        VectorOfPoint biggestContour = contours[0];

                        if (contours.Size > 1)
                        {
                            for (int i = 1; i < contours.Size; i++)
                            {
                                if (CvInvoke.ContourArea(contours[i], false) > CvInvoke.ContourArea(biggestContour, false))
                                    biggestContour = contours[i];
                            }
                        }

                        if (CvInvoke.ContourArea(biggestContour, false) > FingerTrackingOptions.MinContourArea)
                        {
                            using (VectorOfPoint contour = biggestContour)
                            {
                                using (VectorOfPoint approxContour = new VectorOfPoint())
                                {
                                    // Simplify the contour; epsilon scales with its perimeter.
                                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * FingerTrackingOptions.PerimeterScalingFactor.Value, true);

                                    fingerTrackerFrame.Draw(approxContour.ToArray(), new Bgr(FingerTrackingOptions.ContourHighlightColor), 2);
                                    // Hull is computed twice: as points (for drawing) and as indices
                                    // (required by ConvexityDefects).
                                    VectorOfPoint convexHull = new VectorOfPoint();
                                    VectorOfInt intHull = new VectorOfInt();
                                    CvInvoke.ConvexHull(approxContour, convexHull, FingerTrackingOptions.ConvexHullCW);
                                    CvInvoke.ConvexHull(approxContour, intHull, FingerTrackingOptions.ConvexHullCW);
                                    fingerTrackerFrame.DrawPolyline(convexHull.ToArray(), true, new Bgr(FingerTrackingOptions.ConvexHullColor), 2);

                                    // Minimum-area rectangle; its center is used as the hand center below.
                                    var countourRect = CvInvoke.MinAreaRect(approxContour);
                                    fingerTrackerFrame.Draw(new CircleF(new PointF(countourRect.Center.X, countourRect.Center.Y), 3), new Bgr(FingerTrackingOptions.DefectLinesColor), 2);

                                    // NOTE(review): convexHull, intHull and defects are never disposed.
                                    Mat defects = new Mat();
                                    CvInvoke.ConvexityDefects(approxContour, intHull, defects);

                                    if (!defects.IsEmpty)
                                    {
                                        var contourPoints = approxContour.ToArray();

                                        // Copy the defect matrix into managed memory; each row holds
                                        // (startIdx, endIdx, farthest/depthIdx, ...) indices into contourPoints.
                                        Matrix<int> m = new Matrix<int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
                                        defects.CopyTo(m);

                                        for (int i = 0; i < m.Rows; i++)
                                        {
                                            int startIdx = m.Data[i, 0];
                                            int endIdx = m.Data[i, 1];
                                            int depthIdx = m.Data[i, 2];

                                            Point startPoint = contourPoints[startIdx];
                                            Point endPoint = contourPoints[endIdx];
                                            Point depthPoint = contourPoints[depthIdx];

                                            LineSegment2D startDepthLine = new LineSegment2D(startPoint, depthPoint);
                                            LineSegment2D depthEndLine = new LineSegment2D(depthPoint, endPoint);

                                            LineSegment2D startCenterLine = new LineSegment2D(startPoint, new Point((int)countourRect.Center.X, (int)countourRect.Center.Y));
                                            LineSegment2D depthCenterLine = new LineSegment2D(depthPoint, new Point((int)countourRect.Center.X, (int)countourRect.Center.Y));
                                            LineSegment2D endCenterLine = new LineSegment2D(endPoint, new Point((int)countourRect.Center.X, (int)countourRect.Center.Y));

                                            CircleF startCircle = new CircleF(startPoint, 5);
                                            CircleF depthCircle = new CircleF(depthPoint, 5);
                                            CircleF endCircle = new CircleF(endPoint, 5);

                                            // Defect start points above the hand center (image Y grows
                                            // downward) are treated as fingertip candidates.
                                            if (startPoint.Y < countourRect.Center.Y)
                                                fingers.Add(startPoint);

                                            if (!FingerTrackingOptions.TrackOnlyControlPoint)
                                            {
                                                fingerTrackerFrame.Draw(startCircle, new Bgr(FingerTrackingOptions.DefectStartPointHighlightColor), 2);
                                                fingerTrackerFrame.Draw(depthCircle, new Bgr(FingerTrackingOptions.DefectDepthPointHighlightColor), 2);
                                                fingerTrackerFrame.Draw(endCircle, new Bgr(FingerTrackingOptions.DefectEndPointHighlightColor), 2);

                                                fingerTrackerFrame.Draw(startDepthLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                                //fingerTrackerFrame.Draw(depthEndLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);

                                                fingerTrackerFrame.Draw(startCenterLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                                //fingerTrackerFrame.Draw(depthCenterLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                               // fingerTrackerFrame.Draw(endCenterLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                            }
                                        }

                                        // Control point: either the hand center or the topmost
                                        // (smallest Y) fingertip candidate.
                                        _lastControlPoint = _currentControlPoint;
                                        _currentControlPoint = MouseControlOptions.UseHandCenter ? new Point((int)countourRect.Center.X, (int)countourRect.Center.Y)
                                                    : fingers.FirstOrDefault(f => f.Y == fingers.Min(line => line.Y));
                                        fingers.Clear();

                                        if (FingerTrackingOptions.TrackOnlyControlPoint)
                                        {
                                            // Replace the tracker view with a black canvas showing only the control point.
                                            fingerTrackerFrame = new Image<Bgr, byte>(fingerTrackerFrame.Width, fingerTrackerFrame.Height, new Bgr(Color.Black));
                                            fingerTrackerFrame.Draw(new CircleF(_currentControlPoint, 5), new Bgr(Color.Red), 2);
                                        }

                                    }
                                }
                            }
                        }
                    }
                }
                #endregion

                #region Mouse control
                // Move the cursor only when both control points are valid (not the -1,-1 sentinel),
                // the point actually moved, and the jump is small enough (< 1/10 of the frame) to
                // reject tracking glitches.
                if (_currentControlPoint.X != -1 && _currentControlPoint.Y != -1 && _lastControlPoint.X != -1 && _lastControlPoint.Y != -1
                         && _currentControlPoint.X != _lastControlPoint.X && _currentControlPoint.Y != _lastControlPoint.Y
                            && Math.Abs(_currentControlPoint.X - _lastControlPoint.X) < (MouseControlOptions.FrameWidth / 10)
                                 && Math.Abs(_currentControlPoint.Y - _lastControlPoint.Y) < (MouseControlOptions.FrameHeight / 10))
                {
                    int frameX = _currentControlPoint.X;
                    int frameY = _currentControlPoint.Y;

                    int moveX = _currentControlPoint.X - _lastControlPoint.X;
                    int moveY = _currentControlPoint.Y - _lastControlPoint.Y;

                    int sensitiveX = 1;
                    int sensitiveY = 1;

                    // Positive sensitivity scales frame motion up to screen coordinates;
                    // negative sensitivity scales it down (inverse ratio).
                    if (MouseControlOptions.MouseSensitive.Value > 0)
                    {
                        sensitiveX = (int)(((double)MouseControlOptions.ScreenWidth / MouseControlOptions.FrameWidth) * MouseControlOptions.MouseSensitive.Value);
                        sensitiveY = (int)(((double)MouseControlOptions.ScreenHeight / MouseControlOptions.FrameHeight) * MouseControlOptions.MouseSensitive.Value);
                    }
                    else if (MouseControlOptions.MouseSensitive.Value < 0)
                    {
                        sensitiveX = (int)(((double)MouseControlOptions.FrameWidth / MouseControlOptions.ScreenWidth) * MouseControlOptions.MouseSensitive.Value * -1);
                        sensitiveY = (int)(((double)MouseControlOptions.FrameHeight / MouseControlOptions.ScreenHeight) * MouseControlOptions.MouseSensitive.Value * -1);
                    }

                    // X is negated — presumably to mirror the camera image; verify against the capture setup.
                    moveX *= sensitiveX * -1;
                    moveY *= sensitiveY;

                    Point currentMousePosition = GetMousePosition();

                    int destinationX = currentMousePosition.X + moveX;
                    int destinationY = currentMousePosition.Y + moveY;

                    Messanger.PublishOnCurrentThread(new FingerMovedMessage(MouseControlOptions.ControlMouse, frameX, frameY, destinationX, destinationY));

                    if (MouseControlOptions.ControlMouse && MouseControlOptions.MouseSensitive.Value != 0 && destinationX >= 0 && destinationY >= 0)
                        SetCursorPos(destinationX, destinationY);
                }
                #endregion

                Messanger.PublishOnCurrentThread(new FrameProcessedMessage(_currentFrame, difference, skinDetectionFrameGray, fingerTrackerFrame));
            }
            // NOTE(review): empty catch swallows every exception silently, hiding tracking
            // failures — at minimum log the exception before discarding it.
            catch { }
        }
Пример #24
0
      /// <summary>
      /// Find groups of Extremal Regions that are organized as text blocks.
      /// </summary>
      /// <param name="image">The image where ER grouping is to be perform on</param>
      /// <param name="channels">Array of single channel images from which the regions were extracted</param>
      /// <param name="erstats">Vector of ER's retrieved from the ERFilter algorithm from each channel</param>
      /// <param name="groupMethods">The grouping methods</param>
      /// <param name="groupingTrainedFileName">The XML or YAML file with the classifier model (e.g. trained_classifier_erGrouping.xml)</param>
      /// <param name="minProbability">The minimum probability for accepting a group.</param>
      /// <returns>The output of the algorithm that indicates the text regions</returns>
      public static System.Drawing.Rectangle[] ERGrouping(IInputArray image, IInputArrayOfArrays channels, VectorOfERStat[] erstats, GroupingMethod groupMethods = GroupingMethod.OrientationHoriz, String groupingTrainedFileName = null, float minProbability = 0.5f)
      {
         // Collect the native pointers of the per-channel ERStat vectors so they can be
         // passed to the unmanaged grouping function as a single pinned array.
         IntPtr[] erstatPtrs = new IntPtr[erstats.Length];

         for (int i = 0; i < erstatPtrs.Length; i++)
         {
            erstatPtrs[i] = erstats[i].Ptr;
         }

         using (VectorOfVectorOfPoint regionGroups = new VectorOfVectorOfPoint())
         using (VectorOfRect groupsBoxes = new VectorOfRect())
         using (InputArray iaImage = image.GetInputArray())
         using (InputArray iaChannels = channels.GetInputArray())
         using (CvString s = (groupingTrainedFileName == null ? new CvString() : new CvString(groupingTrainedFileName)))
         {
            GCHandle erstatsHandle = GCHandle.Alloc(erstatPtrs, GCHandleType.Pinned);
            // BUG FIX: free the pinned handle even if the native call throws; a leaked
            // pinned GCHandle permanently fragments the GC heap.
            try
            {
               CvERGrouping(
                  iaImage, iaChannels,
                  erstatsHandle.AddrOfPinnedObject(), erstatPtrs.Length,
                  regionGroups, groupsBoxes,
                  groupMethods,
                  s, minProbability);
            }
            finally
            {
               erstatsHandle.Free();
            }
            return groupsBoxes.ToArray();
         }
      }
Пример #25
0
      /*
      public static void TestDrawLine(IntPtr img, int startX, int startY, int endX, int endY, MCvScalar color)
      {
         TestDrawLine(img, startX, startY, endX, endY, color.v0, color.v1, color.v2, color.v3);
      }

      [DllImport(CvInvoke.EXTERN_LIBRARY, CallingConvention = CvInvoke.CvCallingConvention, EntryPoint="testDrawLine")]
      private static extern void TestDrawLine(IntPtr img, int startX, int startY, int endX, int endY, double v0, double v1, double v2, double v3);
      */

      /// <summary>
      /// Runs chamfer matching: matches a template edge image against a search edge image,
      /// scoring candidates by the distance from template pixels to the nearest image pixels
      /// and by the orientation alignment of their contours.
      /// </summary>
      /// <param name="img">The edge image where search is performed</param>
      /// <param name="templ">The template (an edge image)</param>
      /// <param name="contours">The output contours</param>
      /// <param name="cost">The cost associated with the matching</param>
      /// <param name="templScale">The template scale, use 1 for default</param>
      /// <param name="maxMatches">The maximum number of matches, use 20 for default</param>
      /// <param name="minMatchDistance">The minimum match distance. use 1.0 for default</param>
      /// <param name="padX">PadX, use 3 for default</param>
      /// <param name="padY">PadY, use 3 for default</param>
      /// <param name="scales">Scales, use 5 for default</param>
      /// <param name="minScale">Minimum scale, use 0.6 for default</param>
      /// <param name="maxScale">Maximum scale, use 1.6 for default</param>
      /// <param name="orientationWeight">Orientation weight, use 0.5 for default</param>
      /// <param name="truncate">Truncate, use 20 for default</param>
      /// <returns>The number of matches</returns>
      public static int cvChamferMatching(Image<Gray, Byte> img, Image<Gray, Byte> templ,
         out Point[][] contours, out float[] cost,
         double templScale, int maxMatches,
         double minMatchDistance, int padX,
         int padY, int scales, double minScale, double maxScale,
         double orientationWeight, double truncate)
      {
         // Native output buffers; copied into managed arrays before disposal.
         using (Emgu.CV.Util.VectorOfVectorOfPoint matchedContours = new Util.VectorOfVectorOfPoint())
         using (Emgu.CV.Util.VectorOfFloat matchCosts = new Util.VectorOfFloat())
         {
            int matchCount = _cvChamferMatching(
               img, templ, matchedContours, matchCosts,
               templScale, maxMatches, minMatchDistance,
               padX, padY, scales, minScale, maxScale,
               orientationWeight, truncate);
            contours = matchedContours.ToArray();
            cost = matchCosts.ToArray();
            return matchCount;
         }
      }
Пример #26
0
      /// <summary>
      /// Recursively walks a contour hierarchy looking for octagonal (stop-sign shaped)
      /// contours. Each accepted candidate is cropped, binarized, masked to the contour
      /// region, and verified via feature matching before being appended to
      /// <paramref name="stopSignList"/> / <paramref name="boxList"/>.
      /// </summary>
      /// <param name="img">Source BGR image the contours were found in.</param>
      /// <param name="stopSignList">Output: binarized candidate patches (ownership passes to the caller).</param>
      /// <param name="boxList">Output: bounding boxes of the accepted candidates.</param>
      /// <param name="contours">All contours of the image.</param>
      /// <param name="hierachy">Contour hierarchy; [i,0] = next sibling, [i,2] = first child.</param>
      /// <param name="idx">Index of the first contour to inspect at this level.</param>
      private void FindStopSign(Mat img, List<Mat> stopSignList, List<Rectangle> boxList, VectorOfVectorOfPoint contours, int[,] hierachy, int idx)
      {
         for (; idx >= 0; idx = hierachy[idx, 0])
         {
            using (VectorOfPoint c = contours[idx])
            using (VectorOfPoint approx = new VectorOfPoint())
            {
               CvInvoke.ApproxPolyDP(c, approx, CvInvoke.ArcLength(c, true) * 0.02, true);
               double area = CvInvoke.ContourArea(approx);
               if (area > 200)
               {
                  double ratio = CvInvoke.MatchShapes(_octagon, approx, Emgu.CV.CvEnum.ContoursMatchType.I3);

                  if (ratio > 0.1) //not a good match of contour shape
                  {
                     //check children
                     if (hierachy[idx, 2] >= 0)
                        FindStopSign(img, stopSignList, boxList, contours, hierachy, hierachy[idx, 2]);
                     continue;
                  }

                  Rectangle box = CvInvoke.BoundingRectangle(c);

                  Mat candidate = new Mat();
                  using (Mat tmp = new Mat(img, box))
                     CvInvoke.CvtColor(tmp, candidate, ColorConversion.Bgr2Gray);

                  // BUG FIX: candidate (and the keypoint/descriptor/mask buffers below) used to
                  // leak native memory whenever the feature-match verification failed. Dispose
                  // candidate unless it is handed over to stopSignList.
                  bool candidateAccepted = false;
                  try
                  {
                     //set the value of pixels not in the contour region to zero
                     using (Mat contourMask = new Mat(candidate.Size.Height, candidate.Width, DepthType.Cv8U, 1))
                     {
                        contourMask.SetTo(new MCvScalar(0));
                        // Offset by -box location so the contour lands inside the cropped patch.
                        CvInvoke.DrawContours(contourMask, contours, idx, new MCvScalar(255), -1, LineType.EightConnected, null, int.MaxValue, new Point(-box.X, -box.Y));

                        // Binarize around the mean intensity inside the contour, then zero everything outside it.
                        double mean = CvInvoke.Mean(candidate, contourMask).V0;
                        CvInvoke.Threshold(candidate, candidate, mean, 255, ThresholdType.Binary);
                        CvInvoke.BitwiseNot(candidate, candidate);
                        CvInvoke.BitwiseNot(contourMask, contourMask);

                        candidate.SetTo(new MCvScalar(0), contourMask);
                     }

                     int minMatchCount = 8;
                     double uniquenessThreshold = 0.8;
                     using (VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint())
                     using (Mat observedDescriptor = new Mat())
                     {
                        _detector.DetectAndCompute(candidate, null, observedKeypoints, observedDescriptor, false);

                        if (observedKeypoints.Size >= minMatchCount)
                        {
                           int k = 2;

                           using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                           {
                              _modelDescriptorMatcher.KnnMatch(observedDescriptor, matches, k, null);
                              using (Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1))
                              {
                                 mask.SetTo(new MCvScalar(255));
                                 Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                 int nonZeroCount = CvInvoke.CountNonZero(mask);
                                 if (nonZeroCount >= minMatchCount)
                                 {
                                    boxList.Add(box);
                                    stopSignList.Add(candidate);
                                    candidateAccepted = true;
                                 }
                              }
                           }
                        }
                     }
                  }
                  finally
                  {
                     if (!candidateAccepted)
                        candidate.Dispose();
                  }
               }
            }
         }
      }
Пример #27
0
      /// <summary>
      /// Detects stop-sign candidates in a BGR image: blurs it, isolates red pixels,
      /// closes small gaps, extracts Canny edges, and hands the resulting contour tree
      /// to <c>FindStopSign</c> for shape/feature verification.
      /// </summary>
      /// <param name="img">Source BGR image.</param>
      /// <param name="stopSignList">Output: candidate patches accepted by FindStopSign.</param>
      /// <param name="boxList">Output: bounding boxes of the accepted candidates.</param>
      public void DetectStopSign(Mat img, List<Mat> stopSignList, List<Rectangle> boxList)
      {
         // BUG FIX: the intermediate Mats were never disposed and leaked native memory
         // on every call; they are all temporaries, so scope them with using.
         using (Mat smoothImg = new Mat())
         using (Mat smoothedRedMask = new Mat())
         using (Mat canny = new Mat())
         using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
         {
            CvInvoke.GaussianBlur(img, smoothImg, new Size(5, 5), 1.5, 1.5);
            //Image<Bgr, Byte> smoothImg = img.SmoothGaussian(5, 5, 1.5, 1.5);

            GetRedPixelMask(smoothImg, smoothedRedMask);

            //Use Dilate followed by Erode to eliminate small gaps in some contour.
            CvInvoke.Dilate(smoothedRedMask, smoothedRedMask, null, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
            CvInvoke.Erode(smoothedRedMask, smoothedRedMask, null, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);

            // NOTE: thresholds are passed high-then-low; OpenCV's Canny uses the smaller
            // of the two as the lower hysteresis threshold, so this behaves as (50, 100).
            CvInvoke.Canny(smoothedRedMask, canny, 100, 50);
            int[,] hierachy = CvInvoke.FindContourTree(canny, contours, ChainApproxMethod.ChainApproxSimple);

            //Image<Gray, Byte> tmp = new Image<Gray, byte>(canny.Size);
            //CvInvoke.DrawContours(tmp, contours, -1, new MCvScalar(255, 255, 255));
            //Emgu.CV.UI.ImageViewer.Show(tmp);

            if (hierachy.GetLength(0) > 0)
               FindStopSign(img, stopSignList, boxList, contours, hierachy, 0);
         }
      }
Пример #28
0
         /// <summary>
         /// Scans the sibling chain of a contour hierarchy for square (tile-shaped)
         /// contours and renders the accepted ones into <c>resultBox</c>.
         /// </summary>
         /// <param name="image">Source image; used only to size the output canvas.</param>
         /// <param name="contours">All contours found in the image.</param>
         /// <param name="hierachy">Contour hierarchy; [i,0] is the next-sibling index, -1 ends the chain.</param>
         /// <param name="hIndex">Index of the first contour to inspect.</param>
         public void FindTiles(Mat image, VectorOfVectorOfPoint contours, int[,] hierachy, int hIndex)
        {
            Mat resultContours = new Mat(image.Size, image.Depth, image.NumberOfChannels);
            // BUG FIX: a freshly constructed Mat is NOT zero-initialized; clear it so the
            // shapes below are drawn on black instead of uninitialized memory.
            resultContours.SetTo(new MCvScalar(0));

            // originalBox.Image = image;
            // Walk the sibling chain of root-level contours.
            for (; hIndex >= 0; hIndex = hierachy[hIndex, 0])
            {
                using (VectorOfPoint c = contours[hIndex])
                using (VectorOfPoint approx = new VectorOfPoint())
                {
                    // Approximate with an epsilon of 2% of the contour's perimeter.
                    CvInvoke.ApproxPolyDP(c, approx, CvInvoke.ArcLength(c, true) * .02, true);
                    double area = CvInvoke.ContourArea(approx);
                    //filter out the small contours...
                    //if (area > 20000 && area < 30000 ) //3/4" tiles
                    if (area > 0 && area < 100000)
                    {
                        //match the shape to the square
                        double ratio = CvInvoke.MatchShapes(_square, approx, Emgu.CV.CvEnum.ContoursMatchType.I3);

                        if (ratio < .1)
                        {
                            CvInvoke.FillConvexPoly(resultContours, c, new MCvScalar(255), LineType.AntiAlias);
                            var M = CvInvoke.Moments(c);
                            // BUG FIX: guard against a zero moment (degenerate contour),
                            // which previously divided by zero when computing the centroid.
                            if (M.M00 != 0)
                            {
                                int cx = (int)(M.M10 / M.M00);
                                int cy = (int)(M.M01 / M.M00);

                                // Outline the approximated polygon, wrapping the last vertex to the first.
                                for (int i = 0; i < approx.Size; i++)
                                {
                                    int second = (i + 1 == approx.Size) ? 0 : i + 1;
                                    CvInvoke.Line(resultContours,
                                        new System.Drawing.Point(approx[i].X, approx[i].Y),
                                        new System.Drawing.Point(approx[second].X, approx[second].Y),
                                        new MCvScalar(128), 10);
                                }
                                // 100x100 marker box centered on the tile centroid.
                                CvInvoke.Rectangle(resultContours, new Rectangle(new System.Drawing.Point(cx - 50, cy - 50), new Size(100, 100)), new MCvScalar(128), 2);
                            }
                        }
                    }
                }
                // NOTE: removed an unused local `color` that was assigned but never read.
            }
            // Ownership of resultContours passes to the UI control.
            resultBox.Image = resultContours;
        }
        /// <summary>
        /// Computes, for every polygon registered for the camera of <paramref name="reference0"/>,
        /// the average BGR color of that polygon's region in the two reference images
        /// (snow reference and normal reference) and persists both averages on the polygon record.
        /// </summary>
        /// <param name="reference0">Path to the "snow" reference image.</param>
        /// <param name="reference1">Path to the "normal" reference image.</param>
        /// <param name="dataContext">LINQ-to-SQL context for images and polygons.</param>
        public void CalculateAverageBrightessForArea(string reference0, string reference1, StrassenbilderMetaDataContext dataContext)
        {
            // Load the image metadata records by file name (without extension).
            // NOTE(review): image0 may be null if no record matches (NRE below on image0.Place),
            // and image1 is loaded but never used — confirm whether it can be dropped.
            string name0 = Path.GetFileNameWithoutExtension(reference0);
            string name1 = Path.GetFileNameWithoutExtension(reference1);
            Image image0 = dataContext.Images.Where(i => i.Name == name0).FirstOrDefault();
            Image image1 = dataContext.Images.Where(i => i.Name == name1).FirstOrDefault();

            // Load all polygons defined for the camera this image belongs to.
            IEnumerable<Polygon> polygons = dataContext.Polygons.Where(p => p.CameraName == image0.Place);

            // Process each polygon mask separately.
            foreach (var polygon in polygons)
            {
                IList<Point> polygonPoints = JsonConvert.DeserializeObject<Media.PointCollection>(polygon.PolygonPointCollection);

                // Load both reference images masked to the polygon region.
                Drawing.Bitmap bitmap0 = GetMaskedBitmap(reference0, polygonPoints);
                Drawing.Bitmap bitmap1 = GetMaskedBitmap(reference1, polygonPoints);

                Image<Bgr, byte> cvImage0 = new Image<Bgr, byte>(bitmap0);
                Image<Bgr, byte> cvImage1 = new Image<Bgr, byte>(bitmap1);

                // Generate a mask image from the polygon.
                // NOTE(review): the Mats/Images created per iteration are never disposed.
                Mat matMask = new Mat(new Drawing.Size(cvImage0.Cols, cvImage0.Rows), DepthType.Cv8U, 3);
                // Scale the polygon points to the image resolution.
                List<Point> scaledPoints = GetScaledPoints(polygonPoints, cvImage0.Rows, cvImage0.Cols);
                List<Drawing.Point> scaledDrawingPoints = GetPolygonPoints(scaledPoints, cvImage0.Rows, cvImage0.Cols);
                // Fill the polygon region with white on the mask.
                using (VectorOfPoint vPoint = new VectorOfPoint(scaledDrawingPoints.ToArray()))
                using (VectorOfVectorOfPoint vvPoint = new VectorOfVectorOfPoint(vPoint))
                {
                    CvInvoke.FillPoly(matMask, vvPoint, new Bgr(255, 255, 255).MCvScalar);
                }
                Image<Gray, byte> imageMask = new Image<Gray, byte>(matMask.Bitmap);

                // Compute the average color of each reference image under the mask.
                Bgr result0 = cvImage0.GetAverage(imageMask);
                Bgr result1 = cvImage1.GetAverage(imageMask);
                // Persist both averages on the polygon record.
                polygon.BgrSnow = JsonConvert.SerializeObject(result0);
                polygon.BgrNormal = JsonConvert.SerializeObject(result1);
                dataContext.SubmitChanges();
            }
        }
        /// <summary>
        /// Classifies an image region against the polygon's stored reference colors:
        /// computes the average BGR color under the polygon mask and compares its
        /// distance to the stored "snow" and "normal" references.
        /// </summary>
        /// <param name="imageFilePath">Path of the image to classify.</param>
        /// <param name="polygon">Polygon record holding the serialized reference colors.</param>
        /// <param name="pointCollection">Polygon outline in source coordinates.</param>
        /// <returns>1 = closer to snow, -1 = closer to normal, 0 = inconclusive (distance difference &lt; 10).</returns>
        public short Calculate(string imageFilePath, Polygon polygon, Media.PointCollection pointCollection)
        {
            // Load the image to analyze.
            // Drawing.Bitmap maskedBitmap = GetMaskedBitmap(imageFilePath, pointCollection);
            Bgr result;

            // BUG FIX: the image, mask Mat, and mask image were never disposed and leaked
            // native memory on every call; scope them with using.
            using (Image<Bgr, byte> cvImage = new Image<Bgr, byte>(imageFilePath))
            using (Mat matMask = new Mat(new Drawing.Size(cvImage.Cols, cvImage.Rows), DepthType.Cv8U, 3))
            {
                // Scale the polygon points to the image resolution.
                List<Point> scaledPoints = GetScaledPoints(pointCollection, cvImage.Rows, cvImage.Cols);
                List<Drawing.Point> scaledDrawingPoints = GetPolygonPoints(scaledPoints, cvImage.Rows, cvImage.Cols);
                // Fill the polygon region with white on the mask.
                using (VectorOfPoint vPoint = new VectorOfPoint(scaledDrawingPoints.ToArray()))
                using (VectorOfVectorOfPoint vvPoint = new VectorOfVectorOfPoint(vPoint))
                {
                    CvInvoke.FillPoly(matMask, vvPoint, new Bgr(255, 255, 255).MCvScalar);
                }

                // Average color of the image under the polygon mask.
                using (Image<Gray, byte> imageMask = new Image<Gray, byte>(matMask.Bitmap))
                {
                    result = cvImage.GetAverage(imageMask);
                }
            }

            // Compare against the stored reference colors (Manhattan distance in BGR space).
            Bgr snow = JsonConvert.DeserializeObject<Bgr>(polygon.BgrSnow);
            Bgr normal = JsonConvert.DeserializeObject<Bgr>(polygon.BgrNormal);

            double resultSnow = Math.Abs(snow.Blue - result.Blue) + Math.Abs(snow.Green - result.Green) + Math.Abs(snow.Red - result.Red);
            double resultNormal = Math.Abs(normal.Blue - result.Blue) + Math.Abs(normal.Green - result.Green) + Math.Abs(normal.Red - result.Red);

            if (Math.Abs(resultSnow - resultNormal) < 10)
            {
                return 0;
            }
            else if (resultSnow < resultNormal)
            {
                return 1;
            }
            else
            {
                return -1;
            }
        }