/// <summary>
/// Approximates an elliptic arc with a polyline.
/// Computes the vertices of a polyline that approximates the specified
/// elliptic arc. It is used by cv::ellipse.
/// </summary>
/// <param name="center">Center of the arc.</param>
/// <param name="axes">Half of the size of the ellipse main axes. See the ellipse for details.</param>
/// <param name="angle">Rotation angle of the ellipse in degrees. See the ellipse for details.</param>
/// <param name="arcStart">Starting angle of the elliptic arc in degrees.</param>
/// <param name="arcEnd">Ending angle of the elliptic arc in degrees.</param>
/// <param name="delta">Angle between the subsequent polyline vertices. It defines the approximation accuracy.</param>
/// <returns>Output vector of polyline vertices.</returns>
public static Point[] Ellipse2Poly(Point center, Size axes, int angle,
    int arcStart, int arcEnd, int delta)
{
    // The native call fills a std::vector<cv::Point>; copy it out into a
    // managed array and release the native vector.
    using (var polylineVec = new VectorOfPoint())
    {
        NativeMethods.imgproc_ellipse2Poly(
            center, axes, angle, arcStart, arcEnd, delta, polylineVec.CvPtr);
        return polylineVec.ToArray();
    }
}
/// <summary>
/// Performs object detection on a GPU image and returns the locations of
/// detected objects (no grouping / multi-scale handling).
/// </summary>
/// <param name="img">Source image on the GPU.</param>
/// <param name="hitThreshold">Threshold for the distance between features and the SVM classifying plane.</param>
/// <param name="winStride">Window stride; must be a multiple of the block stride.</param>
/// <param name="padding">Padding added around the image before detection.</param>
/// <returns>Top-left corners of the detected object windows.</returns>
/// <exception cref="ObjectDisposedException">The descriptor has been disposed.</exception>
/// <exception cref="ArgumentNullException"><paramref name="img"/> is null.</exception>
public virtual Point[] Detect(GpuMat img, double hitThreshold, Size winStride, Size padding)
{
    if (disposed)
        throw new ObjectDisposedException("HOGDescriptor");
    if (img == null)
        throw new ArgumentNullException(nameof(img)); // FIX: nameof instead of a magic string

    using (var flVec = new VectorOfPoint())
    {
        NativeMethods.HOGDescriptor_detect(ptr, img.CvPtr, flVec.CvPtr, hitThreshold, winStride, padding);
        // Copy the native std::vector<cv::Point> into a managed Point[] and return it.
        return flVec.ToArray();
    }
}
/// <summary>
/// Detects road signs in <c>imgInput</c>: downward-pointing triangles
/// (sign A-7, "yield") and squares rotated ~45° (sign D-1, "priority road").
/// Updates the progress bar and the preview picture boxes along the way.
/// </summary>
private void wykryjZnaki()
{
    // Detected triangles (candidate A-7 signs).
    List<Triangle2DF> triangleList = new List<Triangle2DF>();
    // Detected rectangles/squares (candidate D-1 signs).
    List<RotatedRect> boxList = new List<RotatedRect>();
    zwiekszProgressBar(1);

    // Edge image: white contours on a black background.
    // FIX: the original allocated a blank Image<Gray, byte> and immediately
    // overwrote the reference with the Canny result; that allocation is removed.
    Image<Gray, byte> canny_zdj = imgInput.Canny(300, 250);

    // Show the edge image, stretched to fit.
    zdjecieCannyBox.Image = canny_zdj.Bitmap;
    zdjecieCannyBox.SizeMode = PictureBoxSizeMode.StretchImage;
    zwiekszProgressBar(2);

    LineSegment2D[] lines = CvInvoke.HoughLinesP(
        canny_zdj,
        1,              // distance resolution in pixel-related units
        Math.PI / 45.0, // angle resolution in radians
        20,             // threshold
        30,             // min line width
        10);            // gap between lines

    Image<Gray, byte> imgOut = canny_zdj.Convert<Gray, byte>().ThresholdBinary(new Gray(50), new Gray(200));
    zwiekszProgressBar(1);

    // Smooth the input image.
    imgSmooth = imgInput.PyrDown().PyrUp();
    imgSmooth._SmoothGaussian(3);

    // Keep only pixels in the yellow BGR range.
    imgOut = imgSmooth.InRange(new Bgr(0, 140, 150), new Bgr(80, 255, 255));
    imgOut = imgOut.PyrDown().PyrUp();
    imgOut._SmoothGaussian(3);
    zwiekszProgressBar(2);

    // contour index -> contour area, for contours accepted as signs
    Dictionary<int, double> dict = new Dictionary<int, double>();

    // FIX: the contour vector is now disposed; the original also allocated a
    // hierarchy Mat ("hier") that was never passed to FindContours and leaked.
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        // Find contours matching (among others) the colour constraint above.
        CvInvoke.FindContours(imgOut, contours, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
        label1.Text = contours.Size.ToString();

        if (contours.Size > 0)
        {
            for (int i = 0; i < contours.Size; i++)
            {
                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                    // Filter out tiny contours.
                    if (CvInvoke.ContourArea(approxContour, false) > 50)
                    {
                        if (approxContour.Size == 3) // triangle
                        {
                            Point[] pts = approxContour.ToArray();
                            triangleList.Add(new Triangle2DF(pts[0], pts[1], pts[2]));
                            // Accept only triangles rotated with one vertex pointing down.
                            if (pts[1].X > pts[0].X && pts[1].Y > pts[0].Y)
                            {
                                ustawWykrytyZnak(2); // mark sign A-7 as detected
                                double area = CvInvoke.ContourArea(contours[i]);
                                dict.Add(i, area);
                            }
                        }
                        else if (approxContour.Size == 4) // quadrilateral
                        {
                            bool isRectangle = true;
                            // Break the shape into individual edges.
                            Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                            for (int j = 0; j < edges.Length; j++)
                            {
                                // Reject the shape unless every angle is within [80, 100] degrees.
                                double angle = Math.Abs(
                                    edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }
                            if (isRectangle)
                            {
                                RotatedRect rrect = CvInvoke.MinAreaRect(contours[i]);
                                // Sign D-1 is a square rotated ~45° about its centre, so
                                // accept rotations in (40, 50) degrees either way.
                                if ((rrect.Angle < -40 && rrect.Angle > -50)
                                    || (rrect.Angle > 40 && rrect.Angle < 50))
                                {
                                    boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                    double area = CvInvoke.ContourArea(contours[i]);
                                    dict.Add(i, area);
                                    ustawWykrytyZnak(1); // mark sign D-1 as detected
                                }
                            }
                        }
                    }
                }
            }
        }
        zwiekszProgressBar(2);

        // Draw a red rectangle around each accepted sign, largest first.
        foreach (var it in dict.OrderByDescending(v => v.Value))
        {
            // FIX: the key is already an int; int.Parse(it.Key.ToString()) removed.
            Rectangle rect = CvInvoke.BoundingRectangle(contours[it.Key]);
            CvInvoke.Rectangle(imgInput, rect, new MCvScalar(0, 0, 255), 1);
        }
    }
    zwiekszProgressBar(2);

    pictureBox2.Image = imgInput.Bitmap;
    pictureBox2.SizeMode = PictureBoxSizeMode.StretchImage;

    // Image showing ALL detected lines from the original image, in red.
    Image<Bgr, Byte> lineImage = imgInput.CopyBlank();
    foreach (LineSegment2D line in lines)
    {
        lineImage.Draw(line, new Bgr(Color.Red), 1);
    }
    zdjecieWykrytyZnak.Image = lineImage.Bitmap;
    zdjecieWykrytyZnak.SizeMode = PictureBoxSizeMode.StretchImage;
}
/// <summary>
/// Loads the chosen image, detects circles, lines, triangles and rectangles,
/// draws them directly onto the (resized) image, and reports per-stage timing
/// in the window title and counts in <c>outputLabel</c>.
/// </summary>
private void openFileDialog1_FileOk(object sender, CancelEventArgs e)
{
    outputLabel.Text = "";
    StringBuilder msgBuilder = new StringBuilder("Performance: ");

    Image<Bgr, Byte> img = new Image<Bgr, byte>(openFileDialog1.FileName)
        .Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

    // Convert the image to grayscale and filter out the noise.
    // FIX: the UMats are now disposed (they were leaked).
    using (UMat uimage = new UMat())
    using (UMat pyrDown = new UMat())
    using (UMat cannyEdges = new UMat())
    {
        CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);
        // Use an image pyramid down/up pass to remove noise.
        CvInvoke.PyrDown(uimage, pyrDown);
        CvInvoke.PyrUp(pyrDown, uimage);

        #region circle detection
        Stopwatch watch = Stopwatch.StartNew();
        double cannyThreshold = 80.0;
        double circleAccumulatorThreshold = 100;
        CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 5.0,
            cannyThreshold, circleAccumulatorThreshold, 5, 150);
        outputLabel.Text += "C: " + circles.Length;
        watch.Stop();
        msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        #region Canny and edge detection
        watch.Restart(); // FIX: Reset() + Start() collapsed into Restart()
        double cannyThresholdLinking = 270.0;
        CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
        LineSegment2D[] lines = CvInvoke.HoughLinesP(
            cannyEdges,
            1,              //Distance resolution in pixel-related units
            Math.PI / 45.0, //Angle resolution measured in radians.
            20,             //threshold
            30,             //min Line width
            10);            //gap between lines
        watch.Stop();
        outputLabel.Text += "\nL: " + lines.Length;
        msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        #region Find triangles and rectangles
        watch.Restart();
        List<Triangle2DF> triangleList = new List<Triangle2DF>();
        List<RotatedRect> boxList = new List<RotatedRect>(); //a box is a rotated rectangle
        // FIX: removed the unused "lineBoxes" list declared in the original.
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                    if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                    {
                        if (approxContour.Size == 3) //The contour has 3 vertices, it is a triangle
                        {
                            Point[] pts = approxContour.ToArray();
                            triangleList.Add(new Triangle2DF(pts[0], pts[1], pts[2]));
                        }
                        else if (approxContour.Size == 4) //The contour has 4 vertices.
                        {
                            #region determine if all the angles in the contour are within [80, 100] degree
                            bool isRectangle = true;
                            Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(
                                    edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }
                            #endregion
                            if (isRectangle)
                            {
                                boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            }
                        }
                    }
                }
            }
        }
        watch.Stop();
        // NOTE(review): counts are halved — presumably because Canny edges yield each
        // shape twice (inner + outer contour); confirm against the source images.
        outputLabel.Text += "\nTriangles: " + triangleList.Count / 2;
        outputLabel.Text += "\nRectangles: " + boxList.Count / 2;
        msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        this.Text = msgBuilder.ToString();

        // FIX: the original allocated triangleRectangleImage / circleImage /
        // lineImage via img.CopyBlank() but never drew on or displayed them —
        // everything is drawn directly on img. The dead allocations are removed.
        #region draw triangles and rectangles
        foreach (Triangle2DF triangle in triangleList)
        {
            img.Draw(triangle, new Bgr(Color.DarkBlue), 2);
        }
        foreach (RotatedRect box in boxList)
        {
            img.Draw(box, new Bgr(Color.DarkOrange), 2);
        }
        #endregion

        #region draw circles
        foreach (CircleF circle in circles)
        {
            img.Draw(circle, new Bgr(Color.Brown), 2);
        }
        #endregion

        #region draw lines
        foreach (LineSegment2D line in lines)
        {
            img.Draw(line, new Bgr(Color.Green), 2);
        }
        #endregion
    }

    originalImageBox.Image = img.ToBitmap();
}
/// <summary>
/// Runs the full shape-detection pipeline on <paramref name="imageIn"/>
/// (circles, lines, triangles, rectangles), renders each category onto its own
/// panel, and vertically concatenates input + three panels into
/// <paramref name="imageOut"/>.
/// </summary>
/// <param name="imageIn">Source BGR image.</param>
/// <param name="imageOut">Receives the stacked result (input, triangles/rectangles, circles, lines).</param>
/// <returns>A human-readable timing message.</returns>
public string ProcessAndRender(IInputArray imageIn, IInputOutputArray imageOut)
{
    Stopwatch watch = Stopwatch.StartNew();

    #region Pre-processing
    // _gray and _cannyEdges are fields written here and read below — presumably
    // reusable scratch buffers to avoid per-call allocation (TODO confirm; this
    // makes the method non-reentrant).
    //Convert the image to grayscale and filter out the noise
    CvInvoke.CvtColor(imageIn, _gray, ColorConversion.Bgr2Gray);
    //Remove noise
    CvInvoke.GaussianBlur(_gray, _gray, new Size(3, 3), 1);
    double cannyThreshold = 180.0;
    double cannyThresholdLinking = 120.0;
    CvInvoke.Canny(_gray, _cannyEdges, cannyThreshold, cannyThresholdLinking);
    #endregion

    #region circle detection
    double circleAccumulatorThreshold = 120;
    // Circles are detected on the blurred grayscale image (not the edge map).
    CircleF[] circles = CvInvoke.HoughCircles(_gray, HoughModes.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);
    #endregion

    #region Edge detection
    LineSegment2D[] lines = CvInvoke.HoughLinesP(
        _cannyEdges,
        1,              //Distance resolution in pixel-related units
        Math.PI / 45.0, //Angle resolution measured in radians.
        20,             //threshold
        30,             //min Line width
        10);            //gap between lines
    #endregion

    #region Find triangles and rectangles
    List<Triangle2DF> triangleList = new List<Triangle2DF>();
    List<RotatedRect> boxList = new List<RotatedRect>(); //a box is a rotated rectangle
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(_cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        int count = contours.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
            using (VectorOfPoint approxContour = new VectorOfPoint())
            {
                // Approximation tolerance is 5% of the contour perimeter.
                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                {
                    if (approxContour.Size == 3) //The contour has 3 vertices, it is a triangle
                    {
                        Point[] pts = approxContour.ToArray();
                        triangleList.Add(new Triangle2DF(
                            pts[0],
                            pts[1],
                            pts[2]
                            ));
                    }
                    else if (approxContour.Size == 4) //The contour has 4 vertices.
                    {
                        #region determine if all the angles in the contour are within [80, 100] degree
                        bool isRectangle = true;
                        Point[] pts = approxContour.ToArray();
                        LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                        for (int j = 0; j < edges.Length; j++)
                        {
                            double angle = Math.Abs(
                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                            if (angle < 80 || angle > 100)
                            {
                                isRectangle = false;
                                break;
                            }
                        }
                        #endregion

                        if (isRectangle)
                        {
                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                        }
                    }
                }
            }
        }
    }
    #endregion

    watch.Stop();

    // Render each detection category on its own black panel, then stack them.
    using (Mat triangleRectangleImage = new Mat(_gray.Size, DepthType.Cv8U, 3)) //image to draw triangles and rectangles on
    using (Mat circleImage = new Mat(_gray.Size, DepthType.Cv8U, 3))            //image to draw circles on
    using (Mat lineImage = new Mat(_gray.Size, DepthType.Cv8U, 3))              //image to draw lines on
    {
        #region draw triangles and rectangles
        triangleRectangleImage.SetTo(new MCvScalar(0));
        foreach (Triangle2DF triangle in triangleList)
        {
            CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(triangle.GetVertices(), Point.Round), true,
                new Bgr(Color.DarkBlue).MCvScalar, 2);
        }
        foreach (RotatedRect box in boxList)
        {
            CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true,
                new Bgr(Color.DarkOrange).MCvScalar, 2);
        }
        //Drawing a light gray frame around the image
        CvInvoke.Rectangle(triangleRectangleImage,
            new Rectangle(Point.Empty, new Size(triangleRectangleImage.Width - 1, triangleRectangleImage.Height - 1)),
            new MCvScalar(120, 120, 120));
        //Draw the labels
        CvInvoke.PutText(triangleRectangleImage, "Triangles and Rectangles", new Point(20, 20),
            FontFace.HersheyDuplex, 0.5, new MCvScalar(120, 120, 120));
        #endregion

        #region draw circles
        circleImage.SetTo(new MCvScalar(0));
        foreach (CircleF circle in circles)
        {
            CvInvoke.Circle(circleImage, Point.Round(circle.Center), (int)circle.Radius,
                new Bgr(Color.Brown).MCvScalar, 2);
        }
        //Drawing a light gray frame around the image
        CvInvoke.Rectangle(circleImage,
            new Rectangle(Point.Empty, new Size(circleImage.Width - 1, circleImage.Height - 1)),
            new MCvScalar(120, 120, 120));
        //Draw the labels
        CvInvoke.PutText(circleImage, "Circles", new Point(20, 20),
            FontFace.HersheyDuplex, 0.5, new MCvScalar(120, 120, 120));
        #endregion

        #region draw lines
        lineImage.SetTo(new MCvScalar(0));
        foreach (LineSegment2D line in lines)
        {
            CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(Color.Green).MCvScalar, 2);
        }
        //Drawing a light gray frame around the image
        CvInvoke.Rectangle(lineImage,
            new Rectangle(Point.Empty, new Size(lineImage.Width - 1, lineImage.Height - 1)),
            new MCvScalar(120, 120, 120));
        //Draw the labels
        CvInvoke.PutText(lineImage, "Lines", new Point(20, 20),
            FontFace.HersheyDuplex, 0.5, new MCvScalar(120, 120, 120));
        #endregion

        // Stack: original input on top, then the three annotated panels.
        using (InputArray iaImageIn = imageIn.GetInputArray())
        using (Mat imageInMat = iaImageIn.GetMat())
            CvInvoke.VConcat(new Mat[] { imageInMat, triangleRectangleImage, circleImage, lineImage }, imageOut);
    }

    return (String.Format("Detected in {0} milliseconds.", watch.ElapsedMilliseconds));
}
/// <summary>
/// Determines whether a contour is a square: it must have exactly 4 vertices,
/// all interior angles close to 90°, and all sides of similar length.
/// </summary>
/// <param name="contour">Contour to test.</param>
/// <param name="MaxAngleDeviationDeg">Maximum allowed deviation from 90°, in degrees.</param>
/// <param name="MaxSideLengthVariation">Maximum allowed relative difference between the shortest and longest side (0.1 = 10%).</param>
/// <returns>True when the contour is a square within the given tolerances.</returns>
public static bool isSquare(this VectorOfPoint contour, float MaxAngleDeviationDeg = 10, float MaxSideLengthVariation = 0.1f)
{
    bool result = false;
    if (contour.Size == 4)
    {
        result = true;
        Point[] pts = contour.ToArray();
        LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

        // Every angle must be within MaxAngleDeviationDeg of 90°.
        for (int j = 0; j < edges.Length; j++)
        {
            double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
            if (angle < 90 - MaxAngleDeviationDeg || angle > 90 + MaxAngleDeviationDeg)
            {
                result = false;
                break;
            }
        }

        // All sides must have roughly the same length.
        if (result)
        {
            double MaxL = edges[0].Length;
            double MinL = MaxL;
            for (int j = 1; j < edges.Length; j++)
            {
                double L = edges[j].Length;
                if (L > MaxL)
                {
                    MaxL = L;
                }
                if (L < MinL)
                {
                    MinL = L;
                }
            }
            if (MaxL <= 0)
            {
                // Degenerate contour (all points coincide).
                return (false);
            }
            double factor = MinL / MaxL;
            if (factor < 1 - MaxSideLengthVariation)
            {
                result = false;
            }
        }
        // FIX: removed the pointless "pts = null; edges = null;" — locals are
        // reclaimed automatically when the method returns.
    }
    return (result);
}
/// <summary>
/// Loads the previously captured frame ((_CaptureCounter - 1).png) from disk
/// and scans it for large 4-vertex contours.
/// NOTE(review): several suspicious spots are flagged inline — in particular
/// the rectangle-angle test is commented out, so boxList is never populated
/// and the Unity side effects fire for every edge of every quadrilateral.
/// </summary>
public void DetectShapes()
{
    StringBuilder msgBuilder = new StringBuilder("Performance: ");
    double cannyThreshold = 180.0;
    //Load the image from file and resize it for display
    // NOTE(review): fileImage is built from _CaptureCounter but never used; the
    // image actually read below uses (_CaptureCounter - 1) — confirm which is intended.
    var fileImage = _SavePath + _CaptureCounter.ToString() + ".png";
    Mat image = CvInvoke.Imread(_SavePath + (_CaptureCounter - 1).ToString() + ".png", Emgu.CV.CvEnum.LoadImageType.AnyColor);
    //Convert the image to grayscale and filter out the noise
    // NOTE(review): uimage, pyrDown and cannyEdges are never disposed.
    UMat uimage = new UMat();
    CvInvoke.CvtColor(image, uimage, ColorConversion.Bgr2Gray);
    //use image pyr to remove noise
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(uimage, pyrDown);
    CvInvoke.PyrUp(pyrDown, uimage);
    //Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();
    Stopwatch watch = Stopwatch.StartNew();

    #region circle detection
    // (empty — circle detection was removed from this build)
    #endregion

    #region Canny and edge detection
    //watch.Reset();
    watch.Start();
    double cannyThresholdLinking = 120.0;
    UMat cannyEdges = new UMat();
    CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
    #endregion

    #region Find triangles and rectangles
    watch.Reset();
    watch.Start();
    // NOTE(review): triangleList is declared but never filled (no Size == 3 branch below).
    List<Triangle2DF> triangleList = new List<Triangle2DF>();
    List<Emgu.CV.Structure.RotatedRect> boxList = new List<Emgu.CV.Structure.RotatedRect>(); //a box is a rotated rectangle
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        int count = contours.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
            using (VectorOfPoint approxContour = new VectorOfPoint())
            {
                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                {
                    if (approxContour.Size == 4) //The contour has 4 vertices.
                    {
                        #region determine if all the angles in the contour are within [80, 100] degree
                        bool isRectangle = true;
                        System.Drawing.Point[] pts = approxContour.ToArray();
                        LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                        for (int j = 0; j < edges.Length; j++)
                        {
                            double angle = Math.Abs(
                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                            // NOTE(review): the angle filter is commented out, so this body runs
                            // for EVERY edge of EVERY large 4-vertex contour: isRectangle is
                            // unconditionally forced to false (boxList stays empty) and the
                            // audio + scene load fire once per edge. Looks like a debugging /
                            // game-flow hack — confirm before "fixing".
                            //if (angle < 80 || angle > 100)
                            //{
                            isRectangle = false;
                            m_MyAudioSource.Play(0);
                            SceneManager.LoadScene("Acertou");
                            //}
                        }
                        #endregion
                        if (isRectangle)
                        {
                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                        }
                    }
                }
            }
        }
    }
    watch.Stop();
    #endregion
}
/// <summary>
/// Evaluates the specified ROI locations and returns a confidence value for
/// each location.
/// </summary>
/// <param name="img">Source image.</param>
/// <param name="locations">Window positions (top-left corners) to evaluate.</param>
/// <param name="foundLocations">Locations classified as containing the object.</param>
/// <param name="confidences">Confidence value for each found location.</param>
/// <param name="hitThreshold">Threshold for the distance between features and the SVM classifying plane.</param>
/// <param name="winStride">Window stride; empty Size when omitted.</param>
/// <param name="padding">Padding added around the image; empty Size when omitted.</param>
/// <exception cref="ObjectDisposedException">The descriptor has been disposed.</exception>
/// <exception cref="ArgumentNullException"><paramref name="img"/> or <paramref name="locations"/> is null.</exception>
public void DetectROI(
    Mat img, Point[] locations, out Point[] foundLocations, out double[] confidences,
    double hitThreshold = 0, Size? winStride = null, Size? padding = null)
{
    if (disposed)
        throw new ObjectDisposedException("HOGDescriptor");
    if (img == null)
        throw new ArgumentNullException(nameof(img));       // FIX: nameof instead of magic strings
    if (locations == null)
        throw new ArgumentNullException(nameof(locations));
    img.ThrowIfDisposed();

    // Substitute an empty Size (native default) when the caller passed null.
    Size winStride0 = winStride.GetValueOrDefault(new Size());
    Size padding0 = padding.GetValueOrDefault(new Size());

    using (var flVec = new VectorOfPoint())
    using (var cVec = new VectorOfDouble())
    {
        NativeMethods.objdetect_HOGDescriptor_detectROI(ptr, img.CvPtr, locations, locations.Length,
            flVec.CvPtr, cVec.CvPtr, hitThreshold, winStride0, padding0);
        foundLocations = flVec.ToArray();
        confidences = cVec.ToArray();
    }
}
/// <summary>
/// Detects lines, triangles and rectangles in <paramref name="imgInput"/> and
/// draws the categories selected by <paramref name="drawtag"/> directly onto it
/// (1 = lines, 2 = triangles, 3 = rectangles, 5 = all; circle drawing is
/// currently disabled).
/// </summary>
/// <param name="imgInput">Input image; shapes are drawn onto this same image.</param>
/// <param name="drawtag">Selects which shape categories to draw.</param>
/// <param name="outcount">Receives the number of shapes drawn.</param>
/// <returns>The input image with the selected shapes drawn on it.</returns>
public static Image<Bgr, byte> detectShape(Image<Bgr, byte> imgInput, int drawtag, out int outcount)
{
    int c = 0; // number of shapes drawn
    Image<Bgr, byte> result = imgInput; // alias: drawing modifies the caller's image

    // Convert the image to grayscale and filter out the noise.
    // FIX: the UMats are now disposed (they were leaked).
    using (UMat uimage = new UMat())
    using (UMat pyrDown = new UMat())
    using (UMat cannyEdges = new UMat())
    {
        CvInvoke.CvtColor(imgInput, uimage, ColorConversion.Bgr2Gray);
        // Use an image pyramid down/up pass to remove noise.
        CvInvoke.PyrDown(uimage, pyrDown);
        CvInvoke.PyrUp(pyrDown, uimage);

        #region circle detection
        double cannyThreshold = 180.0;
        double circleAccumulatorThreshold = 120;
        // NOTE(review): dp = 85 is far outside the usual 1–2 range for HoughCircles —
        // confirm; circles are currently only referenced by the commented-out draw
        // block at the bottom.
        CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 85, 20.0,
            cannyThreshold, circleAccumulatorThreshold, 10, 100);
        #endregion

        #region Canny and edge detection
        double cannyThresholdLinking = 120.0;
        CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
        LineSegment2D[] lines = CvInvoke.HoughLinesP(
            cannyEdges,
            2,              //Distance resolution in pixel-related units
            Math.PI / 45.0, //Angle resolution measured in radians.
            20,             //threshold
            30,             //min Line width
            10);            //gap between lines
        #endregion

        #region Find triangles and rectangles
        List<Triangle2DF> triangleList = new List<Triangle2DF>();
        List<RotatedRect> boxList = new List<RotatedRect>();
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                    if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                    {
                        if (approxContour.Size == 3) //The contour has 3 vertices, it is a triangle
                        {
                            Point[] pts = approxContour.ToArray();
                            triangleList.Add(new Triangle2DF(pts[0], pts[1], pts[2]));
                        }
                        else if (approxContour.Size == 4) //The contour has 4 vertices.
                        {
                            #region determine if all the angles in the contour are within [80, 100] degree
                            bool isRectangle = true;
                            Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(
                                    edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }
                            #endregion
                            if (isRectangle)
                            {
                                boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            }
                        }
                    }
                }
            }
        }
        #endregion

        #region draw lines
        if (drawtag == 1 || (drawtag == 5))
        {
            foreach (LineSegment2D line in lines)
            {
                result.Draw(line, new Bgr(Color.Green), 5);
                c++;
            }
        }
        #endregion

        #region draw triangles
        if ((drawtag == 2 || (drawtag == 5)))
        {
            foreach (Triangle2DF triangle in triangleList)
            {
                result.Draw(triangle, new Bgr(Color.DarkBlue), 5);
                c++;
            }
        }
        #endregion

        #region draw rectangles
        if ((drawtag == 3 || (drawtag == 5)))
        {
            foreach (RotatedRect box in boxList)
            {
                result.Draw(box, new Bgr(Color.DarkOrange), 5);
                c++;
            }
        }
        #endregion

        /* #region draw circles
         * if ((drawtag == 4 || (drawtag == 5)))
         * {
         *     foreach (CircleF circle in circles)
         *         result.Draw(circle, new Bgr(Color.Brown), 5);
         * }
         #endregion*/
    }

    outcount = c;
    return (result);
}
/// <summary>
/// Computes the mean value of <paramref name="fMapMat"/> inside the polygon
/// described by <paramref name="contours"/> (the score of a detected region).
/// Returns 0 on any failure (e.g. an empty contour).
/// </summary>
/// <param name="contours">Polygon vertices in map coordinates.</param>
/// <param name="fMapMat">Single-channel float score map.</param>
private static double GetScore(VectorOfPoint contours, Mat fMapMat)
{
    // FIX: bounds are now int and seeded with extreme sentinels. The original
    // used short with a magic 9999 start, which overflowed for coordinates
    // above 32767 and produced wrong minima for maps wider/taller than 9999 px.
    int xmin = int.MaxValue;
    int xmax = int.MinValue;
    int ymin = int.MaxValue;
    int ymax = int.MinValue;
    try
    {
        // Axis-aligned bounding box of the contour.
        foreach (Point point in contours.ToArray())
        {
            if (point.X < xmin)
            {
                xmin = point.X;
            }
            if (point.X > xmax)
            {
                xmax = point.X;
            }
            if (point.Y < ymin)
            {
                ymin = point.Y;
            }
            if (point.Y > ymax)
            {
                ymax = point.Y;
            }
        }
        int roiWidth = xmax - xmin + 1;
        int roiHeight = ymax - ymin + 1;

        // FIX: the images and the mask Mat are now disposed (they were leaked).
        using (Image<Gray, float> bitmap = fMapMat.ToImage<Gray, float>())
        using (Image<Gray, float> roiBitmap = new Image<Gray, float>(roiWidth, roiHeight))
        using (Mat mask = Mat.Zeros(roiHeight, roiWidth, DepthType.Cv8U, 1))
        {
            // Copy the bounding-box region out of the score map.
            float[,,] dataFloat = bitmap.Data;
            float[,,] data = roiBitmap.Data;
            for (int j = ymin; j < ymin + roiHeight; j++)
            {
                for (int i = xmin; i < xmin + roiWidth; i++)
                {
                    try
                    {
                        data[j - ymin, i - xmin, 0] = dataFloat[j, i, 0];
                    }
                    catch (Exception ex2)
                    {
                        // Best effort: pixels outside the map are skipped.
                        Console.WriteLine(ex2.Message);
                    }
                }
            }

            // Build a mask of the polygon, shifted into ROI coordinates.
            List<Point> pts = new List<Point>();
            foreach (Point point in contours.ToArray())
            {
                pts.Add(new Point(point.X - xmin, point.Y - ymin));
            }
            using (VectorOfPoint vp = new VectorOfPoint(pts.ToArray<Point>()))
            using (VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint(vp))
            {
                CvInvoke.FillPoly(mask, vvp, new MCvScalar(1));
            }

            // Mean of the score map restricted to the polygon.
            return (CvInvoke.Mean(roiBitmap, mask).V0);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message + ex.StackTrace);
    }
    return (0);
}
/// <summary>
/// Returns a ColorfulContourMap (contour points + approximated polygon points,
/// each annotated with the pixel colour from <paramref name="input"/>) for every
/// contour whose area is at least Constants.MIN_AREA.
/// </summary>
/// <param name="input">Source image.</param>
/// <param name="index">Index stored on each produced map.</param>
/// <param name="mode">0 = black background, otherwise white background (changes the binarization threshold).</param>
public static List<ColorfulContourMap> getAllContourMap(Mat input, int index, int mode = 0)
{
    List<ColorfulContourMap> result = new List<ColorfulContourMap>();

    // Grayscale conversion.
    MatImage m1 = new MatImage(input);
    m1.Convert();
    Mat gray = m1.Out();

    // Binarize: the threshold depends on the expected background colour.
    if (mode == 0)
    {
        // black background
        MatImage m2 = new MatImage(gray);
        m2.SmoothGaussian(3);
        m2.ThresholdBinaryInv(245, 255);
        gray = m2.Out();
    }
    else
    {
        // white background
        MatImage m2 = new MatImage(gray);
        m2.SmoothGaussian(3);
        m2.ThresholdBinaryInv(100, 255);
        gray = m2.Out();
    }

    // Per-contour scratch lists, re-created after each accepted contour.
    List<Point> pointList = new List<Point>();
    List<Point> polyPointList = new List<Point>();
    List<ColorfulPoint> cps = new List<ColorfulPoint>();
    List<ColorfulPoint> pcps = new List<ColorfulPoint>();

    // FIX: the contour vector and the hierarchy Mat are now disposed; the
    // unused "temp" clone and the unconditional contours[0] access (which
    // crashed when no contour was found) are removed.
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    using (Mat hierarchy = new Mat())
    {
        CvInvoke.FindContours(gray, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);
        for (int i = 0; i < contours.Size; i++)
        {
            // Area filter: skip small contours.
            double area = Math.Abs(CvInvoke.ContourArea(contours[i], false));
            if (area >= Constants.MIN_AREA)
            {
                VectorOfPoint maxArea = contours[i];
                // Approximate the contour with a polygon (1 px tolerance).
                VectorOfPoint poly = new VectorOfPoint();
                CvInvoke.ApproxPolyDP(maxArea, poly, 1.0, true);
                pointList = maxArea.ToArray().ToList();
                polyPointList = poly.ToArray().ToList();
                // Annotate every point with the colour of the source pixel.
                foreach (Point p in pointList)
                {
                    ColorfulPoint cp = new ColorfulPoint { X = p.X, Y = p.Y, color = extractPointColor(p, input) };
                    cps.Add(cp);
                }
                foreach (Point p in polyPointList)
                {
                    ColorfulPoint cp = new ColorfulPoint { X = p.X, Y = p.Y, color = extractPointColor(p, input) };
                    pcps.Add(cp);
                }
                result.Add(new ColorfulContourMap(cps, pcps, index));
                // Start fresh lists for the next contour.
                pointList = new List<Point>();
                polyPointList = new List<Point>();
                cps = new List<ColorfulPoint>();
                pcps = new List<ColorfulPoint>();
            }
        }
    }
    return (result);
}
/// <summary>
/// Finds contours in <paramref name="cannyEdges"/>, draws the axis-aligned
/// bounding box of each contour's min-area rectangle onto
/// <paramref name="result"/>, and returns the contour vector.
/// The caller owns (and must dispose) the returned vector.
/// </summary>
/// <param name="cannyEdges">Edge image to search.</param>
/// <param name="result">Image the red rectangles are drawn onto.</param>
/// <param name="areaSize">Minimum approximated-contour area to consider.</param>
/// <returns>All contours found (not just rectangles).</returns>
public static VectorOfVectorOfPoint FindRectangle(IInputOutputArray cannyEdges, IInputOutputArray result, int areaSize = 250)
{
    // FIX: the vector is returned to the caller, so it must NOT be wrapped in
    // a using block — the original returned an already-disposed vector.
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
    int count = contours.Size;
    for (int i = 0; i < count; i++)
    {
        var rect = CvInvoke.MinAreaRect(contours[i]).MinAreaRect();
        CvInvoke.Rectangle(result, rect, new MCvScalar(0, 0, 255), 3);
        using (VectorOfPoint contour = contours[i])
        using (VectorOfPoint approxContour = new VectorOfPoint())
        {
            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
            if (CvInvoke.ContourArea(approxContour, false) > areaSize) //only consider contours with area greater than areaSize
            {
                if (approxContour.Size >= 4) //The contour has at least 4 vertices.
                {
                    #region determine if all the angles in the contour are within [80, 100] degree
                    bool isRectangle = true;
                    Point[] pts = approxContour.ToArray();
                    LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                    for (int j = 0; j < edges.Length; j++)
                    {
                        double angle = Math.Abs(
                            edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                        if (angle < 80 || angle > 100)
                        {
                            isRectangle = false;
                            break;
                        }
                    }
                    #endregion
                    // NOTE(review): isRectangle is computed but its consumer is
                    // commented out below — every contour currently gets a box
                    // drawn at the top of the loop regardless of shape.
                    //if (isRectangle)
                    //{
                    //    var rect = CvInvoke.MinAreaRect(approxContour).MinAreaRect();
                    //    CvInvoke.Rectangle(result, rect, new MCvScalar(0, 0, 255), 3);
                    //    //boxList.Add(CvInvoke.MinAreaRect(approxContour));
                    //}
                }
            }
        }
    }
    return contours;
}
/// <summary>
/// Binarizes the source image (if not already done), finds triangles,
/// rectangles and circles, draws them onto a copy of the source image, and
/// writes the per-shape counts into <paramref name="label"/>.
/// </summary>
/// <param name="thresholdValue">Threshold passed to Binarization when the binary image is not cached yet.</param>
/// <param name="minArea">Minimum contour area for a shape to be counted.</param>
/// <param name="label">Label that receives the counts text.</param>
/// <returns>A copy of the source image with the detected shapes drawn on it.</returns>
public Image<Bgr, byte> Search(int thresholdValue, int minArea, Label label)
{
    if (binarImage == null)
    {
        binarImage = Binarization(thresholdValue);
    }
    var resultImage = binarImage.Convert<Gray, byte>();
    int triangle = 0;
    int rectangle = 0;
    int circleCount = 0; // FIX: renamed from "circleС", which contained a Cyrillic 'С'

    // Find all contours as a flat list; ChainApproxSimple compresses straight
    // segments down to their end points.
    var contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(
        resultImage,
        contours,
        null,          // contour hierarchy is not needed here
        RetrType.List,
        ChainApproxMethod.ChainApproxSimple);

    var contoursImage = sourceImage.Copy();
    for (int i = 0; i < contours.Size; i++)
    {
        // FIX: approxContour is now disposed each iteration (it was leaked).
        using (var approxContour = new VectorOfPoint())
        {
            CvInvoke.ApproxPolyDP(contours[i], approxContour, CvInvoke.ArcLength(contours[i], true) * 0.05, true);
            var points = approxContour.ToArray();
            if (CvInvoke.ContourArea(approxContour, false) > minArea)
            {
                if (approxContour.Size == 3)
                {
                    triangle++;
                    var pointsTri = approxContour.ToArray();
                    contoursImage.Draw(new Triangle2DF(pointsTri[0], pointsTri[1], pointsTri[2]),
                        new Bgr(Color.GreenYellow), 2);
                }
            }
            if (isRectangle(points))
            {
                if (CvInvoke.ContourArea(approxContour, false) > minArea)
                {
                    rectangle++;
                    contoursImage.Draw(CvInvoke.MinAreaRect(approxContour), new Bgr(Color.Purple), 2);
                }
            }
        }
    }

    // Circles are detected separately on the binary image.
    List<CircleF> circles = new List<CircleF>(CvInvoke.HoughCircles(resultImage, HoughModes.Gradient, 1.0, 250, 100, 50, 5, 500));
    foreach (CircleF circle in circles)
    {
        circleCount++;
        contoursImage.Draw(circle, new Bgr(Color.Pink), 2);
    }

    // User-facing text intentionally left in Russian (counts of triangles,
    // rectangles and circles).
    label.Text = "Количество треугольников = " + triangle +
                 "\nКоличество прямоугольников = " + rectangle +
                 "\nКоличество кругов = " + circleCount;
    return (contoursImage);
}
/// <summary>
/// Approximates a contour or a curve using the Douglas-Peucker algorithm.
/// </summary>
/// <param name="curve">The polygon or curve to approximate.</param>
/// <param name="epsilon">Specifies the approximation accuracy.
/// This is the maximum distance between the original curve and its approximation.</param>
/// <param name="closed">If true, the curve is treated as closed (its first and
/// last vertices are connected); otherwise it is treated as open.</param>
/// <returns>The result of the approximation;
/// the type matches the type of the input curve.</returns>
public static Point[] ApproxPolyDP(IEnumerable<Point> curve, double epsilon, bool closed)
{
    // FIX: the XML doc for "closed" was a copy-paste of the return-value text;
    // it now documents the parameter's actual meaning. nameof keeps the
    // exception message in sync with renames.
    if (curve == null)
        throw new ArgumentNullException(nameof(curve));
    Point[] curveArray = EnumerableEx.ToArray(curve);
    IntPtr approxCurvePtr;
    NativeMethods.imgproc_approxPolyDP_Point(curveArray, curveArray.Length, out approxCurvePtr, epsilon, closed ? 1 : 0);
    // Wrap the native vector so it is released after the managed copy is taken.
    using (var approxCurveVec = new VectorOfPoint(approxCurvePtr))
    {
        return approxCurveVec.ToArray();
    }
}
/// <summary>
/// Normalizes a photographed document: forces portrait orientation, locates a large
/// rectangular contour (the document), crops it out and applies a Bradley local
/// threshold filter. Falls back to filtering the whole image when no suitable
/// rectangle is found.
/// </summary>
/// <param name="bitmap">Input photo; rotated in place to portrait orientation if needed.</param>
/// <returns>The cropped (when possible) and threshold-filtered bitmap, in portrait orientation.</returns>
public Bitmap FormatImage(Bitmap bitmap)
{
    // Work in portrait orientation; the corner math below assumes it.
    if (bitmap.Width > bitmap.Height)
    {
        bitmap.RotateFlip(RotateFlipType.Rotate90FlipNone);
    }
    int originalWidth = bitmap.Width;
    int originalHeight = bitmap.Height;

    // Resizing to a fixed 400x400 working size improves rectangle detection.
    Image <Bgr, Byte> img = new Image <Bgr, byte>(bitmap).Resize(400, 400, Inter.Linear, true);
    int resizedWidth = img.Width;
    int resizedHeight = img.Height;

    // Convert the image to grayscale and filter out the noise.
    UMat uimage = new UMat();
    CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

    // Use an image pyramid (down then up) to remove noise.
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(uimage, pyrDown);
    CvInvoke.PyrUp(pyrDown, uimage);

    // These Canny thresholds work best for this pipeline.
    double cannyThreshold = 180.0;
    double cannyThresholdLinking = 120.0;
    UMat cannyEdges = new UMat();
    CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

    List <Bitmap> cropedImagesList = new List <Bitmap>();
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        int count = contours.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
            using (VectorOfPoint approxContour = new VectorOfPoint())
            {
                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                // Only consider contours with area greater than a third of the whole image.
                if (CvInvoke.ContourArea(approxContour, false) > (resizedHeight * resizedWidth) / 3)
                {
                    if (approxContour.Size == 4) // quadrilateral candidate
                    {
                        // Determine if all the angles in the contour are within [70, 110] degrees
                        // (i.e. approximately right angles).
                        bool isRectangle = true;
                        Point[] pts = approxContour.ToArray();
                        LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                        for (int j = 0; j < edges.Length; j++)
                        {
                            double angle = Math.Abs(
                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                            if (angle < 70 || angle > 110)
                            {
                                isRectangle = false;
                                break;
                            }
                        }

                        if (isRectangle)
                        {
                            // Scale the four corners back into original-image coordinates.
                            double[] corners = new double[8];
                            for (int j = 0; j < 4; j++)
                            {
                                corners[2 * j] = Convert.ToDouble(approxContour[j].X) * originalWidth / resizedWidth;
                                corners[2 * j + 1] = Convert.ToDouble(approxContour[j].Y) * originalHeight / resizedHeight;
                            }
                            // Crop only if X1 is to the left of X2.
                            if (corners[0] <= corners[2])
                            {
                                cropedImagesList.Add(Crop(bitmap, corners));
                            }
                        }
                    }
                }
            }
        }
    }

    // Evaluate the crop candidates ONCE (the original called FilterCropedImages twice).
    Bitmap cropped = FilterCropedImages(cropedImagesList);
    if (cropped != null) // we cropped something
    {
        // Crop succeeded: filter it and force portrait orientation.
        var result = BradleyLocalThreshold(cropped);
        if (result.Width > result.Height)
        {
            result.RotateFlip(RotateFlipType.Rotate90FlipNone);
        }
        return (result);
    }
    else
    {
        // No crop found: only apply the filter to the original image.
        var result = BradleyLocalThreshold(bitmap);
        return (result);
    }
}
/// <summary>
/// Finds the intersection of two convex polygons.
/// </summary>
/// <param name="p1">First convex polygon.</param>
/// <param name="p2">Second convex polygon.</param>
/// <param name="p12">Receives the vertices of the intersection polygon.</param>
/// <param name="handleNested">When true, fully nested polygons are handled as well.</param>
/// <returns>The value returned by the native intersection routine.</returns>
public static float IntersectConvexConvex(IEnumerable<Point> p1, IEnumerable<Point> p2,
    out Point[] p12, bool handleNested = true)
{
    if (p1 == null)
        throw new ArgumentNullException("p1");
    if (p2 == null)
        throw new ArgumentNullException("p2");

    // The native call needs plain arrays together with their lengths.
    Point[] firstPolygon = EnumerableEx.ToArray(p1);
    Point[] secondPolygon = EnumerableEx.ToArray(p2);

    IntPtr intersectionPtr;
    float ret = NativeMethods.imgproc_intersectConvexConvex_Point(
        firstPolygon, firstPolygon.Length,
        secondPolygon, secondPolygon.Length,
        out intersectionPtr, handleNested ? 1 : 0);

    // Copy the native result vector into managed memory, then release it.
    using (var intersectionVec = new VectorOfPoint(intersectionPtr))
    {
        p12 = intersectionVec.ToArray();
    }

    return ret;
}
/// <summary>
/// Finds every contour of the image that carries at least one of the given key
/// points, pairing each contour with the key points detected on it, then filters
/// out duplicate/minimal contours.
/// </summary>
/// <param name="img">Source bitmap to search for contours.</param>
/// <param name="Key">Key points to match against the contour points.</param>
/// <returns>Contours paired with their (X-ordered) key points.</returns>
private List <MetricaContour> GetAllPoints(Bitmap img, List <System.Drawing.Point> Key)
{
    List <MetricaContour> TotalResult = new List <MetricaContour>();

    Image <Bgr, byte> imgIn = new Image <Bgr, byte>(img);
    imgIn = imgIn.Rotate(180, new Bgr(255, 255, 255), false);
    imgIn = imgIn.Flip(FlipType.Horizontal);

    // Edge image via Canny. (The original pre-allocated a blank image with
    // img.Width used for BOTH dimensions, then immediately overwrote it — the
    // mis-sized dead allocation is removed here.)
    Image <Gray, byte> _imgCanny = imgIn.Canny(100, 150);

    // Storage for the contour hierarchy.
    Mat hierarchy = new Mat();
    // Distinct key-point counts observed per contour.
    List <int> countDot = new List <int>();

    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        // Contour search (full point chains, no approximation).
        CvInvoke.FindContours(_imgCanny, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);

        for (int i = 0; i < contours.Size; i++)
        {
            // Take the i-th contour from the collection of all contours.
            using (VectorOfPoint contour = contours[i])
            {
                var ContrList = new List <System.Drawing.Point>();
                ContrList.AddRange(contour.ToArray());

                ClusterServise Clproc = new ClusterServise();
                // Find the key points lying on this contour.
                var NewKey = Clproc.Comparison(Key, ContrList);
                if (NewKey.Count != 0)
                {
                    MetricaContour el = new MetricaContour
                    {
                        ContourPoint = ContrList,
                        KeysPoint = NewKey.OrderBy(p => p.X).ToList()
                    };
                    TotalResult.Add(el);
                    if (!countDot.Contains(NewKey.Count))
                    {
                        countDot.Add(NewKey.Count);
                    }
                }
            }
        }

        // For contours carrying the same number of key points, drop the "minimal" one.
        foreach (var index in countDot)
        {
            var countrepeat = TotalResult.Count(i => i.KeysPoint.Count == index);
            if (countrepeat >= 2)
            {
                var min = (from x in TotalResult where x.KeysPoint.Count == index select x).Min();
                var indexmin = TotalResult.IndexOf(min);
                TotalResult.RemoveAt(indexmin);
            }
        }

        if (countDot.Sum() == Key.Count)
        {
            return (TotalResult);
        }
        else // the figure has no inner contours
        {
            // Keep only the "maximal" contour per key-point count.
            // (The unused indexmin lookup from the original loop was removed.)
            List <MetricaContour> TotalResult_Max = new List <MetricaContour>();
            foreach (var index in countDot)
            {
                var max = (from x in TotalResult where x.KeysPoint.Count >= index select x).Max();
                if (!TotalResult_Max.Contains(max))
                {
                    TotalResult_Max.Add(max);
                }
            }
            return (TotalResult_Max);
        }
    }
}
// Perform image processing and detect the fiducial mark.
// Pipeline: load image -> crop ROI -> grayscale + pyramid denoise -> threshold ->
// Canny edges -> contour search for a 6-vertex right-angled contour -> draw the
// detected mark (offset back into original-image coordinates) and report its
// center and rotation angle.
// NOTE(review): thresholds (Canny 180/120, min area 2000) and the ROI offset
// (x=450, y=350, 400x300) are hard-coded for a specific setup — confirm before reuse.
public void PerformMarkDetection()
{
    // Nothing to do until the user has picked a file.
    if (String.IsNullOrEmpty(txtFileName.Text))
    {
        return;
    }
    // Minimum area of the closed rectangular contour to be considered a mark.
    int minContourArea = 2000;
    this.OutputMsg("======================================", Color.Lime);
    // Load image from file.
    Mat originalImg = CvInvoke.Imread(this.txtFileName.Text, ImreadModes.AnyColor);
    this.OutputMsg("\n********* Loading Image **********", Color.WhiteSmoke);
    this.OutputMsg(string.Format("\t Image File: {0}", this.txtFileName.Text), Color.Aqua);
    // Display image in the imagebox.
    this.imgboxOriginal.Image = originalImg;
    this.OutputMsg("\n********* Processing Image **********", Color.WhiteSmoke);
    //Mat cutImg = new Mat(originalImg, new Range(350, 650),new Range(450, 850));
    // Crop the region of interest; the same 450/350 offsets are added back to the
    // detected vertices further down.
    Mat cutImg = CutImage(originalImg.ToImage <Bgr, byte>(), 450, 350, 400, 300).ToUMat().GetMat(AccessType.Fast);
    //CvInvoke.Imshow("Cut Image", cutImg);
    cutImg.Save("cutImg.png");
    // Convert the image to grayscale and filter out the noise.
    Mat uimage = new Mat();
    Mat binaryImg = new Mat();
    CvInvoke.CvtColor(cutImg.ToImage <Bgr, byte>(), uimage, ColorConversion.Bgr2Gray);
    // Use an image pyramid (down then up) to remove noise.
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(uimage, pyrDown);
    CvInvoke.PyrUp(pyrDown, uimage);
    // Convert to a binary image (inverted: dark features become white).
    CvInvoke.Threshold(uimage, binaryImg, 100, 255, ThresholdType.BinaryInv);
    //CvInvoke.Imshow("Binary Image", binaryImg);
    binaryImg.Save("BinaryImg.png");
    #region Canny and edge detection
    Stopwatch watch = Stopwatch.StartNew();
    double cannyThreshold = 180.0;
    watch.Reset();
    watch.Start();
    double cannyThresholdLinking = 120.0;
    UMat cannyEdges = new UMat();
    CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
    //CvInvoke.Imshow("cannyEdges", cannyEdges);
    cannyEdges.Save("cannyEdges.png");
    this.imgboxBinary.Image = cannyEdges;
    // NOTE(review): the detected lines are never used afterwards — presumably kept
    // only for the timing report.
    LineSegment2D[] lines = CvInvoke.HoughLinesP(
        cannyEdges,
        1,              //Distance resolution in pixel-related units
        Math.PI / 45.0, //Angle resolution measured in radians.
        20,             //threshold
        30,             //min Line width
        10);            //gap between lines
    watch.Stop();
    this.OutputMsg(String.Format("\t Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds), Color.Aqua);
    #endregion
    #region Find rectangles
    watch.Reset();
    watch.Start();
    List <RotatedRect> boxList = new List <RotatedRect>(); //a box is a rotated rectangle
    List <VectorOfPoint> contourList = new List <VectorOfPoint>();
    VectorOfPoint markContour = new VectorOfPoint();
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        int count = contours.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
            using (VectorOfPoint approxContour = new VectorOfPoint())
            {
                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                if (CvInvoke.ContourArea(approxContour, false) > minContourArea) //only consider contours with area greater than the minimum
                {
                    if (approxContour.Size == 6) //The contour has 6 vertices.
                    {
                        #region determine if all the angles in the contour are within [80, 100] degree
                        bool isRectangle = true;
                        Point[] pts = approxContour.ToArray();
                        LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                        for (int j = 0; j < edges.Length; j++)
                        {
                            double angle = Math.Abs(
                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                            if (angle < 80 || angle > 100)
                            {
                                isRectangle = false;
                                break;
                            }
                        }
                        #endregion
                        if (isRectangle)
                        {
                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            contourList.Add(approxContour);
                            // Stop at the first matching contour: only one mark is expected.
                            break;
                        }
                    }
                }
            }
        }
    }
    watch.Stop();
    this.OutputMsg(String.Format("\t Finding Rectangles - {0} ms; ", watch.ElapsedMilliseconds), Color.Aqua);
    #endregion
    #region draw rectangles
    this.OutputMsg("\n********* Calculating mark center and angle *********", Color.WhiteSmoke);
    Mat triangleRectangleImage = cutImg;
    //triangleRectangleImage.SetTo(new MCvScalar(0));
    //foreach (RotatedRect box in boxList)
    //{
    //    CvInvoke.Polylines(cutImg, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.Red).MCvScalar, 2);
    //}
    PointF markCenter = new PointF();
    double markAngle = 0;
    Mat testImg = new Mat();
    if (boxList.Count > 0)
    {
        //CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(boxList[0].GetVertices(), Point.Round), true, new Bgr(Color.Red).MCvScalar, 2);
        //CvInvoke.DrawContours(triangleRectangleImage, contourList[0], -1, new Bgr(Color.DarkOrange).MCvScalar);
        CvInvoke.CvtColor(originalImg, testImg, ColorConversion.Gray2Bgr);
        // Shift the vertices from ROI coordinates back into original-image coordinates.
        Point[] pts = Array.ConvertAll(boxList[0].GetVertices(), Point.Round);
        for (int i = 0; i < pts.Length; i++)
        {
            pts[i] = new Point(pts[i].X + 450, pts[i].Y + 350);
        }
        CvInvoke.Polylines(testImg, pts, true, new Bgr(Color.Red).MCvScalar, 2);
        // NOTE(review): markCenter stays in cropped-ROI coordinates (no +450/+350
        // offset is applied to it) — confirm that is intended.
        markCenter = boxList[0].Center;
        markAngle = Math.Round(boxList[0].Angle, 3);
        // CvInvoke.PutText(triangleRectangleImage, string.Format("Center: [{0},{1}]\nAngle: {2}", markCenter.X, markCenter.Y, markAngle), markCenter, FontFace.HersheyPlain, 1.0, new Bgr(Color.DarkOrange).MCvScalar);
    }
    this.OutputMsg(String.Format("\tMark center: {0}\n\tMark angle: {1}", markCenter, markAngle), Color.Gold);
    this.imgboxDetectedRec.Image = testImg;
    // NOTE(review): the file name has no extension — confirm the intended image format.
    triangleRectangleImage.Save("markImage");
    #endregion
}
//
// EDDIE'S FUNCTIONS START HERE
//
// Classifies the contours of detectionFrame into triangles, squares, rectangles
// and ellipses (by approximated vertex count, aspect ratio and circularity),
// draws the classified shapes onto a rotated copy of displayFrame, and shows the
// result in pictureBox2. Per-frame results accumulate in the class-level lists
// (rect, ellipseList, resultados, ...), which are cleared on entry.
// NOTE(review): the MessageBox.Show calls block the UI on every classification —
// presumably left over from debugging; confirm before shipping.
private void DetectObject(Mat detectionFrame, Mat displayFrame)
{
    System.Drawing.Rectangle box = new System.Drawing.Rectangle();
    Image <Bgr, byte> temp = detectionFrame.ToImage <Bgr, byte>();
    temp = temp.Rotate(180, new Bgr(0, 0, 0));
    Image <Bgr, Byte> buffer_im = displayFrame.ToImage <Bgr, Byte>();
    float a = buffer_im.Width;
    float b = buffer_im.Height;
    MessageBox.Show("El tamano camara es W: " + a.ToString() + " y H:" + b.ToString());
    // Reset the per-frame detection results.
    boxList.Clear();
    rect.Clear();
    triangleList.Clear();
    circleList.Clear();
    ellipseList.Clear();
    // transform image
    //UMat uimage = new UMat();
    // CvInvoke.CvtColor(displayFrame, uimage, ColorConversion.Bgr2Gray);
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        /// IOutputArray hirarchy = null;
        /// CvInvoke.FindContours(detectionFrame, contours, hirarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
        ///CvInvoke.Polylines(detectionFrame, contours, true, new MCvScalar(255, 0, 0), 2, LineType.FourConnected);
        Image <Bgr, Byte> resultadoFinal = displayFrame.ToImage <Bgr, byte>();
        resultadoFinal = resultadoFinal.Rotate(180, new Bgr(0, 0, 0));
        // Circles
        //double cannyThreshold = 180.0;
        //double circleAccumulatorThreshold = 120;
        //CircleF[] circles = CvInvoke.HoughCircles(detectionFrame, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);
        /// if (contours.Size > 0)
        ///{
        // NOTE(review): maxArea/chosen/contour are only referenced by the
        // commented-out convex-hull experiment below — dead otherwise.
        double maxArea = 1000;
        int chosen = 0;
        VectorOfPoint contour = null;
        /*
         * for (int i = 0; i < contours.Size; i++)
         * {
         * contour = contours[i];
         *
         * double area = CvInvoke.ContourArea(contour);
         * if (area > maxArea)
         * {
         * System.Drawing.Rectangle rect = new System.Drawing.Rectangle();
         * // maxArea = area;
         * chosen = i;
         * //}
         * //}
         *
         * //Boxes
         * VectorOfPoint hullPoints = new VectorOfPoint();
         * VectorOfInt hullInt = new VectorOfInt();
         *
         * CvInvoke.ConvexHull(contours[chosen], hullPoints, true);
         * CvInvoke.ConvexHull(contours[chosen], hullInt, false);
         *
         * Mat defects = new Mat();
         *
         * if (hullInt.Size > 3)
         * CvInvoke.ConvexityDefects(contours[chosen], hullInt, defects);
         *
         * box = CvInvoke.BoundingRectangle(hullPoints);
         * CvInvoke.Rectangle(displayFrame, box, drawingColor);//Box rectangulo que encierra el area mas grande
         * // cropbox = crop_color_frame(displayFrame, box);
         *
         * buffer_im.ROI = box;
         *
         * Image<Bgr, Byte> cropped_im = buffer_im.Copy();
         * //pictureBox8.Image = cropped_im.Bitmap;
         * System.Drawing.Point center = new System.Drawing.Point(box.X + box.Width / 2, box.Y + box.Height / 2);//centro rectangulo MOUSE
         * System.Drawing.Point esquina_superiorI = new System.Drawing.Point(box.X, box.Y);
         * System.Drawing.Point esquina_superiorD = new System.Drawing.Point(box.Right, box.Y);
         * System.Drawing.Point esquina_inferiorI = new System.Drawing.Point(box.X, box.Y + box.Height);
         * System.Drawing.Point esquina_inferiorD = new System.Drawing.Point(box.Right, box.Y + box.Height);
         * CvInvoke.Circle(displayFrame, esquina_superiorI, 5, new MCvScalar(0, 0, 255), 2);
         * CvInvoke.Circle(displayFrame, esquina_superiorD, 5, new MCvScalar(0, 0, 255), 2);
         * CvInvoke.Circle(displayFrame, esquina_inferiorI, 5, new MCvScalar(0, 0, 255), 2);
         * CvInvoke.Circle(displayFrame, esquina_inferiorD, 5, new MCvScalar(0, 0, 255), 2);
         * CvInvoke.Circle(displayFrame, center, 5, new MCvScalar(0, 0, 255), 2);
         * VectorOfPoint start_points = new VectorOfPoint();
         * VectorOfPoint far_points = new VectorOfPoint();
         * }
         * }
         */
        // Draw red border: binarize the smoothed detection frame and find its
        // external contours.
        var temp2 = temp.SmoothGaussian(5).Convert <Gray, byte>().ThresholdBinary(new Gray(20), new Gray(255));
        temp2 = temp2.Rotate(180, new Gray(0));
        VectorOfVectorOfPoint contorno = new VectorOfVectorOfPoint();
        Mat mat = new Mat();
        CvInvoke.FindContours(temp2, contorno, mat, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
        for (int i = 0; i < contorno.Size; i++)
        {
            VectorOfPoint approxContour = new VectorOfPoint();
            double perimetro = CvInvoke.ArcLength(contorno[i], true);
            VectorOfPoint approx = new VectorOfPoint();
            VectorOfPointF approxF = new VectorOfPointF();
            double area = CvInvoke.ContourArea(contorno[i]);
            // Skip small contours (noise).
            if (area > 5000)
            {
                // Polygonal approximation; the vertex count drives the shape
                // classification below.
                CvInvoke.ApproxPolyDP(contorno[i], approx, 0.04 * perimetro, true);
                // CvInvoke.DrawContours(displayFrame, contorno, i, new MCvScalar(255, 0, 0), 2);
                //pictureBox4.Image = temp2.Bitmap;
                // Contour centroid from image moments (used by the commented PutText calls).
                var moments = CvInvoke.Moments(contorno[i]);
                int x = (int)(moments.M10 / moments.M00);
                int y = (int)(moments.M01 / moments.M00);
                resultados.Add(approx);
                bool isShape;
                if (approx.Size == 3) //The contour has 3 vertices, it is a triangle
                {
                    System.Drawing.Point[] pts = approx.ToArray();
                    double perimetro2 = CvInvoke.ArcLength(contorno[i], true);
                    double area2 = CvInvoke.ContourArea(contorno[i]);
                    // Circularity = 4*pi*area / perimeter^2 (1.0 for a perfect circle).
                    double circularidad = 4 * Math.PI * area2 / Math.Pow(perimetro2, 2);
                    MessageBox.Show("circularidad triangulo" + circularidad);
                    MessageBox.Show("Es triangulo ");
                    /*Triangle2DF triangle = new Triangle2DF(pts[0], pts[1], pts[2]);
                     * resultadoFinal.Draw(triangle, new Bgr(System.Drawing.Color.Cyan), 1);
                     * CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                     * CvInvoke.PutText(resultadoFinal, "Triangle", new System.Drawing.Point(x, y),
                     * Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                     * resTri.Add(approx);*/
                    //MessageBox.Show("No es triangulo ");
                    //Triangle2DF triangle = new Triangle2DF(pts[0], pts[1], pts[2]);
                    //resultadoFinal.Draw(triangle, new Bgr(System.Drawing.Color.Red), 2);
                    // NOTE(review): although classified as a triangle, this draws the
                    // min-area rectangle and records a bounding rectangle — confirm intended.
                    RotatedRect rectangle = CvInvoke.MinAreaRect(approx);
                    CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                    resultadoFinal.Draw(rectangle, new Bgr(System.Drawing.Color.Cyan), 1);
                    rect.Add(CvInvoke.BoundingRectangle(approx));
                }
                if (approx.Size == 4) //The contour has 4 vertices.
                {
                    //RotatedRect tt = new RotatedRect(CvInvoke.MinAreaRect(approx).Center, CvInvoke.MinAreaRect(approx).Size, 270) ;
                    //boxList.Add(tt);
                    // Square test: aspect ratio of the bounding rectangle.
                    System.Drawing.Rectangle rectAux = CvInvoke.BoundingRectangle(contorno[i]);
                    double ar = (double)rectAux.Width / rectAux.Height;
                    // Compute circularity.
                    double perimetro2 = CvInvoke.ArcLength(contorno[i], true);
                    double area2 = CvInvoke.ContourArea(contorno[i]);
                    double circularidad = 4 * Math.PI * area2 / Math.Pow(perimetro2, 2);
                    MessageBox.Show("circularidad rect " + circularidad);
                    if (circularidad > 0.69)
                    {
                        // High circularity AND near-1 aspect ratio => square.
                        if (ar >= 0.8 && ar <= 1.0)
                        {
                            MessageBox.Show("Cuadrado ");
                            RotatedRect rectangle = CvInvoke.MinAreaRect(contorno[i]);
                            CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                            resultadoFinal.Draw(rectangle, new Bgr(System.Drawing.Color.Cyan), 1);
                            //CvInvoke.PutText(resultadoFinal, "Rectangle", new System.Drawing.Point(x, y),
                            //Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                            rect.Add(CvInvoke.BoundingRectangle(approx));
                        }
                        // Otherwise treat it as an ellipse.
                        else
                        {
                            MessageBox.Show("parecia rectangulo pero era elipse ");
                            Ellipse final_ellipse = new Ellipse(CvInvoke.MinAreaRect(contorno[i]).Center, CvInvoke.MinAreaRect(contorno[i]).Size, 0);
                            Ellipse final_ellipseDibujo = new Ellipse(CvInvoke.MinAreaRect(contorno[i]).Center, CvInvoke.MinAreaRect(contorno[i]).Size, 90);
                            ellipseList.Add(final_ellipse);
                            //IConvexPolygonF poligono = CvInvoke.MinAreaRect(approx);
                            //resultadoFinal.Draw(poligono, new Bgr(Color.Cyan), 1);
                            resultadoFinal.Draw(final_ellipseDibujo, new Bgr(System.Drawing.Color.Cyan), 1);
                            CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                            //CvInvoke.PutText(resultadoFinal, "Figura circular", new System.Drawing.Point(x, y),
                            // Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                        }
                    }
                    // Low circularity => plain rectangle.
                    else
                    {
                        RotatedRect rectangle = CvInvoke.MinAreaRect(contorno[i]);
                        CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                        resultadoFinal.Draw(rectangle, new Bgr(System.Drawing.Color.Cyan), 1);
                        //CvInvoke.PutText(resultadoFinal, "Rectangle", new System.Drawing.Point(x, y),
                        //Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                        rect.Add(CvInvoke.BoundingRectangle(approx));
                    }
                    /* //prueba imagen de rectangulo
                     * //--------------------------------------PART 1 : DRAWING STUFF IN A BITMAP------------------------------------------------------------------------------------
                     * System.Drawing.Point[] pts = approx.ToArray();
                     *
                     * System.Drawing.PointF[] mypoints = Array.ConvertAll(
                     * pts.ToArray<System.Drawing.Point>(),
                     * value => new System.Drawing.PointF(value.X, value.Y)
                     * );
                     *
                     * System.Drawing.Rectangle r = new System.Drawing.Rectangle(0, 0, CvInvoke.BoundingRectangle(approx).Width, CvInvoke.BoundingRectangle(approx).Height);
                     * Pen blackPen = new Pen(System.Drawing.Color.FromArgb(255, 255, 0, 0), 1);
                     * bmp = new Bitmap(r.Width+100,r.Height+10, PixelFormat.Format32bppArgb);
                     * Graphics g = Graphics.FromImage(bmp);
                     * g.DrawRectangle(blackPen, r); //rectangle 1
                     * g.DrawPolygon(blackPen,mypoints);
                     * System.Drawing.Rectangle rcrop = new System.Drawing.Rectangle(r.X, r.Y, r.Width + 10, r.Height + 10);//This is the cropping rectangle (bonding box adding 10 extra units width and height)
                     *
                     * //Crop the model from the bmp
                     * Bitmap src = bmp;
                     * // Bitmap target = new Bitmap(r.Width, r.Height);
                     * //using (Graphics gs = Graphics.FromImage(target))
                     * //{
                     * // gs.DrawImage(src, rcrop, r, GraphicsUnit.Pixel);
                     * // gs.Dispose();
                     * //}
                     * //--------------------------------------PART 2 : SAVING THE BMP AS JPG------------------------------------------------------------------------------------
                     * src.Save("testOJO.jpg");*/
                }
                /* ELIMINAR
                 * if (approx.Size == 5 )
                 * {
                 * System.Drawing.Point[] pts = approx.ToArray();
                 *
                 * //MessageBox.Show("Cantidad puntos poligono "+pts.Length);
                 * //IConvexPolygonF poligono = CvInvoke.MinAreaRect(approx);
                 * //resultadoFinal.Draw(poligono, new Bgr(Color.Cyan), 1);
                 * CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 0), 1, LineType.AntiAlias);
                 * CvInvoke.PutText(resultadoFinal, "Pentagon", new System.Drawing.Point(x, y),
                 * Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                 * }*/
                // 5 or more vertices => treat as an ellipse.
                if (approx.Size >= 5)
                {
                    double perimetro2 = CvInvoke.ArcLength(contorno[i], true);
                    double area2 = CvInvoke.ContourArea(contorno[i]);
                    double circularidad = 4 * Math.PI * area2 / Math.Pow(perimetro2, 2);
                    MessageBox.Show("circularidad elipse " + circularidad);
                    Ellipse final_ellipse = new Ellipse(CvInvoke.MinAreaRect(contorno[i]).Center, CvInvoke.MinAreaRect(contorno[i]).Size, 0);
                    Ellipse final_ellipseDibujo = new Ellipse(CvInvoke.MinAreaRect(contorno[i]).Center, CvInvoke.MinAreaRect(contorno[i]).Size, 90);
                    ellipseList.Add(final_ellipse);
                    //IConvexPolygonF poligono = CvInvoke.MinAreaRect(approx);
                    //resultadoFinal.Draw(poligono, new Bgr(Color.Cyan), 1);
                    resultadoFinal.Draw(final_ellipseDibujo, new Bgr(System.Drawing.Color.Cyan), 1);
                    CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                    //CvInvoke.PutText(resultadoFinal, "Figura circular", new System.Drawing.Point(x, y),
                    // Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                }
                /* _Eliminar
                 * if (approx.Size > 6)
                 * {
                 *
                 * double circularidad = 4 * Math.PI * area / (Math.Pow(2, perimetro));
                 * MessageBox.Show("circularidad circulo "+circularidad);
                 * CvInvoke.PutText(resultadoFinal, "Circle", new System.Drawing.Point(x, y),
                 * Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                 * CircleF circle = CvInvoke.MinEnclosingCircle(approx);
                 * circleList.Add(circle);
                 * CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                 * resultadoFinal.Draw(circle, new Bgr(System.Drawing.Color.Cyan), 1);
                 * }*/
            }
        }
        pictureBox2.Image = resultadoFinal.Bitmap;
        button2.Enabled = true;
        ///}
    }
}
/// <summary>
/// Detects approximately rectangular regions in the image: grayscale -> blur ->
/// Canny -> dilate, then keeps polygon contours whose edge angles are all near
/// 90 degrees, returning their axis-aligned min-area bounding rectangles.
/// </summary>
/// <param name="sourceImage">BGR input image.</param>
/// <returns>Bounding rectangles of the detected rectangular contours.</returns>
public static IEnumerable <Rectangle> DetectSquares(Mat sourceImage)
{
    // (The original allocated a destinationImage Mat here that was never used —
    // removed as a dead native allocation.)
    Mat greyscaleImage = new Mat();
    CvInvoke.CvtColor(sourceImage, greyscaleImage, ColorConversion.Bgr2Gray);
    Mat detectedEdges = new Mat();
    CvInvoke.GaussianBlur(greyscaleImage, detectedEdges, new Size(1, 1), 1);
    CvInvoke.Canny(detectedEdges, detectedEdges, Treshold, Treshold * 3);
    // Dilate to close small gaps in the detected edges.
    CvInvoke.Dilate(detectedEdges, detectedEdges, new Mat(), new Point(-1, -1), 3, BorderType.Default, new MCvScalar(255, 255, 255));
    //ImageViewer.Show(detectedEdges);
    List <Rectangle> boxList = new List <Rectangle>();
    //List<LineSegment2D> lines = new List<LineSegment2D>();
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(detectedEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        int count = contours.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint approxContour = new VectorOfPoint())
            using (VectorOfPoint approx = contours[i])
            {
                CvInvoke.ApproxPolyDP(approx, approxContour, CvInvoke.ArcLength(approx, true) * 0.035, true);
                Point[] pts = approxContour.ToArray();
                LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                //lines.AddRange(edges);
                // NOTE(review): oriented=true means the area is signed and can be
                // negative for clockwise contours — confirm the >= 500 filter is intended.
                double contourArea = CvInvoke.ContourArea(approxContour, true);
                // Keep mid-sized contours only (>= 500 px and <= a fifth of the image).
                if (contourArea >= 500 && contourArea <= detectedEdges.Width * detectedEdges.Height / 5)
                {
                    if (approxContour.Size >= 2)
                    {
                        // All exterior angles must be close to 90 degrees ([85, 95]).
                        bool isRectangle = true;
                        for (int j = 0; j < edges.Length; j++)
                        {
                            double angle = Math.Abs(edges[(j + 1) % edges.Length]
                                .GetExteriorAngleDegree(edges[j]));
                            if (angle < 85 || angle > 95)
                            {
                                isRectangle = false;
                                break;
                            }
                        }
                        if (isRectangle)
                        {
                            RotatedRect currentRectangle = CvInvoke.MinAreaRect(approxContour);
                            Rectangle minRectangle = currentRectangle.MinAreaRect();
                            //int ninetyPercentWidth = minRectangle.Width - (int)(minRectangle.Width * 0.05);
                            //int ninetyPercentHeight = minRectangle.Height - (int)(minRectangle.Height * 0.05);
                            //minRectangle.Size = new Size(ninetyPercentWidth, ninetyPercentHeight);
                            //minRectangle.Offset(5, 5);
                            boxList.Add(minRectangle);
                        }
                    }
                }
            }
        }
    }
    return (boxList);
}
// Partial Match Algorithm using color feature.
// Same framework as the turning-angle partial matcher, but scores candidate
// alignments by the color differences along the edges.
// Overview: the longer contour "DNA" is replicated (to make it cyclic), the
// shorter one is reversed, and for each shift the longest run of near-zero color
// differences is recorded. The best run is mapped back to indices in the original
// sequences, then refined by approximating both matched edges and keeping the
// longest axis-aligned segment.
// NOTE(review): `best`, `offset`, `end`, `diff`, `approxContour`/`approxF` and the
// `iteration == 33` probe are debugging leftovers that do not affect the result.
public static Match partialColorMatch(List <Phi> DNAseq1, List <Phi> DNAseq2)
{
    bool flag = true; // true when DNAseq1 is the longer sequence
    // Compare the control points in contours between two parts.
    Match segment; // create an empty match segment
    segment.t11 = 0;
    segment.t12 = 0;
    segment.t21 = 0;
    segment.t22 = 0;
    List <Phi> seq1, seq2; // two empty lists of edge maps
    int best = 0, max_match;
    int offset = 0, length = 0;
    if (DNAseq1.Count > DNAseq2.Count) // first part has more control points than the second
    {
        seq1 = replicateDNA(DNAseq1); // replicate the larger DNA
        seq2 = DNAseq2.ToList();      // reverse the smaller one
        seq2.Reverse();
    }
    else
    {
        flag = false;
        seq1 = replicateDNA(DNAseq2); // replicate the larger (second) sequence
        seq2 = DNAseq1.ToList();      // reverse the smaller one
        seq2.Reverse();
    }
    List <int> zc = new List <int>();      // best zero-run length per shift
    List <int> starts = new List <int>();  // start index of that run per shift (-1 if none)
    int iteration = 0;
    for (int shift = 0; shift < seq1.Count - seq2.Count; shift += Constants.STEP)
    {
        List <int> diff = new List <int>();
        bool flag1 = false; // true while inside a zero-difference run
        int start = 0, end = 0;
        List <int> zeroCounts = new List <int>(); // lengths of completed runs at this shift
        int zeroCount = 0;
        List <int> starts2 = new List <int>();    // start index of each run at this shift
        // Tolerance count for occasional non-zero differences inside a run.
        int tolerCount = 0;
        for (int i = 0; i < seq2.Count; ++i)
        {
            int difference = Metrics.colorDifference(seq1[i + shift].color, seq2[i].color);
            // difference <= 0 marks a match point; otherwise the run may continue
            // within the tolerance budget or be terminated.
            if (difference <= 0)
            {
                // Entering a run: remember where it started.
                if (!flag1)
                {
                    flag1 = true;
                    start = i;
                    //starts.Add(start);
                    starts2.Add(start);
                }
                // Count the zero-difference points in this section.
                zeroCount++;
                tolerCount = 0;
            }
            else
            {
                if (tolerCount <= Constants.COLOR_TOLERANCE)
                {
                    // Tolerated mismatch inside a run: keep the run alive.
                    if (flag1)
                    {
                        zeroCount++;
                        tolerCount++;
                    }
                }
                else
                {
                    // Tolerance exhausted: close the run.
                    if (flag1)
                    {
                        zeroCounts.Add(zeroCount); // add to the upper-level storage
                        zeroCount = 0;             // reset the counter
                        flag1 = false;             // unflag
                        tolerCount = 0;
                    }
                }
            }
            diff.Add(difference);
        }
        // Debug probe for a specific shift iteration.
        if (iteration == 33)
        {
            Console.WriteLine("33");
        }
        // Record the start of the longest run at this shift (-1 when no run closed).
        if (zeroCounts.Count == 0)
        {
            starts.Add(-1);
        }
        else
        {
            starts.Add(starts2[zeroCounts.IndexOf(zeroCounts.Max())]);
        }
        if (zeroCounts.Count == 0)
        {
            zc.Add(0);
        }
        else
        {
            zc.Add(zeroCounts.Max());
        }
        // TODO: implement a histogram algorithm for color match
        //max_match = colorHistogram(diff, seq2, ref start, ref end, Util.DELTA_THETA);
        max_match = 0;
        iteration++;
    }
    Console.WriteLine("Max:" + zc.Max());
    // No match anywhere: skip straight to filling the output from the zeroed indices.
    if (zc.Max() == 0)
    {
        goto a;
    }
    // Locate the shift and in-run start of the best (longest) run.
    int t_shift = 0;
    int s_start = 0;
    for (int i = 0; i < zc.Count; i++)
    {
        if (zc[i] == zc.Max())
        {
            t_shift = Constants.STEP * i;
            s_start = starts[i];
        }
    }
    int startPos1 = t_shift + s_start;
    int endPos1 = startPos1 + zc.Max();
    int startPos2 = s_start;
    int endPos2 = startPos2 + zc.Max();
    length = zc.Max();
    // Diagnostics: verify the algorithm found the correct matching positions.
    Console.WriteLine("Flag:" + flag);
    Console.WriteLine("Shiftreq:" + startPos1);
    Console.WriteLine("Count:" + DNAseq1.Count);
    Console.WriteLine("P1_start_x" + seq1[startPos1].x);
    Console.WriteLine("P1_start_y" + seq1[startPos1].y);
    Console.WriteLine("P1_end_x" + seq1[endPos1].x);
    Console.WriteLine("P1_end_y" + seq1[endPos1].y);
    Console.WriteLine("P2_start_x" + seq2[startPos2].x);
    Console.WriteLine("P2_start_y" + seq2[startPos2].y);
    Console.WriteLine("P2_end_x" + seq2[endPos2].x);
    Console.WriteLine("P2_end_y" + seq2[endPos2].y);
    // Map the working-sequence positions back to indices in the original DNA
    // sequences by matching coordinates.
    // flag == true: seq1 came from DNAseq1 and seq2 from DNAseq2.
    if (flag)
    {
        for (int j = 0; j < DNAseq1.Count; j++)
        {
            if ((seq1[startPos1].x == DNAseq1[j].x) && (seq1[startPos1].y == DNAseq1[j].y))
            {
                segment.t11 = j;
                segment.t12 = j + zc.Max();
                // Wrap around the (cyclic) contour.
                if (segment.t12 >= DNAseq1.Count)
                {
                    segment.t12 -= DNAseq1.Count;
                }
            }
        }
        for (int j = 0; j < DNAseq2.Count; j++)
        {
            if ((seq2[startPos2].x == DNAseq2[j].x) && (seq2[startPos2].y == DNAseq2[j].y))
            {
                segment.t21 = j;
                // seq2 was reversed, so the end index runs backwards.
                segment.t22 = j - zc.Max();
                if (segment.t22 < 0)
                {
                    segment.t22 += DNAseq2.Count;
                }
            }
        }
    }
    else
    {
        // Roles swapped: seq1 came from DNAseq2 and seq2 from DNAseq1.
        for (int j = 0; j < DNAseq2.Count; j++)
        {
            if ((seq1[startPos1].x == DNAseq2[j].x) && (seq1[startPos1].y == DNAseq2[j].y))
            {
                segment.t21 = j;
                segment.t22 = j + zc.Max();
                if (segment.t22 >= DNAseq2.Count)
                {
                    segment.t22 -= DNAseq2.Count;
                }
            }
        }
        for (int j = 0; j < DNAseq1.Count; j++)
        {
            if ((seq2[startPos2].x == DNAseq1[j].x) && (seq2[startPos2].y == DNAseq1[j].y))
            {
                segment.t11 = j;
                segment.t12 = j - zc.Max();
                if (segment.t12 < 0)
                {
                    segment.t12 += DNAseq1.Count;
                }
            }
        }
    }
    // Fill the output coordinates from the resolved indices.
a:
    segment.x11 = (int)DNAseq1[segment.t11].x;
    segment.y11 = (int)DNAseq1[segment.t11].y;
    segment.x12 = (int)DNAseq1[segment.t12].x;
    segment.y12 = (int)DNAseq1[segment.t12].y;
    segment.x21 = (int)DNAseq2[segment.t21].x;
    segment.y21 = (int)DNAseq2[segment.t21].y;
    segment.x22 = (int)DNAseq2[segment.t22].x;
    segment.y22 = (int)DNAseq2[segment.t22].y;
    /*if (best == 0)
     * segment.confidence = 0;
     * else
     * segment.confidence = Math.Sqrt((double)(length * length) / best);
     */
    segment.confidence = length;
    Console.WriteLine(segment.ToString());
    // The code above produces the matching segment without considering the edge
    // feature; below, the matching edge is refined and non-matching parts culled.
    // Currently handles the simplest case: a straight edge without rotation.
    // Step 1: extract the portion of DNA that forms the matching edge.
    List <Phi> edge1 = new List <Phi>(); // valid edge in image 1
    List <Phi> edge2 = new List <Phi>(); // valid edge in image 2
    for (int i = Math.Min(segment.t11, segment.t12); i < Math.Max(segment.t11, segment.t12); i++)
    {
        edge1.Add(DNAseq1[i]);
    }
    for (int i = Math.Min(segment.t21, segment.t22); i < Math.Max(segment.t21, segment.t22); i++)
    {
        edge2.Add(DNAseq2[i]);
    }
    if (edge1.Count == 0 || edge2.Count == 0)
    {
        goto r; // no matching edge: culling is not necessary
    }
    // Step 2: analyze the edge feature — convert each edge into an approximated contour.
    VectorOfPoint c1;
    VectorOfPoint c2;
    List <Point> pedge1;
    List <Point> pedge2;
    c1 = new VectorOfPoint();
    List <Point> ps = new List <Point>();
    foreach (Phi p in edge1)
    {
        ps.Add(new Point((int)p.x, (int)p.y));
    }
    c1.Push(ps.ToArray());
    CvInvoke.ApproxPolyDP(c1, c1, 2.0, false);
    pedge1 = c1.ToArray().ToList();
    c2 = new VectorOfPoint();
    List <Point> ps2 = new List <Point>();
    foreach (Phi p in edge2)
    {
        ps2.Add(new Point((int)p.x, (int)p.y));
    }
    c2.Push(ps2.ToArray());
    CvInvoke.ApproxPolyDP(c2, c2, 2.0, false);
    pedge2 = c2.ToArray().ToList();
    // Step 3: cull the edge. Brute force: keep the longest purely horizontal or
    // vertical segment of each approximated edge (solves many cases).
    int maxDistance = -99999;
    int pos1 = 0, pos2 = 0;
    for (int i = 0; i < pedge1.Count - 1; i++)
    {
        if (pedge1[i + 1].X == pedge1[i].X)
        {
            // Vertical segment: measure its length in Y.
            if (Math.Abs(pedge1[i + 1].Y - pedge1[i].Y) > maxDistance)
            {
                maxDistance = Math.Abs(pedge1[i + 1].Y - pedge1[i].Y);
                pos1 = i;
            }
        }
        else if (pedge1[i + 1].Y == pedge1[i].Y)
        {
            // Horizontal segment: measure its length in X.
            if (Math.Abs(pedge1[i + 1].X - pedge1[i].X) > maxDistance)
            {
                maxDistance = Math.Abs(pedge1[i + 1].X - pedge1[i].X);
                pos1 = i;
            }
        }
    }
    maxDistance = -99999;
    for (int i = 0; i < pedge2.Count - 1; i++)
    {
        if (pedge2[i + 1].X == pedge2[i].X)
        {
            if (Math.Abs(pedge2[i + 1].Y - pedge2[i].Y) > maxDistance)
            {
                maxDistance = Math.Abs(pedge2[i + 1].Y - pedge2[i].Y);
                pos2 = i;
            }
        }
        else if (pedge2[i + 1].Y == pedge2[i].Y)
        {
            if (Math.Abs(pedge2[i + 1].X - pedge2[i].X) > maxDistance)
            {
                maxDistance = Math.Abs(pedge2[i + 1].X - pedge2[i].X);
                pos2 = i;
            }
        }
    }
    // The refined matching edge is known; map its endpoints back to DNA indices.
    for (int j = 0; j < DNAseq1.Count; j++)
    {
        if ((pedge1[pos1].X == DNAseq1[j].x) && (pedge1[pos1].Y == DNAseq1[j].y))
        {
            segment.t11 = j;
        }
    }
    for (int j = 0; j < DNAseq2.Count; j++)
    {
        if ((pedge2[pos2].X == DNAseq2[j].x) && (pedge2[pos2].Y == DNAseq2[j].y))
        {
            segment.t21 = j;
        }
    }
    for (int j = 0; j < DNAseq1.Count; j++)
    {
        if ((pedge1[pos1 + 1].X == DNAseq1[j].x) && (pedge1[pos1 + 1].Y == DNAseq1[j].y))
        {
            segment.t12 = j;
        }
    }
    for (int j = 0; j < DNAseq2.Count; j++)
    {
        if ((pedge2[pos2 + 1].X == DNAseq2[j].x) && (pedge2[pos2 + 1].Y == DNAseq2[j].y))
        {
            segment.t22 = j;
        }
    }
    // Refresh the output coordinates from the refined indices.
    segment.x11 = (int)DNAseq1[segment.t11].x;
    segment.y11 = (int)DNAseq1[segment.t11].y;
    segment.x12 = (int)DNAseq1[segment.t12].x;
    segment.y12 = (int)DNAseq1[segment.t12].y;
    segment.x21 = (int)DNAseq2[segment.t21].x;
    segment.y21 = (int)DNAseq2[segment.t21].y;
    segment.x22 = (int)DNAseq2[segment.t22].x;
    segment.y22 = (int)DNAseq2[segment.t22].y;
r:
    return (segment);
}
/// <summary>
/// Loads the image named in fileNameTextBox, resizes it to 400x400, and runs
/// three detectors on a denoised grayscale copy: Hough circles, probabilistic
/// Hough lines, and contour-approximation triangles/rectangles. Each shape
/// family is drawn onto its own image box; per-stage timings go in the form
/// title. Does nothing when the text box is empty.
/// </summary>
public void PerformShapeDetection()
{
    if (fileNameTextBox.Text != String.Empty)
    {
        StringBuilder msgBuilder = new StringBuilder("Performance: ");

        //Load the image from file and resize it for display
        Image<Bgr, Byte> img =
            new Image<Bgr, byte>(fileNameTextBox.Text)
            .Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

        //Convert the image to grayscale and filter out the noise
        UMat uimage = new UMat();
        CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

        //use image pyr to remove noise (down- then up-sampling acts as a low-pass filter)
        UMat pyrDown = new UMat();
        CvInvoke.PyrDown(uimage, pyrDown);
        CvInvoke.PyrUp(pyrDown, uimage);

        //Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();

        #region circle detection
        Stopwatch watch = Stopwatch.StartNew();
        double cannyThreshold = 180.0;
        double circleAccumulatorThreshold = 120;
        CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);
        watch.Stop();
        msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        #region Canny and edge detection
        watch.Reset();
        watch.Start();
        double cannyThresholdLinking = 120.0;
        UMat cannyEdges = new UMat();
        CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
        LineSegment2D[] lines = CvInvoke.HoughLinesP(
            cannyEdges,
            1,               //Distance resolution in pixel-related units
            Math.PI / 45.0,  //Angle resolution measured in radians.
            20,              //threshold
            30,              //min Line width
            10);             //gap between lines
        watch.Stop();
        msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        #region Find triangles and rectangles
        watch.Reset();
        watch.Start();
        List<Triangle2DF> triangleList = new List<Triangle2DF>();
        List<RotatedRect> boxList = new List<RotatedRect>(); //a box is a rotated rectangle
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    // Simplify the contour; tolerance is 5% of its perimeter.
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                    if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                    {
                        if (approxContour.Size == 3) //The contour has 3 vertices, it is a triangle
                        {
                            Point[] pts = approxContour.ToArray();
                            triangleList.Add(new Triangle2DF(
                                                 pts[0],
                                                 pts[1],
                                                 pts[2]
                                                 ));
                        }
                        else if (approxContour.Size == 4) //The contour has 4 vertices.
                        {
                            #region determine if all the angles in the contour are within [80, 100] degree
                            bool isRectangle = true;
                            Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(
                                    edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }
                            #endregion

                            if (isRectangle)
                            {
                                boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            }
                        }
                    }
                }
            }
        }
        watch.Stop();
        msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        originalImageBox.Image = img;
        this.Text = msgBuilder.ToString();

        #region draw triangles and rectangles
        // Draw onto a fresh black canvas rather than the source image.
        Mat triangleRectangleImage = new Mat(img.Size, DepthType.Cv8U, 3);
        triangleRectangleImage.SetTo(new MCvScalar(0));
        foreach (Triangle2DF triangle in triangleList)
        {
            CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(triangle.GetVertices(), Point.Round), true,
                               new Bgr(Color.DarkBlue).MCvScalar, 2);
        }
        foreach (RotatedRect box in boxList)
        {
            CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true,
                               new Bgr(Color.DarkOrange).MCvScalar, 2);
        }
        triangleRectangleImageBox.Image = triangleRectangleImage;
        #endregion

        #region draw circles
        Mat circleImage = new Mat(img.Size, DepthType.Cv8U, 3);
        circleImage.SetTo(new MCvScalar(0));
        foreach (CircleF circle in circles)
        {
            CvInvoke.Circle(circleImage, Point.Round(circle.Center), (int)circle.Radius, new Bgr(Color.Brown).MCvScalar, 2);
        }
        circleImageBox.Image = circleImage;
        #endregion

        #region draw lines
        Mat lineImage = new Mat(img.Size, DepthType.Cv8U, 3);
        lineImage.SetTo(new MCvScalar(0));
        foreach (LineSegment2D line in lines)
        {
            CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(Color.Green).MCvScalar, 2);
        }
        lineImageBox.Image = lineImage;
        #endregion
    }
}
// Partial Match Algorithm using color feature
// I used the framework for the partial matching algorithm using turning angle
// but using the differences of the color on the edge
//
// Overview: the smaller contour "DNA" is reversed and slid (in steps of
// Constants.STEP) along a replicated copy of the larger one. For each shift,
// runs of near-identical edge colors are counted; the longest run over all
// shifts becomes the candidate match. Its endpoints are mapped back to the
// original sequences, and finally the matched edge is culled down to its
// longest axis-aligned straight segment via polygon approximation.
public static Match partialColorMatch(List<Phi> DNAseq1, List<Phi> DNAseq2)
{
    bool flag = true; // true => DNAseq1 is the larger sequence, false => DNAseq2 is
    // ToDo: Compare the control points in contours between two parts
    Match segment; // create an empty match segment
    segment.t11 = 0;
    segment.t12 = 0;
    segment.t21 = 0;
    segment.t22 = 0;
    List<Phi> seq1, seq2; // two empty List of edge maps
    // NOTE(review): `best` and `offset` are assigned but never updated below;
    // they look like leftovers from the turning-angle version of this routine.
    int best = 0, max_match;
    int offset = 0, length = 0;
    if (DNAseq1.Count > DNAseq2.Count) // if the contour in first part has more control points than the second part
    {
        seq1 = replicateDNA(DNAseq1); //replicate the larger DNA
        seq2 = DNAseq2.ToList();      //reverse the smaller one
        seq2.Reverse();
    }
    else
    {
        flag = false;
        seq1 = replicateDNA(DNAseq2); // if the first one has less control point, attach all the control points of the second part
        seq2 = DNAseq1.ToList();      //reverse the smaller one
        seq2.Reverse();
    }
    List<int> zc = new List<int>();     // per-shift: length of the longest zero-difference run
    List<int> starts = new List<int>(); // per-shift: start index (in seq2) of that run, or -1
    int iteration = 0;
    for (int shift = 0; shift < seq1.Count - seq2.Count; shift += Constants.STEP)
    {
        List<int> diff = new List<int>();
        bool flag1 = false; // true while we are inside a zero-difference run
        int start = 0, end = 0;
        // TODO: change the differences into color difference (done)
        List<int> zeroCounts = new List<int>(); // lengths of all runs seen at this shift
        int zeroCount = 0;
        List<int> starts2 = new List<int>();    // start indices of those runs
        // TODO: need to add a tolerance level for some random non 0 differences
        int tolerCount = 0; // tolerance count for random non 0s.
        for (int i = 0; i < seq2.Count; ++i)
        {
            int difference = Metrics.colorDifference(seq1[i + shift].color, seq2[i].color);
            // if difference==0, flag
            // if difference!=0, unflag
            if (difference <= 0)
            {
                // if it is in unflag state, mark the point as starting point
                if (!flag1)
                {
                    flag1 = true;
                    start = i;
                    //starts.Add(start);
                    starts2.Add(start);
                }
                // count the number of zero difference points in this section
                zeroCount++;
                tolerCount = 0;
            }
            else
            {
                // A non-zero difference: forgive up to COLOR_TOLERANCE of them
                // in a row before the current run is terminated.
                if (tolerCount <= Constants.COLOR_TOLERANCE)
                {
                    if (flag1)
                    {
                        zeroCount++;
                        tolerCount++;
                    }
                }
                else
                {
                    if (flag1)
                    {
                        zeroCounts.Add(zeroCount); // add to a upper level storage
                        zeroCount = 0;             // reset the counter
                        flag1 = false;             // unflag
                        tolerCount = 0;
                    }
                }
            }
            diff.Add(difference);
        }
        // NOTE(review): debug leftover — prints when the 34th shift is reached.
        if (iteration == 33)
        {
            Console.WriteLine("33");
        }
        if (zeroCounts.Count == 0)
        {
            starts.Add(-1);
        }
        else
        {
            // start of the longest run at this shift
            starts.Add(starts2[zeroCounts.IndexOf(zeroCounts.Max())]);
        }
        if (zeroCounts.Count == 0)
        {
            zc.Add(0);
        }
        else
        {
            zc.Add(zeroCounts.Max());
        }
        // TTODO: implement a histogram algorithm for color match
        //max_match = colorHistogram(diff, seq2, ref start, ref end, Util.DELTA_THETA);
        max_match = 0;
        iteration++;
    }
    Console.WriteLine("Max:" + zc.Max());
    if (zc.Max() == 0)
    {
        // No color run found anywhere: skip straight to filling in the
        // (zeroed) output coordinates.
        goto a;
    }
    // Find the shift with the globally longest run (last one wins on ties).
    int t_shift = 0;
    int s_start = 0;
    for (int i = 0; i < zc.Count; i++)
    {
        if (zc[i] == zc.Max())
        {
            t_shift = Constants.STEP * i;
            s_start = starts[i];
        }
    }
    int startPos1 = t_shift + s_start;       // run start in the replicated larger sequence
    int endPos1 = startPos1 + zc.Max();
    int startPos2 = s_start;                 // run start in the reversed smaller sequence
    int endPos2 = startPos2 + zc.Max();
    length = zc.Max();
    // check if the algorithm get the correct position of the matching color
    Console.WriteLine("Flag:" + flag);
    Console.WriteLine("Shiftreq:" + startPos1);
    Console.WriteLine("Count:" + DNAseq1.Count);
    Console.WriteLine("P1_start_x" + seq1[startPos1].x);
    Console.WriteLine("P1_start_y" + seq1[startPos1].y);
    Console.WriteLine("P1_end_x" + seq1[endPos1].x);
    Console.WriteLine("P1_end_y" + seq1[endPos1].y);
    Console.WriteLine("P2_start_x" + seq2[startPos2].x);
    Console.WriteLine("P2_start_y" + seq2[startPos2].y);
    Console.WriteLine("P2_end_x" + seq2[endPos2].x);
    Console.WriteLine("P2_end_y" + seq2[endPos2].y);
    // correct for all the code above
    // regression analysis for the relationship between seq and DNA
    // flag=true for 3*3 frag5 and frag6
    //
    // Map the run endpoints back to indices of the ORIGINAL (unreplicated,
    // unreversed) sequences by coordinate lookup, wrapping around the contour.
    if (flag)
    {
        for (int j = 0; j < DNAseq1.Count; j++)
        {
            if ((seq1[startPos1].x == DNAseq1[j].x) && (seq1[startPos1].y == DNAseq1[j].y))
            {
                segment.t11 = j;
                segment.t12 = j + zc.Max();
                if (segment.t12 >= DNAseq1.Count)
                {
                    segment.t12 -= DNAseq1.Count;
                }
            }
        }
        for (int j = 0; j < DNAseq2.Count; j++)
        {
            if ((seq2[startPos2].x == DNAseq2[j].x) && (seq2[startPos2].y == DNAseq2[j].y))
            {
                segment.t21 = j;
                segment.t22 = j - zc.Max(); // seq2 was reversed, so walk backwards
                if (segment.t22 < 0)
                {
                    segment.t22 += DNAseq2.Count;
                }
            }
        }
    }
    else
    {
        // Mirror case: seq1 came from DNAseq2 and seq2 from DNAseq1.
        for (int j = 0; j < DNAseq2.Count; j++)
        {
            if ((seq1[startPos1].x == DNAseq2[j].x) && (seq1[startPos1].y == DNAseq2[j].y))
            {
                segment.t21 = j;
                segment.t22 = j + zc.Max();
                if (segment.t22 >= DNAseq2.Count)
                {
                    segment.t22 -= DNAseq2.Count;
                }
            }
        }
        for (int j = 0; j < DNAseq1.Count; j++)
        {
            if ((seq2[startPos2].x == DNAseq1[j].x) && (seq2[startPos2].y == DNAseq1[j].y))
            {
                segment.t11 = j;
                segment.t12 = j - zc.Max();
                if (segment.t12 < 0)
                {
                    segment.t12 += DNAseq1.Count;
                }
            }
        }
    }
    // fine code below
a:
    segment.x11 = (int)DNAseq1[segment.t11].x;
    segment.y11 = (int)DNAseq1[segment.t11].y;
    segment.x12 = (int)DNAseq1[segment.t12].x;
    segment.y12 = (int)DNAseq1[segment.t12].y;
    segment.x21 = (int)DNAseq2[segment.t21].x;
    segment.y21 = (int)DNAseq2[segment.t21].y;
    segment.x22 = (int)DNAseq2[segment.t22].x;
    segment.y22 = (int)DNAseq2[segment.t22].y;
    // correct at this point
    /*if (best == 0) segment.confidence = 0; else segment.confidence = Math.Sqrt((double)(length * length) / best); */
    segment.confidence = length;
    Console.WriteLine(segment.ToString());
    // all the code above is the matching segment without considering edge feature
    // we need to consider edge feature at this point
    // and cull out the matching edge that does not match
    // consider the most simple case: straight line without rotating
    // then to edges with turning angles but still without rotating
    // then to general case
    // Step 1: extract the portion of DNA that forms the matching edge (done)
    List<Phi> edge1 = new List<Phi>(); // valid edge in image 1
    List<Phi> edge2 = new List<Phi>(); // valid edge in image 2
    for (int i = Math.Min(segment.t11, segment.t12); i < Math.Max(segment.t11, segment.t12); i++)
    {
        edge1.Add(DNAseq1[i]);
    }
    for (int i = Math.Min(segment.t21, segment.t22); i < Math.Max(segment.t21, segment.t22); i++)
    {
        edge2.Add(DNAseq2[i]);
    }
    if (edge1.Count == 0 || edge2.Count == 0)
    {
        goto r; // if there is no matching edge, it is not necessary for culling
    }
    // Step 2: Analyze the edge feature
    // convert edge into contour and simplify it (2 px tolerance, open polyline)
    VectorOfPoint c1;
    VectorOfPoint c2;
    List<Point> pedge1;
    List<Point> pedge2;
    c1 = new VectorOfPoint();
    List<Point> ps = new List<Point>();
    foreach (Phi p in edge1)
    {
        ps.Add(new Point((int)p.x, (int)p.y));
    }
    c1.Push(ps.ToArray());
    CvInvoke.ApproxPolyDP(c1, c1, 2.0, false);
    pedge1 = c1.ToArray().ToList();
    c2 = new VectorOfPoint();
    List<Point> ps2 = new List<Point>();
    foreach (Phi p in edge2)
    {
        ps2.Add(new Point((int)p.x, (int)p.y));
    }
    c2.Push(ps2.ToArray());
    CvInvoke.ApproxPolyDP(c2, c2, 2.0, false);
    pedge2 = c2.ToArray().ToList();
    // Step 3: Cull the edge
    // calculate the turning angle for each edge
    // if the cumulative turning angle change is greater than 90, restart
    // use a brute force longest straight line approach first, this solves a lot of cases
    // (only perfectly vertical or horizontal segments are considered here)
    int maxDistance = -99999;
    int pos1 = 0, pos2 = 0;
    for (int i = 0; i < pedge1.Count - 1; i++)
    {
        if (pedge1[i + 1].X == pedge1[i].X) // vertical segment
        {
            if (Math.Abs(pedge1[i + 1].Y - pedge1[i].Y) > maxDistance)
            {
                maxDistance = Math.Abs(pedge1[i + 1].Y - pedge1[i].Y);
                pos1 = i;
            }
        }
        else if (pedge1[i + 1].Y == pedge1[i].Y) // horizontal segment
        {
            if (Math.Abs(pedge1[i + 1].X - pedge1[i].X) > maxDistance)
            {
                maxDistance = Math.Abs(pedge1[i + 1].X - pedge1[i].X);
                pos1 = i;
            }
        }
    }
    maxDistance = -99999;
    for (int i = 0; i < pedge2.Count - 1; i++)
    {
        if (pedge2[i + 1].X == pedge2[i].X)
        {
            if (Math.Abs(pedge2[i + 1].Y - pedge2[i].Y) > maxDistance)
            {
                maxDistance = Math.Abs(pedge2[i + 1].Y - pedge2[i].Y);
                pos2 = i;
            }
        }
        else if (pedge2[i + 1].Y == pedge2[i].Y)
        {
            if (Math.Abs(pedge2[i + 1].X - pedge2[i].X) > maxDistance)
            {
                maxDistance = Math.Abs(pedge2[i + 1].X - pedge2[i].X);
                pos2 = i;
            }
        }
    }
    // now the new matching edge is calculated, send the result to output
    // (map the chosen segment endpoints back to DNA indices by coordinate)
    for (int j = 0; j < DNAseq1.Count; j++)
    {
        if ((pedge1[pos1].X == DNAseq1[j].x) && (pedge1[pos1].Y == DNAseq1[j].y))
        {
            segment.t11 = j;
        }
    }
    for (int j = 0; j < DNAseq2.Count; j++)
    {
        if ((pedge2[pos2].X == DNAseq2[j].x) && (pedge2[pos2].Y == DNAseq2[j].y))
        {
            segment.t21 = j;
        }
    }
    for (int j = 0; j < DNAseq1.Count; j++)
    {
        if ((pedge1[pos1 + 1].X == DNAseq1[j].x) && (pedge1[pos1 + 1].Y == DNAseq1[j].y))
        {
            segment.t12 = j;
        }
    }
    for (int j = 0; j < DNAseq2.Count; j++)
    {
        if ((pedge2[pos2 + 1].X == DNAseq2[j].x) && (pedge2[pos2 + 1].Y == DNAseq2[j].y))
        {
            segment.t22 = j;
        }
    }
    segment.x11 = (int)DNAseq1[segment.t11].x;
    segment.y11 = (int)DNAseq1[segment.t11].y;
    segment.x12 = (int)DNAseq1[segment.t12].x;
    segment.y12 = (int)DNAseq1[segment.t12].y;
    segment.x21 = (int)DNAseq2[segment.t21].x;
    segment.y21 = (int)DNAseq2[segment.t21].y;
    segment.x22 = (int)DNAseq2[segment.t22].x;
    segment.y22 = (int)DNAseq2[segment.t22].y;
r:
    return segment;
}
/// <summary>
/// Runs the configured shape detectors (circles, lines, rectangles) on a
/// video frame and returns the frame with the detected shapes alpha-blended
/// on top. CUDA paths are used for denoising and alpha compositing when a
/// CUDA device is available; otherwise CPU fallbacks run.
/// </summary>
/// <param name="frame">Input frame to analyse.</param>
/// <param name="detectionVars">Thresholds and calc* switches selecting which detectors run.</param>
/// <returns>The composited result bitmap, or the original frame if compositing produced nothing.</returns>
public static Bitmap PerformShapeDetection(Bitmap frame, ShapeDetectionVariables detectionVars)
{
    StringBuilder msgBuilder = new StringBuilder("Performance: ");

    Image<Bgr, Byte> img = new Image<Bgr, byte>(frame);
    Mat MatImg = img.Mat;
    Mat outputImg = new Mat();
    if (CudaInvoke.HasCuda)
    {
        using (GpuMat gMatSrc = new GpuMat())
        using (GpuMat gMatDst = new GpuMat())
        {
            gMatSrc.Upload(MatImg);
            // NOTE(review): a 1x1 Gaussian kernel is effectively a no-op
            // blur — confirm whether a larger kernel was intended.
            CudaGaussianFilter noiseReducetion = new CudaGaussianFilter(MatImg.Depth, img.NumberOfChannels, MatImg.Depth, img.NumberOfChannels, new Size(1, 1), 0);
            noiseReducetion.Apply(gMatSrc, gMatDst);
            gMatDst.Download(outputImg);
        }
    }
    else
    {
        // CPU fallback: pyramid down+up as a cheap low-pass denoise.
        Mat pyrDown = new Mat();
        CvInvoke.PyrDown(img, pyrDown);
        CvInvoke.PyrUp(pyrDown, img);
        outputImg = img.Mat;
    }
    UMat uimage = new UMat();
    CvInvoke.CvtColor(outputImg, uimage, ColorConversion.Bgr2Gray);

    CircleF[] circles = new CircleF[0];
    if (detectionVars.calcCircles)
    {
        circles = CvInvoke.HoughCircles(
            uimage,
            HoughType.Gradient,
            1.0,
            20.0,
            detectionVars.circleCannyThreshold,
            // HoughCircles rejects an accumulator threshold of 0, so clamp to 1.
            detectionVars.circleAccumulatorThreshold == 0 ? 1 : detectionVars.circleAccumulatorThreshold,
            detectionVars.minradius,
            detectionVars.maxRadius);
    }

    #region Canny and edge detection
    UMat cannyEdges = new UMat();
    CvInvoke.Canny(uimage, cannyEdges, detectionVars.lineCannyThreshold, detectionVars.cannyThresholdLinking);
    LineSegment2D[] lines = new LineSegment2D[0];
    if (detectionVars.calcLines)
    {
        lines = CvInvoke.HoughLinesP(
            cannyEdges,
            1,                           //Distance resolution in pixel-related units
            Math.PI / 45.0,              //Angle resolution measured in radians.
            detectionVars.lineThreshold, //threshold
            detectionVars.minLineWidth,  //min Line width
            10);                         //gap between lines
    }
    #endregion

    #region Find triangles and rectangles
    List<RotatedRect> boxList = new List<RotatedRect>(); //a box is a rotated rectangle
    if (detectionVars.calcRectTri)
    {
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                    if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                    {
                        if (approxContour.Size == 4) //The contour has 4 vertices.
                        {
                            #region determine if all the angles in the contour are within [80, 100] degree
                            bool isRectangle = true;
                            Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(
                                    edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }
                            #endregion

                            if (isRectangle)
                            {
                                boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            }
                        }
                    }
                }
            }
        }
    }
    #endregion

    // Semi-transparent overlay canvas plus a BGRA copy of the frame to
    // composite the shape layers into.
    Image<Bgra, Byte> alphaImgShape = new Image<Bgra, byte>(img.Size.Width, img.Size.Height, new Bgra(0, 0, 0, .5));
    Mat alphaimg = new Mat();
    CvInvoke.CvtColor(img, alphaimg, ColorConversion.Bgr2Bgra);

    #region draw rectangles and triangles
    if (detectionVars.calcRectTri)
    {
        Image<Bgr, Byte> triangleRectangleImage = new Image<Bgr, Byte>(img.Size);
        foreach (RotatedRect box in boxList)
        {
            CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true,
                               new Bgr(0, 255, 0).MCvScalar, 2);
        }
        CvInvoke.AddWeighted(alphaImgShape, .5, BlackTransparent(triangleRectangleImage), .5, 0, alphaImgShape);
        if (CudaInvoke.HasCuda)
        {
            using (GpuMat gMatSrc = new GpuMat())
            using (GpuMat gMatSrc2 = new GpuMat())
            using (GpuMat gMatDst = new GpuMat())
            {
                gMatSrc.Upload(alphaimg);
                gMatSrc2.Upload(alphaImgShape);
                CudaInvoke.AlphaComp(gMatSrc, gMatSrc2, gMatDst, AlphaCompTypes.Plus);
                gMatDst.Download(alphaimg);
            }
        }
        else
        {
            img = Overlay(img, alphaImgShape);
        }
    }
    #endregion

    #region draw circles
    if (detectionVars.calcCircles)
    {
        Image<Bgr, Byte> circleImage = new Image<Bgr, Byte>(img.Size);
        // Only the first 10 circles are drawn.
        foreach (CircleF circle in circles.Take(10))
        {
            CvInvoke.Circle(circleImage, Point.Round(circle.Center), (int)circle.Radius, new Bgr(0, 255, 0).MCvScalar, 2);
        }
        alphaImgShape = new Image<Bgra, byte>(img.Size.Width, img.Size.Height, new Bgra(0, 0, 0, .5));
        CvInvoke.AddWeighted(alphaImgShape, .7, BlackTransparent(circleImage), .5, 0, alphaImgShape);
        if (CudaInvoke.HasCuda)
        {
            using (GpuMat gMatSrc = new GpuMat())
            using (GpuMat gMatSrc2 = new GpuMat())
            using (GpuMat gMatDst = new GpuMat())
            {
                gMatSrc.Upload(alphaimg);
                gMatSrc2.Upload(alphaImgShape);
                CudaInvoke.AlphaComp(gMatSrc, gMatSrc2, gMatDst, AlphaCompTypes.Plus);
                gMatDst.Download(alphaimg);
            }
        }
        else
        {
            img = Overlay(img, alphaImgShape);
        }
    }
    #endregion

    #region draw lines
    if (detectionVars.calcLines)
    {
        Image<Bgr, Byte> lineImage = new Image<Bgr, Byte>(img.Size);
        foreach (LineSegment2D line in lines)
        {
            CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(0, 255, 0).MCvScalar, 2);
        }
        alphaImgShape = new Image<Bgra, byte>(img.Size.Width, img.Size.Height, new Bgra(0, 0, 0, .5));
        CvInvoke.AddWeighted(alphaImgShape, .5, BlackTransparent(lineImage), .5, 0, alphaImgShape);
        if (CudaInvoke.HasCuda)
        {
            using (GpuMat gMatSrc = new GpuMat())
            using (GpuMat gMatSrc2 = new GpuMat())
            using (GpuMat gMatDst = new GpuMat())
            {
                gMatSrc.Upload(alphaimg);
                gMatSrc2.Upload(alphaImgShape);
                CudaInvoke.AlphaComp(gMatSrc, gMatSrc2, gMatDst, AlphaCompTypes.Plus);
                gMatDst.Download(alphaimg);
            }
        }
        else
        {
            img = Overlay(img, alphaImgShape);
        }
    }
    #endregion

    GC.Collect(); // first time I've had to use this but this program will use as much memory as possible, resulting in corrptions
    return (alphaimg.Bitmap ?? frame);
}
/// <summary>
/// Prompts the user for an image file, then runs circle (Hough), line
/// (probabilistic Hough) and triangle/rectangle (contour approximation)
/// detection on it. Each shape family is drawn into its own picture box and
/// per-stage timings are shown in the form title.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // StringBuilder is much faster than string concatenation when text is
    // appended repeatedly (see https://blog.naver.com/impressives2/221338797755).
    StringBuilder msgBuilder = new StringBuilder("Performance: ");

    // OpenFileDialog is IDisposable; dispose it when done.
    using (OpenFileDialog ofd = new OpenFileDialog())
    {
        // BUG FIX: the original code fell through when the dialog was
        // cancelled and processed a stale (or null) `img`, crashing on the
        // first CvInvoke call if no image had ever been loaded.
        if (ofd.ShowDialog() != DialogResult.OK)
        {
            return;
        }

        // Load the selected image and resize it to 400x400 for display.
        img = new Image<Bgr, Byte>(ofd.FileName).Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);
    }

    // Convert to grayscale for the detectors.
    UMat uimage = new UMat();
    CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

    // Pyramid down- then up-sampling acts as a cheap low-pass filter to
    // remove noise before edge detection.
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(uimage, pyrDown);
    CvInvoke.PyrUp(pyrDown, uimage);

    // (Removed: an unused `Image<Gray, Byte> gray` local that duplicated the
    // denoising work above without ever being read.)

    #region circle detection
    // Stopwatch measures each detection stage.
    Stopwatch watch = Stopwatch.StartNew();
    double cannyThreshold = 180.0;
    double circleAccumulatorThreshold = 120;
    CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);
    watch.Stop();
    msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
    #endregion

    #region Canny and edge detection
    watch.Reset();
    watch.Start();
    double cannyThresholdLinking = 120.0;
    UMat cannyEdges = new UMat();
    CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
    LineSegment2D[] lines = CvInvoke.HoughLinesP(
        cannyEdges,
        1,               //Distance resolution in pixel-related units
        Math.PI / 45.0,  //Angle resolution measured in radians.
        20,              //threshold
        30,              //min Line width
        10);             //gap between lines
    watch.Stop();
    msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
    #endregion

    #region Find triangles and rectangles
    watch.Reset();
    watch.Start();
    List<Triangle2DF> triangleList = new List<Triangle2DF>();
    List<RotatedRect> boxList = new List<RotatedRect>(); //a box is a rotated rectangle
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        int count = contours.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
            using (VectorOfPoint approxContour = new VectorOfPoint())
            {
                // Simplify the contour; tolerance is 5% of its perimeter.
                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                if (CvInvoke.ContourArea(approxContour, false) > 100) //only consider contours with area greater than 100
                {
                    if (approxContour.Size == 3) //The contour has 3 vertices, it is a triangle
                    {
                        Point[] pts = approxContour.ToArray();
                        triangleList.Add(new Triangle2DF(
                                             pts[0],
                                             pts[1],
                                             pts[2]
                                             ));
                    }
                    else if (approxContour.Size == 4) //The contour has 4 vertices.
                    {
                        // Comment fixed to match the code: this variant uses
                        // a [90, 110] degree window, not [80, 100].
                        #region determine if all the angles in the contour are within [90, 110] degree
                        bool isRectangle = true;
                        Point[] pts = approxContour.ToArray();
                        LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

                        for (int j = 0; j < edges.Length; j++)
                        {
                            double angle = Math.Abs(
                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                            if (angle < 90 || angle > 110)
                            {
                                isRectangle = false;
                                break;
                            }
                        }
                        #endregion

                        if (isRectangle)
                        {
                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                        }
                    }
                }
            }
        }
    }
    watch.Stop();
    msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
    #endregion

    // Show the untouched source image and the timing summary.
    originalImageBox.Image = img;
    this.Text = msgBuilder.ToString();

    #region draw triangles and rectangles
    Image<Bgr, Byte> triangleRectangleImage = img.CopyBlank();
    foreach (Triangle2DF triangle in triangleList)
    {
        triangleRectangleImage.Draw(triangle, new Bgr(Color.DarkBlue), 2);
    }
    foreach (RotatedRect box in boxList)
    {
        triangleRectangleImage.Draw(box, new Bgr(Color.DarkOrange), 2);
    }
    triangleRectangleImageBox.Image = triangleRectangleImage;
    #endregion

    #region draw circles
    // (Removed: a stray `img.CopyBlank();` whose result was discarded.)
    Image<Bgr, Byte> circleImage = img.CopyBlank();
    foreach (CircleF circle in circles)
    {
        circleImage.Draw(circle, new Bgr(Color.Brown), 2);
    }
    circleImageBox.Image = circleImage;
    #endregion

    #region draw lines
    Image<Bgr, Byte> lineImage = img.CopyBlank();
    foreach (LineSegment2D line in lines)
    {
        lineImage.Draw(line, new Bgr(Color.Green), 2);
    }
    lineImageBox.Image = lineImage;
    #endregion
}
/// <summary>
/// Finds contours in a binary image and classifies them: 4-vertex contours
/// with area > 250 are treated as black borders (their rotated bounding boxes
/// are collected), everything else goes to <paramref name="othercontours_black"/>.
/// Debug images marking raw contour points (red) and accepted approximated
/// corners (green) are written to C:\Emgu\Dump.
/// </summary>
/// <param name="src">Binary source image to scan for contours.</param>
/// <param name="blackborders">Receives the raw contours accepted as borders.</param>
/// <param name="Black_boxList">Receives the min-area rotated rectangle of each accepted border.</param>
/// <param name="othercontours_black">Receives large contours that were not 4-sided.</param>
public void getBlackContours(Image<Gray, Byte> src, VectorOfVectorOfPoint blackborders, List<RotatedRect> Black_boxList, VectorOfVectorOfPoint othercontours_black)
{
    //blackborders = new VectorOfVectorOfPoint();//list of black borders
    //Black_boxList = new List<RotatedRect>(); //a box is a rotated rectangle
    //othercontours_black = new VectorOfVectorOfPoint();

    // BUG FIX: the original leaked every GDI+ object it created (two
    // src.ToBitmap() temporaries, both debug Bitmaps, both Graphics, and both
    // Brushes) on every call — a GDI handle leak. All are now disposed.
    using (Bitmap srcBitmapGood = src.ToBitmap())
    using (Bitmap srcBitmapBad = src.ToBitmap())
    using (Bitmap TMPGood = new Bitmap(srcBitmapGood, src.Width, src.Height))
    using (Bitmap TMPBad = new Bitmap(srcBitmapBad, src.Width, src.Height))
    using (Graphics gGood = Graphics.FromImage(TMPGood))
    using (Graphics gBad = Graphics.FromImage(TMPBad))
    //Pen RedPen = new Pen(Color.Red);
    //Pen GreenPen = new Pen(Color.Green);
    using (Brush RedBrush = new SolidBrush(Color.Red))
    using (Brush GreenBrush = new SolidBrush(Color.Green))
    {
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(src, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            for (int i = 0; i < contours.Size; i++)
            {
                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    // Mark every raw contour point on the "BAD" debug image.
                    Point[] ptsContour = contour.ToArray();
                    for (int k = 0; k < ptsContour.Length; k++)
                    {
                        gBad.FillEllipse(RedBrush, ptsContour[k].X, ptsContour[k].Y, 6, 6);
                    }

                    // Simplify the contour; tolerance is 5% of its perimeter.
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                    if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                    {
                        // Mark accepted approximated corners on the "GOOD" image.
                        Point[] ptsApprox = approxContour.ToArray();
                        //TMP.Draw(pts, new Bgr(Color.DarkOrange), 5); //!!!!!!!!!!!!!!!
                        for (int k = 0; k < ptsApprox.Length; k++)
                        {
                            gGood.FillEllipse(GreenBrush, ptsApprox[k].X, ptsApprox[k].Y, 6, 6);
                        }

                        if (CvInvoke.ContourArea(approxContour, false) > 250 && approxContour.Size == 4)
                        {
                            // Quadrilateral: treat as a black border.
                            Black_boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            blackborders.Push(contour);
                        }
                        else
                        {
                            othercontours_black.Push(contour);
                            //Point[] pts = approxContour.ToArray();
                            //other.Add(PointCollection.PolyLine(pts, true));
                        }
                    }
                }
            }
        }

        // Save the debug images before the bitmaps are disposed.
        TMPGood.Save("C:\\Emgu\\Dump\\Black contour corners GOOD.png", System.Drawing.Imaging.ImageFormat.Png);
        TMPBad.Save("C:\\Emgu\\Dump\\Black contour corners BAD.png", System.Drawing.Imaging.ImageFormat.Png);
    }
}
/// <summary>
/// Detect basic elements (such as circle, line, rectangle and triangle)
/// in the image file at <paramref name="argPath"/>.
/// </summary>
/// <param name="argPath">Path of the image file to analyse (resized to 400x400 internally).</param>
/// <param name="argtMode">Detection mode; circle detection only runs for <c>DetectMode.IncludeCircle</c>.</param>
/// <returns>
/// A result bundling the resized image, detected triangles, boxes, circles
/// (null when circle detection was skipped), lines, and a timing summary.
/// </returns>
public static DetectBasicEleementResult DetectBasicElement(string argPath, DetectMode argtMode)
{
    StringBuilder msgBuilder = new StringBuilder("Performance: ");

    //Load the image from file and resize it for display
    Image<Bgr, byte> img = new Image<Bgr, byte>(argPath).Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

    //Convert the image to grayscale and filter out the noise
    UMat uimage = new UMat();
    CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

    //use image pyr to remove noise (down- then up-sampling acts as a low-pass filter)
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(uimage, pyrDown);
    CvInvoke.PyrUp(pyrDown, uimage);

    //Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();

    #region circle detection
    // Stays null unless circle detection is requested; callers must handle that.
    CircleF[] circles = null;
    Stopwatch watch = new Stopwatch();
    double cannyThreshold = 180.0;
    if (argtMode == DetectMode.IncludeCircle)
    {
        watch = Stopwatch.StartNew();
        double circleAccumulatorThreshold = 120;
        circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);
        watch.Stop();
        msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
    }
    #endregion

    #region Canny and edge detection
    watch.Reset();
    watch.Start();
    double cannyThresholdLinking = 120.0;
    UMat cannyEdges = new UMat();
    CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
    LineSegment2D[] lines = CvInvoke.HoughLinesP(cannyEdges,
        1,               //Distance resolution in pixel-related units
        Math.PI / 45.0,  //Angle resolution measured in radians.
        20,              //threshold
        30,              //min Line width
        10);             //gap between lines
    watch.Stop();
    msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
    #endregion

    #region Find triangles and rectangles
    watch.Reset();
    watch.Start();
    List<Triangle2DF> triangleList = new List<Triangle2DF>();
    List<RotatedRect> boxList = new List<RotatedRect>(); //a box is a rotated rectangle
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        int count = contours.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
            using (VectorOfPoint approxContour = new VectorOfPoint())
            {
                // Simplify the contour; tolerance is 5% of its perimeter.
                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                {
                    if (approxContour.Size == 3) //The contour has 3 vertices, it is a triangle
                    {
                        Point[] pts = approxContour.ToArray();
                        triangleList.Add(new Triangle2DF(pts[0], pts[1], pts[2]));
                    }
                    else if (approxContour.Size == 4) //The contour has 4 vertices.
                    {
                        #region determine if all the angles in the contour are within [80, 100] degree
                        bool isRectangle = true;
                        Point[] pts = approxContour.ToArray();
                        LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

                        for (int j = 0; j < edges.Length; j++)
                        {
                            double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                            if (angle < 80 || angle > 100)
                            {
                                isRectangle = false;
                                break;
                            }
                        }
                        #endregion

                        if (isRectangle)
                        {
                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                        }
                    }
                }
            }
        }
    }
    watch.Stop();
    msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
    #endregion

    return (new DetectBasicEleementResult(img, triangleList, boxList, circles, lines, msgBuilder.ToString()));
}
/// <summary>
/// Get the contour that defines the blob
/// </summary>
/// <returns>The contour of the blob</returns>
public Point[] GetContour()
{
    // Fill a temporary native vector from the blob handle, copy it into a
    // managed array, and release the native memory before returning.
    VectorOfPoint contourPoints = new VectorOfPoint();
    try
    {
        cvbCvBlobGetContour(_ptr, contourPoints.Ptr);
        return contourPoints.ToArray();
    }
    finally
    {
        contourPoints.Dispose();
    }
}
/// <summary>
/// Finds the large outer boundaries and the smaller artefact objects in a binary
/// background image, using the contour hierarchy to relate objects to the
/// boundary that contains them.
/// </summary>
/// <param name="binaryBackground">Binary (foreground/background) image to analyse.</param>
/// <param name="boundries">Contours whose area is at least 1000 px and which sit at the top of the hierarchy.</param>
/// <param name="artefacts">Contours of at least 250 px that lie inside a boundary and could not be approximated as a rectangle.</param>
/// <param name="boxes">Rotated rectangles fitted to the artefacts that did look rectangular.</param>
public void GetBoundries(Image <Gray, Byte> binaryBackground, out List <Point[]> boundries, out List <Point[]> artefacts, out List <RotatedRect> boxes)
{
    //Find outer boundries
    double minimumContourArea = 250;   // smallest artefact worth keeping
    double minimumBoundryArea = 1000;  // smallest contour treated as an arena boundary
    //double approximationFactor = 0.001;
    List <Point[]> allBoundries = new List <Point[]>();
    List <Point[]> allObjects = new List <Point[]>();
    List <RotatedRect> boxList = new List <RotatedRect>();
    // Median blur suppresses salt-and-pepper noise before contour extraction.
    using (Image <Gray, Byte> filteredBinary = binaryBackground.SmoothMedian(7))
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    using (Mat hierarchy = new Mat())
    {
        CvInvoke.FindContours(filteredBinary, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);
        // The hierarchy Mat is read back as a 4-channel image; the Alpha channel is
        // the 4th hierarchy component, i.e. the parent-contour index.
        var temp = hierarchy.ToImage <Bgra, Byte>();
        int count = contours.Size;
        List <int> boundryIds = new List <int>();
        // Pass 1: collect boundary contours (large area, parent component == 0).
        // NOTE(review): Alpha == 0 reads as "parent index 0" rather than the usual
        // -1 for top-level contours — confirm this matches the expected hierarchy.
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
            {
                double contourArea = CvInvoke.ContourArea(contour);
                if (contourArea >= minimumBoundryArea)
                {
                    Bgra currentContour = temp[0, i];
                    if (currentContour.Alpha == 0)
                    {
                        allBoundries.Add(contour.ToArray());
                        boundryIds.Add(i);
                    }
                }
            }
        }
        // Pass 2: collect artefacts — contours that are not boundaries themselves
        // but whose parent (Alpha) is one of the boundary ids found above.
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
            {
                double contourArea = CvInvoke.ContourArea(contour);
                if (contourArea >= minimumContourArea)
                {
                    Bgra currentContour = temp[0, i];
                    if (!boundryIds.Contains(i) && boundryIds.Contains((int)currentContour.Alpha))
                    {
                        bool isRectangle = true;
                        bool isCircle = false; // never set true here; circle detection not implemented
                        //Can the object be approximated as a circle or rectangle?
                        using (VectorOfPoint apxContour = new VectorOfPoint())
                        {
                            // 5% of the perimeter as the polygon-approximation tolerance.
                            double epsilon = CvInvoke.ArcLength(contour, true) * 0.05;
                            CvInvoke.ApproxPolyDP(contour, apxContour, epsilon, true);
                            if (apxContour.Size == 4) //The contour has 4 vertices.
                            {
                                // Rectangle test: every corner angle must fall in [70, 110] degrees.
                                Point[] pts = apxContour.ToArray();
                                LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                                for (int j = 0; j < edges.Length; j++)
                                {
                                    double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                    if (angle < 70 || angle > 110)
                                    {
                                        isRectangle = false;
                                        break;
                                    }
                                }
                                if (isRectangle)
                                {
                                    boxList.Add(CvInvoke.MinAreaRect(apxContour));
                                }
                            }
                            else
                            {
                                isRectangle = false;
                            }
                        }
                        // Anything that is neither rectangle nor circle is kept as a raw artefact.
                        if (!isRectangle && !isCircle)
                        {
                            allObjects.Add(contour.ToArray());
                        }
                    }
                }
            }
        }
    }
    //Find mouse
    //mousePoints = null;
    //using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    //{
    //    CvInvoke.FindContours(binaryMouse, contours, null, RetrType.External, ChainApproxMethod.ChainApproxNone);
    //    int count = contours.Size;
    //    double maxArea = 0;
    //    for (int j = 0; j < count; j++)
    //    {
    //        using (VectorOfPoint contour = contours[j])
    //        {
    //            double contourArea = CvInvoke.ContourArea(contour);
    //            if (contourArea >= maxArea)
    //            {
    //                maxArea = contourArea;
    //                mousePoints = contour.ToArray();
    //            }
    //        }
    //    }
    //}
    boundries = allBoundries;
    artefacts = allObjects;
    boxes = boxList;
    //Check if any contours can be approximated as shapes
    //We now have a list of boundries, if there's more than one it means something is sticking across the screen
    if (allBoundries.Count > 1)
    {
        //Need to find points from all boundries that are effectively parallel
        // NOTE(review): intentionally empty — multi-boundary handling not implemented yet.
    }
    //Image<Bgr, Byte> allContourImage = FirstFrame.Clone();
    //allContourImage.DrawPolyline(mousePoints, true, new Bgr(Color.Yellow), 2);
    //allContourImage.DrawPolyline(allBoundries.ToArray(), true, new Bgr(Color.Red), 2);
    //allContourImage.DrawPolyline(allObjects.ToArray(), true, new Bgr(Color.LightGreen), 2);
    //foreach (var box in boxList)
    //{
    //    allContourImage.Draw(box.GetVertices().Select(x => new Point((int)x.X, (int)x.Y)).ToArray(), new Bgr(Color.Aqua), 2);
    //}
}
/// <summary>
/// Processes the current camera frame end-to-end: background subtraction,
/// skin filtering with the configured morphological and smoothing operations,
/// largest-contour / convex-hull / convexity-defect analysis to find finger
/// tips, and finally translation of the tracked control point into cursor
/// movement and published messages.
/// </summary>
private void ProcessFrame()
{
    try
    {
        #region Background/Foreground
        // Motion mask: difference between the current frame and the frame history.
        Image<Bgr, byte> difference = BackgroundSubstractionOptions.Substract(_currentFrame, _frameHistoryBuffer);
        // Bounding rectangle of the detected foreground region, if any.
        Rectangle? handArea = ForegoundExtractionOptions.HighlightForeground(difference);
        Image<Bgr, byte> skinDetectionFrame = _currentFrame.Copy();
        if (handArea.HasValue)
            ForegoundExtractionOptions.CutBackground(skinDetectionFrame, handArea.Value);
        #endregion

        #region Skin filtering / Morphological / Smooth filtering
        // Reduce the frame to a grayscale mask of skin-coloured pixels.
        Image<Gray, byte> skinDetectionFrameGray = SkinFilteringOptions.ActiveItem.FilterFrame(skinDetectionFrame);

        // Apply the user-configured stack of dilate/erode operations. The read lock
        // protects the stack from concurrent edits by the UI.
        // NOTE(review): the lock is not released in a finally block, so an exception
        // inside the loop would leave it held — consider try/finally.
        MorphologicalFilteringOptions.StackSync.EnterReadLock();
        foreach (var operation in MorphologicalFilteringOptions.OperationStack)
        {
            if (operation.FilterType == Model.Enums.MorphologicalFilterType.Dilatation)
            {
                CvInvoke.Dilate(skinDetectionFrameGray, skinDetectionFrameGray, operation.GetKernel(),
                    new Point(operation.KernelAnchorX, operation.KernelAnchorY), operation.Intensity,
                    operation.KernelBorderType, new MCvScalar(operation.KernelBorderThickness));
            }
            else
            {
                CvInvoke.Erode(skinDetectionFrameGray, skinDetectionFrameGray, operation.GetKernel(),
                    new Point(operation.KernelAnchorX, operation.KernelAnchorY), operation.Intensity,
                    operation.KernelBorderType, new MCvScalar(operation.KernelBorderThickness));
            }
        }
        MorphologicalFilteringOptions.StackSync.ExitReadLock();
        skinDetectionFrameGray = SmoothFilteringOptions.FilterFrame(skinDetectionFrameGray);
        #endregion

        #region Contours / ConvexHull / ConvexityDefects
        Image<Bgr, byte> fingerTrackerFrame = _currentFrame.Copy();
        List<Point> fingers = new List<Point>();

        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(skinDetectionFrameGray.Copy(), contours, null, RetrType.List,
                FingerTrackingOptions.ApproxMethod);

            if (contours.Size > 0)
            {
                // Linear scan for the contour with the largest area — assumed to be the hand.
                VectorOfPoint biggestContour = contours[0];
                if (contours.Size > 1)
                {
                    for (int i = 1; i < contours.Size; i++)
                    {
                        if (CvInvoke.ContourArea(contours[i], false) >
                            CvInvoke.ContourArea(biggestContour, false))
                            biggestContour = contours[i];
                    }
                }

                if (CvInvoke.ContourArea(biggestContour, false) > FingerTrackingOptions.MinContourArea)
                {
                    using (VectorOfPoint contour = biggestContour)
                    {
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            // Simplify the contour; tolerance scales with the perimeter.
                            CvInvoke.ApproxPolyDP(contour, approxContour,
                                CvInvoke.ArcLength(contour, true) * FingerTrackingOptions.PerimeterScalingFactor.Value, true);
                            fingerTrackerFrame.Draw(approxContour.ToArray(), new Bgr(FingerTrackingOptions.ContourHighlightColor), 2);

                            // Two hulls: point form for drawing, index form for ConvexityDefects.
                            // NOTE(review): convexHull/intHull/defects/m are never disposed — native
                            // memory is left to the finalizer; consider using blocks.
                            VectorOfPoint convexHull = new VectorOfPoint();
                            VectorOfInt intHull = new VectorOfInt();
                            CvInvoke.ConvexHull(approxContour, convexHull, FingerTrackingOptions.ConvexHullCW);
                            CvInvoke.ConvexHull(approxContour, intHull, FingerTrackingOptions.ConvexHullCW);
                            fingerTrackerFrame.DrawPolyline(convexHull.ToArray(), true, new Bgr(FingerTrackingOptions.ConvexHullColor), 2);

                            // Rotated bounding rect of the hand; its center approximates the palm center.
                            var countourRect = CvInvoke.MinAreaRect(approxContour);
                            fingerTrackerFrame.Draw(new CircleF(new PointF(countourRect.Center.X, countourRect.Center.Y), 3),
                                new Bgr(FingerTrackingOptions.DefectLinesColor), 2);

                            Mat defects = new Mat();
                            CvInvoke.ConvexityDefects(approxContour, intHull, defects);

                            if (!defects.IsEmpty)
                            {
                                var contourPoints = approxContour.ToArray();
                                // Each defect row holds {start index, end index, depth-point index, depth}.
                                Matrix<int> m = new Matrix<int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
                                defects.CopyTo(m);

                                for (int i = 0; i < m.Rows; i++)
                                {
                                    int startIdx = m.Data[i, 0];
                                    int endIdx = m.Data[i, 1];
                                    int depthIdx = m.Data[i, 2];
                                    Point startPoint = contourPoints[startIdx];
                                    Point endPoint = contourPoints[endIdx];
                                    Point depthPoint = contourPoints[depthIdx];

                                    // Diagnostic geometry connecting defect points to each other
                                    // and to the palm center.
                                    LineSegment2D startDepthLine = new LineSegment2D(startPoint, depthPoint);
                                    LineSegment2D depthEndLine = new LineSegment2D(depthPoint, endPoint);
                                    LineSegment2D startCenterLine = new LineSegment2D(startPoint,
                                        new Point((int)countourRect.Center.X, (int)countourRect.Center.Y));
                                    LineSegment2D depthCenterLine = new LineSegment2D(depthPoint,
                                        new Point((int)countourRect.Center.X, (int)countourRect.Center.Y));
                                    LineSegment2D endCenterLine = new LineSegment2D(endPoint,
                                        new Point((int)countourRect.Center.X, (int)countourRect.Center.Y));
                                    CircleF startCircle = new CircleF(startPoint, 5);
                                    CircleF depthCircle = new CircleF(depthPoint, 5);
                                    CircleF endCircle = new CircleF(endPoint, 5);

                                    // Defect start points above the palm center are treated as finger tips
                                    // (image Y grows downward, so "<" means higher in the frame).
                                    if (startPoint.Y < countourRect.Center.Y)
                                        fingers.Add(startPoint);

                                    if (!FingerTrackingOptions.TrackOnlyControlPoint)
                                    {
                                        fingerTrackerFrame.Draw(startCircle, new Bgr(FingerTrackingOptions.DefectStartPointHighlightColor), 2);
                                        fingerTrackerFrame.Draw(depthCircle, new Bgr(FingerTrackingOptions.DefectDepthPointHighlightColor), 2);
                                        fingerTrackerFrame.Draw(endCircle, new Bgr(FingerTrackingOptions.DefectEndPointHighlightColor), 2);
                                        fingerTrackerFrame.Draw(startDepthLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                        //fingerTrackerFrame.Draw(depthEndLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                        fingerTrackerFrame.Draw(startCenterLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                        //fingerTrackerFrame.Draw(depthCenterLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                        // fingerTrackerFrame.Draw(endCenterLine, new Bgr(FingerTrackingOptions.DefectLinesColor), 2);
                                    }
                                }

                                // The control point is either the palm center or the topmost finger tip.
                                _lastControlPoint = _currentControlPoint;
                                _currentControlPoint = MouseControlOptions.UseHandCenter ?
                                    new Point((int)countourRect.Center.X, (int)countourRect.Center.Y) :
                                    fingers.FirstOrDefault(f => f.Y == fingers.Min(line => line.Y));
                                fingers.Clear();

                                if (FingerTrackingOptions.TrackOnlyControlPoint)
                                {
                                    // Replace the tracker view with a black frame showing only the control point.
                                    fingerTrackerFrame = new Image<Bgr, byte>(fingerTrackerFrame.Width, fingerTrackerFrame.Height, new Bgr(Color.Black));
                                    fingerTrackerFrame.Draw(new CircleF(_currentControlPoint, 5), new Bgr(Color.Red), 2);
                                }
                            }
                        }
                    }
                }
            }
        }
        #endregion

        #region Mouse control
        // Move the cursor only when both control points are valid (-1 means unset),
        // the point actually moved, and the jump is below 1/10 of the frame size
        // (rejects tracking glitches).
        if (_currentControlPoint.X != -1 && _currentControlPoint.Y != -1 &&
            _lastControlPoint.X != -1 && _lastControlPoint.Y != -1 &&
            _currentControlPoint.X != _lastControlPoint.X && _currentControlPoint.Y != _lastControlPoint.Y &&
            Math.Abs(_currentControlPoint.X - _lastControlPoint.X) < (MouseControlOptions.FrameWidth / 10) &&
            Math.Abs(_currentControlPoint.Y - _lastControlPoint.Y) < (MouseControlOptions.FrameHeight / 10))
        {
            int frameX = _currentControlPoint.X;
            int frameY = _currentControlPoint.Y;
            int moveX = _currentControlPoint.X - _lastControlPoint.X;
            int moveY = _currentControlPoint.Y - _lastControlPoint.Y;
            int sensitiveX = 1;
            int sensitiveY = 1;
            // Positive sensitivity scales frame motion up to screen coordinates;
            // negative scales it down (sign flipped back by the *-1 factors).
            if (MouseControlOptions.MouseSensitive.Value > 0)
            {
                sensitiveX = (int)(((double)MouseControlOptions.ScreenWidth / MouseControlOptions.FrameWidth) * MouseControlOptions.MouseSensitive.Value);
                sensitiveY = (int)(((double)MouseControlOptions.ScreenHeight / MouseControlOptions.FrameHeight) * MouseControlOptions.MouseSensitive.Value);
            }
            else if (MouseControlOptions.MouseSensitive.Value < 0)
            {
                sensitiveX = (int)(((double)MouseControlOptions.FrameWidth / MouseControlOptions.ScreenWidth) * MouseControlOptions.MouseSensitive.Value * -1);
                sensitiveY = (int)(((double)MouseControlOptions.FrameHeight / MouseControlOptions.ScreenHeight) * MouseControlOptions.MouseSensitive.Value * -1);
            }
            // X is mirrored so the cursor follows the hand like a mirror image.
            moveX *= sensitiveX * -1;
            moveY *= sensitiveY;
            Point currentMousePosition = GetMousePosition();
            int destinationX = currentMousePosition.X + moveX;
            int destinationY = currentMousePosition.Y + moveY;
            Messanger.PublishOnCurrentThread(new FingerMovedMessage(MouseControlOptions.ControlMouse, frameX, frameY, destinationX, destinationY));
            if (MouseControlOptions.ControlMouse && MouseControlOptions.MouseSensitive.Value != 0 &&
                destinationX >= 0 && destinationY >= 0)
                SetCursorPos(destinationX, destinationY);
        }
        #endregion

        // Publish the intermediate images so the UI can display each pipeline stage.
        Messanger.PublishOnCurrentThread(new FrameProcessedMessage(_currentFrame, difference, skinDetectionFrameGray, fingerTrackerFrame));
    }
    catch
    {
        // NOTE(review): silently swallows every exception, including tracking and
        // native-interop failures — consider logging before ignoring.
    }
}
/// <summary>
/// Document-scanner demo: detects edges in the input image, finds the
/// four-cornered outline of a sheet of paper among the five largest contours,
/// applies a four-point perspective transform, and shows the thresholded
/// "scanned" result. Each step displays windows and waits for a key press.
/// </summary>
/// <param name="options">Carries the path of the image to process (options.Image).</param>
public static void Run(Options options)
{
    //load the image and compute the ratio of the old height
    //to the new height, clone it, and resize it
    using (var disposer = new Disposer())
    {
        var image = new Image <Bgr, byte>(options.Image);
        disposer.Add(image);
        Image <Bgr, byte> orig = image.Clone();
        disposer.Add(orig);
        // Ratio to map contour coordinates found on the 500px-high working copy
        // back onto the full-resolution original.
        double ratio = image.Height / 500.0;
        image = ImageUtil.Resize(image, height: 500);
        disposer.Add(image);
        Image <Gray, byte> gray = image.Convert <Gray, byte>();
        disposer.Add(gray);
        gray = gray.SmoothGaussian(5);
        disposer.Add(gray);
        Image <Gray, byte> edged = gray.Canny(75, 200);
        disposer.Add(edged);
        Console.WriteLine("STEP 1: Edge Detection");
        CvInvoke.Imshow("Image", image);
        CvInvoke.Imshow("Edged", edged);
        CvInvoke.WaitKey();
        CvInvoke.DestroyAllWindows();
        //find the contours in the edged image, keeping only the
        //largest ones, and initialize the screen contour
        VectorOfVectorOfPoint cnts = new VectorOfVectorOfPoint();
        disposer.Add(cnts);
        // FindContours may modify its input, so run it on a clone of the edge map.
        using (Image <Gray, byte> edgedClone = edged.Clone())
        {
            CvInvoke.FindContours(edgedClone, cnts, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        }
        Point[] screenCnt = null;
        // Examine the five largest contours by area; the first one that simplifies
        // to a quadrilateral is assumed to be the paper outline.
        foreach (VectorOfPoint c in Enumerable.Range(0, cnts.Size).Select(i => cnts[i]).OrderByDescending(c => CvInvoke.ContourArea(c)).Take(5))
        {
            //approximate the contour
            double peri = CvInvoke.ArcLength(c, true);
            using (VectorOfPoint approx = new VectorOfPoint())
            {
                CvInvoke.ApproxPolyDP(c, approx, 0.02 * peri, true);
                if (approx.Size == 4)
                {
                    screenCnt = approx.ToArray();
                    break;
                }
            }
        }
        if (screenCnt == null)
        {
            Console.WriteLine("Failed to find polygon with four points");
            return;
        }
        //show the contour (outline) of the piece of paper
        Console.WriteLine("STEP 2: Find contours of paper");
        image.Draw(screenCnt, new Bgr(0, 255, 0), 2);
        CvInvoke.Imshow("Outline", image);
        CvInvoke.WaitKey();
        CvInvoke.DestroyAllWindows();
        //apply the four point transform to obtain a top-down
        //view of the original image
        // Corner coordinates are scaled by `ratio` back to the original resolution.
        Image <Bgr, byte> warped = FourPointTransform(orig, screenCnt.Select(pt => new PointF((int)(pt.X * ratio), (int)(pt.Y * ratio))));
        disposer.Add(warped);
        //convert the warped image to grayscale, then threshold it
        //to give it that 'black and white' paper effect
        Image <Gray, byte> warpedGray = warped.Convert <Gray, byte>();
        disposer.Add(warpedGray);
        warpedGray = warpedGray.ThresholdAdaptive(new Gray(251), AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 251, new Gray(10));
        disposer.Add(warpedGray);
        Console.WriteLine("STEP 3: Apply perspective transform");
        Image <Bgr, byte> origResized = ImageUtil.Resize(orig, height: 650);
        disposer.Add(origResized);
        CvInvoke.Imshow("Original", origResized);
        Image <Gray, byte> warpedResized = ImageUtil.Resize(warpedGray, height: 650);
        disposer.Add(warpedResized);
        CvInvoke.Imshow("Scanned", warpedResized);
        CvInvoke.WaitKey();
        CvInvoke.DestroyAllWindows();
    }
}
/// <summary>
/// Computes convex hull for a set of 2D points.
/// </summary>
/// <param name="points">The input 2D point set, represented by CV_32SC2 or CV_32FC2 matrix</param>
/// <param name="clockwise">If true, the output convex hull will be oriented clockwise,
/// otherwise it will be oriented counter-clockwise. Here, the usual screen coordinate
/// system is assumed - the origin is at the top-left corner, x axis is oriented to the right,
/// and y axis is oriented downwards.</param>
/// <returns>The output convex hull. It is a vector of points that form
/// the hull (must have the same type as the input points).</returns>
public static Point[] ConvexHull(IEnumerable<Point> points, bool clockwise = false)
{
    if (points == null)
        throw new ArgumentNullException("points");

    // The native call needs a contiguous array plus an explicit element count.
    Point[] sourceArray = EnumerableEx.ToArray(points);
    IntPtr hullHandle;
    NativeMethods.imgproc_convexHull_Point_ReturnsPoints(
        sourceArray, sourceArray.Length, out hullHandle, clockwise ? 1 : 0);

    // Wrap the returned native vector so it is freed deterministically once copied out.
    using (var hullVector = new VectorOfPoint(hullHandle))
    {
        return hullVector.ToArray();
    }
}
/// <summary>
/// Runs one calibration pass over a median-blurred BGR frame: sums the colour
/// channels, thresholds, runs Canny, then takes the contour with the longest
/// perimeter and fits a convex hull to it. When the hull has exactly 6 vertices
/// and validation was requested, the hull corners are stored as the calibration
/// points. Debug images for every stage are written into DebugImages.
/// </summary>
/// <param name="medianBlurImageIn">Pre-blurred input frame.</param>
/// <returns>The debug image currently selected by Parameters.SelectedImage.</returns>
private Mat DoCalibration(Image <Bgr, byte> medianBlurImageIn)
{
    // Expose each colour channel as a debug view.
    DebugImages[(int)SelectedImage.InImageB] = medianBlurImageIn[0].Mat;
    DebugImages[(int)SelectedImage.InImageG] = medianBlurImageIn[1].Mat;
    DebugImages[(int)SelectedImage.InImageR] = medianBlurImageIn[2].Mat;
    // Brightness proxy: plain sum of the three channels.
    var InImageSum = medianBlurImageIn[0] + medianBlurImageIn[1] + medianBlurImageIn[2];
    DebugImages[(int)SelectedImage.InImageSum] = InImageSum.Mat;
    Mat threshold = new Mat();
    CvInvoke.Threshold(InImageSum, threshold, Parameters.Threshold, 255, ThresholdType.Binary);
    DebugImages[(int)SelectedImage.threshold] = threshold;
    Mat CannyImage = new Mat();
    CvInvoke.Canny(threshold, CannyImage, Parameters.CannyThreshold1, Parameters.CannyThreshold2, 3, true);
    DebugImages[(int)SelectedImage.Canny] = CannyImage;
    var contoursImage = medianBlurImageIn.Clone();
    DebugImages[(int)SelectedImage.approxContour] = contoursImage.Mat;
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(CannyImage, contours, null, RetrType.External, ChainApproxMethod.ChainApproxNone);
        // Pick the external contour with the longest perimeter (not largest area).
        VectorOfPoint maxContour = null;
        double arcSize = -1;
        for (int i = 0; i < contours.Size; i++)
        {
            var arc = CvInvoke.ArcLength(contours[i], true);
            if (arc > arcSize)
            {
                arcSize = arc;
                maxContour = contours[i];
            }
        }
        if (maxContour != null)
        {
            using (VectorOfPoint approxContour = new VectorOfPoint())
            {
                CvInvoke.ApproxPolyDP(maxContour, approxContour, Parameters.ContourEpsilon, true);
                // Convex hull in float space, then rounded back to integer pixels.
                var convexContour = CvInvoke.ConvexHull(approxContour.ToArray().Select((x) => new PointF(x.X, x.Y)).ToArray());
                var pointConvexContour = convexContour.Select((x) => new Point((int)x.X, (int)x.Y)).ToArray();
                var circle = CvInvoke.MinEnclosingCircle(convexContour);
                // A 6-vertex hull is treated as the expected calibration target
                // (hexagonal outline); only commit when the user asked to validate.
                if (convexContour.Length == 6 && validateAsked)
                {
                    validateAsked = false;
                    Parameters.Center = new Point((int)circle.Center.X, (int)circle.Center.Y);
                    // Index of the bottom-most hull vertex (largest Y, with a 0.1 px tolerance).
                    var maxY = convexContour.Max((x) => x.Y);
                    var indexSommetHaut = convexContour.ToList().FindIndex((x) => x.Y >= maxY - 0.1);
                    // NOTE(review): the two loops below together assign Points[i] =
                    // pointConvexContour[i] for every i — if a rotation starting at
                    // indexSommetHaut was intended, the target indices should be offset.
                    for (int i = indexSommetHaut; i < convexContour.Length; i++)
                    {
                        Parameters.Points[i] = pointConvexContour[i];
                    }
                    for (int i = 0; i < indexSommetHaut; i++)
                    {
                        Parameters.Points[i] = pointConvexContour[i];
                    }
                    grid.Refresh();
                    Save();
                    chkAutoCalibration.Checked = false;
                }
                // Draw the hull, its enclosing circle and the circle center for feedback.
                contoursImage.DrawPolyline(pointConvexContour, true, new Bgr(Color.Green), 3);
                contoursImage.Draw(circle, new Bgr(Color.DarkGreen), 3);
                contoursImage.Draw(new Cross2DF(circle.Center, 10, 10), new Bgr(Color.DarkGreen), 3);
            }
        }
        return(DebugImages[(int)Parameters.SelectedImage]);
    }
}
/// <summary>
/// Performs object detection without a multi-scale window.
/// </summary>
/// <param name="img">Source image. CV_8UC1 and CV_8UC4 types are supported for now.</param>
/// <param name="weights">Confidence values of the returned detections.</param>
/// <param name="hitThreshold">Threshold for the distance between features and SVM classifying plane.
/// Usually it is 0 and should be specfied in the detector coefficients (as the last free coefficient).
/// But if the free coefficient is omitted (which is allowed), you can specify it manually here.</param>
/// <param name="winStride">Window stride. It must be a multiple of block stride.</param>
/// <param name="padding">Mock parameter to keep the CPU interface compatibility. It must be (0,0).</param>
/// <param name="searchLocations">Optional fixed locations to evaluate instead of a full scan.</param>
/// <returns>Left-top corner points of detected objects boundaries.</returns>
public virtual Point[] Detect(Mat img, out double[] weights, double hitThreshold = 0, Size? winStride = null, Size? padding = null, Point[] searchLocations = null)
{
    if (disposed)
        throw new ObjectDisposedException("HOGDescriptor");
    if (img == null)
        throw new ArgumentNullException("img");
    img.ThrowIfDisposed();

    // Unspecified sizes are passed to the native layer as empty Size values,
    // which lets it fall back to its own defaults.
    Size effectiveWinStride = winStride.GetValueOrDefault(new Size());
    Size effectivePadding = padding.GetValueOrDefault(new Size());

    using (var foundLocations = new VectorOfPoint())
    using (var hitWeights = new VectorOfDouble())
    {
        int searchLocationCount = (searchLocations != null) ? searchLocations.Length : 0;
        NativeMethods.objdetect_HOGDescriptor_detect(
            ptr, img.CvPtr, foundLocations.CvPtr, hitWeights.CvPtr,
            hitThreshold, effectiveWinStride, effectivePadding,
            searchLocations, searchLocationCount);
        weights = hitWeights.ToArray();
        return foundLocations.ToArray();
    }
}
/// <summary>
/// Detects triangles, rectangles and circles in the binarized image, draws them
/// on a copy of the source image and writes the per-shape counts to the label.
/// </summary>
/// <param name="image">Image used to build the binarized working copy when none is cached.</param>
/// <param name="minArea">Minimum contour area (in px) for a triangle or rectangle to be counted.</param>
/// <param name="label">UI label that receives the counts text.</param>
/// <returns>A copy of the source image with all detected shapes drawn on it.</returns>
public Image <Bgr, byte> ReturnContours(Image <Bgr, byte> image, int minArea, Label label)
{
    if (prepImage == null)
    {
        prepImage = ReturnBinarized(image, 90);
    }
    var resultImage = prepImage.Convert <Gray, byte>();
    int trisCount = 0;
    int rectCount = 0;
    int circleCount = 0; // shapes
    var contoursImage = sourceImage.Copy();
    // FIX: the contour vectors hold native memory; wrap them in using blocks so
    // they are released deterministically (consistent with the rest of the file,
    // which always disposes VectorOfVectorOfPoint/VectorOfPoint).
    using (var contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(
            resultImage, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        for (int i = 0; i < contours.Size; i++)
        {
            using (VectorOfPoint contour = contours[i])
            using (var approxContour = new VectorOfPoint())
            {
                // Simplify with a tolerance of 5% of the perimeter.
                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                var points = approxContour.ToArray();
                if (approxContour.Size == 3) // 3 vertices -> triangle
                {
                    var S = CvInvoke.ContourArea(approxContour, false);
                    if (S > minArea)
                    {
                        trisCount++;
                        var pointsTri = approxContour.ToArray();
                        contoursImage.Draw(new Triangle2DF(pointsTri[0], pointsTri[1], pointsTri[2]), new Bgr(Color.GreenYellow), 2);
                    }
                }
                if (isRectangle(points))
                {
                    var S = CvInvoke.ContourArea(approxContour, false);
                    if (S > minArea)
                    {
                        rectCount++;
                        contoursImage.Draw(CvInvoke.MinAreaRect(approxContour), new Bgr(Color.Blue), 2);
                    }
                }
            }
        }
    }
    //circles
    List <CircleF> circles = new List <CircleF>(CvInvoke.HoughCircles(resultImage, HoughModes.Gradient, 1.0, 250, 100, 50, 5, contoursImage.Width / 3));
    foreach (CircleF circle in circles)
    {
        CvInvoke.Circle(contoursImage, Point.Round(circle.Center), (int)circle.Radius, new Bgr(Color.Red).MCvScalar, 2);
        circleCount++;
    }
    label.Text = "Количество треугольников = " + trisCount +
                 "\nКоличество прямоугольников = " + rectCount +
                 "\nКоличество кругов = " + circleCount;
    return(contoursImage);
}
// get all of the valid contour maps, valid means area >= Constants.MIN_AREA
// this was not in their code, I added this feature, but I used their logic
/// <summary>
/// Builds a ColorfulContourMap (raw contour points plus approximated polygon
/// points, each tagged with the pixel colour sampled from the input image) for
/// every contour whose area is at least Constants.MIN_AREA.
/// </summary>
/// <param name="input">Source image; contours are found on a thresholded grayscale copy.</param>
/// <param name="index">Index stored on every resulting contour map.</param>
/// <param name="mode">0 = black background (threshold tuned for bright shapes); anything else = white background.</param>
/// <returns>All contour maps passing the area filter; empty list when no contours are found.</returns>
public static List<ColorfulContourMap> getAllContourMap(Mat input, int index, int mode = 0)
{
    List<ColorfulContourMap> result = new List<ColorfulContourMap>();

    // Convert the input to grayscale.
    MatImage m1 = new MatImage(input);
    m1.Convert();
    Mat gray = m1.Out();

    // Smooth and invert-threshold so the shapes become white on black; the
    // cutoff depends on the declared background colour.
    if (mode == 0) // black background
    {
        MatImage m2 = new MatImage(gray);
        m2.SmoothGaussian(3);
        m2.ThresholdBinaryInv(245, 255);
        gray = m2.Out();
    }
    else // white background
    {
        MatImage m2 = new MatImage(gray);
        m2.SmoothGaussian(3);
        m2.ThresholdBinaryInv(100, 255);
        gray = m2.Out();
    }

    // FIX: the original indexed contours[0] before checking contours.Size, which
    // faults on images with no contours; iterating and filtering avoids that.
    // Also: the contour vectors are now disposed, and the unused gray.Clone()
    // temporary has been removed.
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    using (Mat hierarchy = new Mat())
    {
        CvInvoke.FindContours(gray, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);

        for (int i = 0; i < contours.Size; i++)
        {
            double area = Math.Abs(CvInvoke.ContourArea(contours[i], false)); // Find the area of contour
            if (area < Constants.MIN_AREA)
            {
                continue; // too small to be a valid shape
            }

            using (VectorOfPoint contour = contours[i])
            using (VectorOfPoint poly = new VectorOfPoint())
            {
                CvInvoke.ApproxPolyDP(contour, poly, 1.0, true);

                // Tag every contour/polygon point with the colour it has in the input image.
                List<ColorfulPoint> cps = new List<ColorfulPoint>();
                List<ColorfulPoint> pcps = new List<ColorfulPoint>();
                foreach (Point p in contour.ToArray())
                {
                    cps.Add(new ColorfulPoint { X = p.X, Y = p.Y, color = extractPointColor(p, input) });
                }
                foreach (Point p in poly.ToArray())
                {
                    pcps.Add(new ColorfulPoint { X = p.X, Y = p.Y, color = extractPointColor(p, input) });
                }
                result.Add(new ColorfulContourMap(cps, pcps, index));
            }
        }
    }
    return result;
}
/// <summary>
/// Loads the image named in the file-name text box, then detects circles
/// (Hough transform), lines (probabilistic Hough), triangles and rectangles
/// (contour approximation), timing each stage. Results are drawn into separate
/// picture boxes and the per-stage timings shown in the window title.
/// </summary>
public void PerformShapeDetection()
{
    if (fileNameTextBox.Text != String.Empty)
    {
        StringBuilder msgBuilder = new StringBuilder("Performance: ");

        //Load the image from file and resize it for display
        Image<Bgr, Byte> img =
            new Image<Bgr, byte>(fileNameTextBox.Text)
            .Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

        //Convert the image to grayscale and filter out the noise
        // NOTE(review): the UMat/Mat temporaries created below are never disposed;
        // consider using blocks if this runs per-frame.
        UMat uimage = new UMat();
        CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

        //use image pyr to remove noise
        UMat pyrDown = new UMat();
        CvInvoke.PyrDown(uimage, pyrDown);
        CvInvoke.PyrUp(pyrDown, uimage);

        //Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();

        #region circle detection
        Stopwatch watch = Stopwatch.StartNew();
        double cannyThreshold = 180.0;
        double circleAccumulatorThreshold = 120;
        CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);
        watch.Stop();
        msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        #region Canny and edge detection
        watch.Reset(); watch.Start();
        double cannyThresholdLinking = 120.0;
        UMat cannyEdges = new UMat();
        CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
        LineSegment2D[] lines = CvInvoke.HoughLinesP(
            cannyEdges,
            1,              //Distance resolution in pixel-related units
            Math.PI/45.0,   //Angle resolution measured in radians.
            20,             //threshold
            30,             //min Line width
            10);            //gap between lines
        watch.Stop();
        msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        #region Find triangles and rectangles
        watch.Reset(); watch.Start();
        List<Triangle2DF> triangleList = new List<Triangle2DF>();
        List<RotatedRect> boxList = new List<RotatedRect>(); //a box is a rotated rectangle
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple );
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    // Simplify each contour with a tolerance of 5% of its perimeter.
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                    if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                    {
                        if (approxContour.Size == 3) //The contour has 3 vertices, it is a triangle
                        {
                            Point[] pts = approxContour.ToArray();
                            triangleList.Add(new Triangle2DF(
                                pts[0],
                                pts[1],
                                pts[2]
                            ));
                        }
                        else if (approxContour.Size == 4) //The contour has 4 vertices.
                        {
                            #region determine if all the angles in the contour are within [80, 100] degree
                            bool isRectangle = true;
                            Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(
                                    edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }
                            #endregion
                            if (isRectangle) boxList.Add(CvInvoke.MinAreaRect(approxContour));
                        }
                    }
                }
            }
        }
        watch.Stop();
        msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
        #endregion

        originalImageBox.Image = img;
        this.Text = msgBuilder.ToString(); // timings go into the window title

        #region draw triangles and rectangles
        Mat triangleRectangleImage = new Mat(img.Size, DepthType.Cv8U, 3);
        triangleRectangleImage.SetTo(new MCvScalar(0)); // black canvas
        foreach (Triangle2DF triangle in triangleList)
        {
            CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(triangle.GetVertices(), Point.Round), true, new Bgr(Color.DarkBlue).MCvScalar, 2);
        }
        foreach (RotatedRect box in boxList)
        {
            CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.DarkOrange).MCvScalar, 2);
        }
        triangleRectangleImageBox.Image = triangleRectangleImage;
        #endregion

        #region draw circles
        Mat circleImage = new Mat(img.Size, DepthType.Cv8U, 3);
        circleImage.SetTo(new MCvScalar(0));
        foreach (CircleF circle in circles)
            CvInvoke.Circle(circleImage, Point.Round(circle.Center), (int) circle.Radius, new Bgr(Color.Brown).MCvScalar, 2);
        circleImageBox.Image = circleImage;
        #endregion

        #region draw lines
        Mat lineImage = new Mat(img.Size, DepthType.Cv8U, 3);
        lineImage.SetTo(new MCvScalar(0));
        foreach (LineSegment2D line in lines)
            CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(Color.Green).MCvScalar, 2);
        lineImageBox.Image = lineImage;
        #endregion
    }
}