public void FindBoundPolygon()
{
    // Project the slice points into a flat 2D array and take their convex hull.
    PointF[] pts = new PointF[this.slicePoints2d.Count];
    for (int i = 0; i < pts.Length; i++)
    {
        pts[i] = new PointF((float)this.slicePoints2d[i].x, (float)this.slicePoints2d[i].y);
    }
    PointF[] hullPoints = CvInvoke.ConvexHull(pts);

    // Reduce the hull to a boundary polygon with fewer vertices.
    VectorOfPointF hullVec = new VectorOfPointF();
    hullVec.Push(hullPoints);
    VectorOfPointF polygon = new VectorOfPointF();
    // NOTE: when inferring the number of polygon edges, the epsilon (3rd param)
    // could range over [0.0005, 0.0015]; how to choose the "best" value is open.
    CvInvoke.ApproxPolyDP(hullVec, polygon, 0.0003, true);

    for (int i = 0; i < polygon.Size; i++)
    {
        this.cornerPoints2d.Add(new MyVector2(polygon[i].X, polygon[i].Y));
    }

    // Unproject each 2D corner back into 3D space via the slice frame.
    foreach (MyVector2 corner2d in this.cornerPoints2d)
    {
        MyVector3 corner3d = frame.GetPointSpaceCoord(new MyVector3(corner2d, 0.0));
        this.cornerPoints3d.Add(corner3d);
    }
}
/// <summary>
/// Draws detection results onto <paramref name="mat"/>: the hand as a filled
/// green convex hull, the ball as a filled red circle (falling back to the last
/// known ball when none is detected this frame), and the tracked yellow line.
/// </summary>
/// <param name="detectedBall">Ball found in the current frame (Radius &lt;= 0 means none).</param>
/// <param name="detectedHand">Hand contour found in the current frame (may be empty).</param>
/// <param name="mat">Frame to annotate; returned after drawing.</param>
/// <returns>The annotated frame.</returns>
private Mat DrawObjects(CircleF detectedBall, VectorOfPoint detectedHand, Mat mat)
{
    if (detectedHand.Size != 0)
    {
        VectorOfPoint hull = new VectorOfPoint();
        CvInvoke.ConvexHull(detectedHand, hull, false, true);
        var cont = new VectorOfVectorOfPoint(hull);
        CvInvoke.DrawContours(mat, cont, 0, new Bgr(System.Drawing.Color.Green).MCvScalar, -1);
    }

    if (detectedBall.Radius > 0)
    {
        // Smooth the radius over the last half second when samples exist.
        int averageBallRadius = lastHalfSecondRadiuses.Count > 0
            ? lastHalfSecondRadiuses.Max()
            : (int)detectedBall.Radius;
        CvInvoke.Circle(mat, System.Drawing.Point.Round(detectedBall.Center), averageBallRadius,
            new Bgr(System.Drawing.Color.Red).MCvScalar, -1);
    }
    else
    {
        if (lastFoundBall.Radius > 0 && lastFoundBall.Center.X != 0)
        {
            // BUGFIX: fall back to the last found ball's radius. detectedBall.Radius
            // is known to be <= 0 in this branch, so using it drew a degenerate circle.
            int averageBallRadius = lastHalfSecondRadiuses.Count > 0
                ? lastHalfSecondRadiuses.Max()
                : (int)lastFoundBall.Radius;
            CvInvoke.Circle(mat, System.Drawing.Point.Round(lastFoundBall.Center), averageBallRadius,
                new Bgr(System.Drawing.Color.Red).MCvScalar, -1);
        }
    }

    // Draw the tracked line once its start point has been set.
    if (line.P1.X != 0)
    {
        CvInvoke.Line(mat,
            new System.Drawing.Point(line.P1.X, line.P1.Y),
            new System.Drawing.Point(line.P2.X, line.P2.Y),
            new Bgr(System.Drawing.Color.Yellow).MCvScalar, 3);
    }
    //CvInvoke.CvtColor(mat, mat, ColorConversion.Bgr2Hsv);
    //CvInvoke.InRange(mat, new ScalarArray(new MCvScalar(low_H, low_S, low_V)), new ScalarArray(new MCvScalar(high_H, high_S, high_V)), mat);
    return (mat);
}
public void _findOutline()
{
    using (var working = _mat.Clone())
    {
        var contours = new VectorOfVectorOfPoint();
        CvInvoke.FindContours(working, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

        // Convex hull of every contour, ordered by hull area, largest first.
        var hulls = contours.ToArrayOfArray()
            .Select(c => CvInvoke.ConvexHull(c.Select(p => new PointF(p.X, p.Y)).ToArray()))
            .ToArray()
            .OrderByDescending(h => CvInvoke.ContourArea(new VectorOfPointF(h)))
            .ToList();

        var outline = new VectorOfPointF();
        foreach (var hull in hulls)
        {
            var hullVec = new VectorOfPointF(hull);
            var perimeter = CvInvoke.ArcLength(hullVec, true);
            var approx = new VectorOfPointF();
            CvInvoke.ApproxPolyDP(hullVec, approx, _approxE * perimeter, true);
            // First hull that approximates to a quadrilateral wins.
            if (approx.Size == 4)
            {
                outline = approx;
                break;
            }
        }
        _outline = outline;
    }
}
public static void TestConvexHall()
{
    #region Create some random points
    Random rng = new Random();
    PointF[] pts = new PointF[200];
    for (int i = 0; i < pts.Length; i++)
    {
        pts[i] = new PointF(
            (float)(100 + rng.NextDouble() * 400),
            (float)(100 + rng.NextDouble() * 400));
    }
    #endregion

    // White 600x600 canvas.
    Mat img = new Mat(600, 600, DepthType.Cv8U, 3);
    img.SetTo(new MCvScalar(255.0, 255.0, 255.0));

    // Draw the points
    foreach (PointF p in pts)
    {
        CvInvoke.Circle(img, Point.Round(p), 3, new MCvScalar(0.0, 0.0, 0.0));
    }

    // Find and draw the convex hull, timing the hull computation only.
    Stopwatch watch = Stopwatch.StartNew();
    PointF[] hull = CvInvoke.ConvexHull(pts, true);
    watch.Stop();

    CvInvoke.Polylines(
        img,
#if NETFX_CORE
        Extensions.ConvertAll <PointF, Point>(hull, Point.Round),
#else
        Array.ConvertAll <PointF, Point>(hull, Point.Round),
#endif
        true,
        new MCvScalar(255.0, 0.0, 0.0));

    Emgu.CV.UI.ImageViewer.Show(
        img,
        String.Format("Convex Hull Computed in {0} milliseconds", watch.ElapsedMilliseconds));
}
/// <summary>Computes the convex hull (clockwise orientation) of the given points.</summary>
private static IEnumerable <PointF> GetConvexHull(IEnumerable <PointF> points)
{
    PointF[] input = points.ToArray();
    PointF[] hull = CvInvoke.ConvexHull(input, true);
    return hull;
}
/// <summary>
/// Finds the largest contour in the binary mask (assumed to be a hand), computes
/// its convex hull and convexity defects, and draws a thin red line between each
/// defect's start and end point on a BGR copy of the mask.
/// </summary>
/// <param name="mask">Single-channel binary mask to analyze.</param>
/// <returns>BGR image of the mask with the defect lines drawn on it.</returns>
public Image <Bgr, byte> GetPalm(Mat mask)
{
    int width = mask.Width;   // NOTE(review): unused
    int height = mask.Height; // NOTE(review): unused
    var temp = new Mat();     // NOTE(review): unused
    var result = mask.ToImage <Bgr, byte>();
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    VectorOfPoint biggestContour = new VectorOfPoint();
    CvInvoke.FindContours(mask, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
    if (contours.Size > 0)
    {
        // "Biggest" here is measured by point count (Size), not by contour area.
        biggestContour = contours[0];
        for (int i = 0; i < contours.Size; i++)
        {
            if (contours[i].Size > biggestContour.Size)
            {
                biggestContour = contours[i];
            }
        }
    }
    if (biggestContour.Size != 0)
    {
        // Get the hand contour (approximation epsilon is effectively zero, so this barely simplifies).
        CvInvoke.ApproxPolyDP(biggestContour, biggestContour, 0.00000001, false);
        var points = biggestContour.ToArray();
        VectorOfInt hull = new VectorOfInt();
        // find the palm hand area using convexity defects (index-based hull required by ConvexityDefects)
        CvInvoke.ConvexHull(biggestContour, hull, true);
        var box = CvInvoke.MinAreaRect(biggestContour); // NOTE(review): computed but unused
        Mat defects = new Mat();
        CvInvoke.ConvexityDefects(biggestContour, hull, defects);
        if (!defects.IsEmpty)
        {
            // Data from Mat are not directly readable so we convert it to Matrix<>
            Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
            defects.CopyTo(m);
            // Each defect row holds (start_index, end_index, farthest_pt_index, fixpt_depth).
            for (int i = 0; i < m.Rows; i++)
            {
                int startIdx = m.Data[i, 0];
                int endIdx = m.Data[i, 1];
                Point startPoint = points[startIdx];
                Point endPoint = points[endIdx];
                // draw a line connecting the convexity defect start point and end point in thin red line
                CvInvoke.Line(result, startPoint, endPoint, new MCvScalar(0, 0, 255));
            }
        }
    }
    return (result);
}
/// <summary>
/// Menu handler: finds the largest external contour of the picture-box image
/// and draws its convex hull in red back onto the picture box.
/// </summary>
private void convexHullToolStripMenuItem_Click(object sender, EventArgs e)
{
    try
    {
        if (pictureBox1.Image == null)
        {
            return;
        }

        // Smooth, then grayscale + inverse binary threshold to isolate dark shapes.
        var img = new Bitmap(pictureBox1.Image).ToImage <Bgr, byte>();
        img = img.SmoothGaussian(3);
        var gray = img.Convert <Gray, byte>()
                   .ThresholdBinaryInv(new Gray(225), new Gray(255));

        VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
        Mat hier = new Mat();
        CvInvoke.FindContours(gray, contours, hier,
            Emgu.CV.CvEnum.RetrType.External,
            Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

        // Locate the contour with the largest area.
        int bestIndex = -1;
        double bestArea = -100;
        for (int i = 0; i < contours.Size; i++)
        {
            double area = CvInvoke.ContourArea(contours[i]);
            if (area > bestArea)
            {
                bestArea = area;
                bestIndex = i;
            }
        }

        if (bestIndex > -1)
        {
            var biggestcontour = contours[bestIndex];
            //Mat hull = new Mat();
            VectorOfPoint hull = new VectorOfPoint();
            CvInvoke.ConvexHull(biggestcontour, hull);
            //CvInvoke.DrawContours(img, hull, -1, new MCvScalar(0, 0, 255), 3);
            CvInvoke.Polylines(img, hull.ToArray(), true, new MCvScalar(0, 0.0, 255.0), 3);
        }

        pictureBox1.Image = img.AsBitmap();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Gets the convex hull of every contour that satisfies the
/// is_contour_a_character condition.
/// </summary>
/// <param name="plate_after_preprocessing">Preprocessed plate image used by the character test.</param>
/// <param name="contours">Candidate contours to filter and hull.</param>
/// <returns>One convex hull per accepted contour.</returns>
private static List <VectorOfPoint> get_convex_hull(Image <Gray, byte> plate_after_preprocessing, VectorOfVectorOfPoint contours)
{
    var hulls = new List <VectorOfPoint>();
    for (int i = 0; i < contours.Size; i++)
    {
        if (!is_contour_a_character(plate_after_preprocessing, contours[i]))
        {
            continue;
        }
        var hull = new VectorOfPoint();
        CvInvoke.ConvexHull(contours[i], hull);
        hulls.Add(hull);
        //CvInvoke.FillConvexPoly(hull_img, hull, new MCvScalar(255));
    }
    return hulls;
}
/// <summary>
/// Segments the hand from the frame via background subtraction, finds the largest
/// contour, draws its convex hull in red on the color frame, and delegates
/// fingertip drawing to DrawPoints using the contour's convexity defects.
/// </summary>
/// <param name="colorPicture">Input BGR frame; annotations are drawn onto it.</param>
/// <param name="skinHsv">Skin color sample passed to BackgroundSubtraction.</param>
/// <returns>Annotated image, or the plain frame when no usable hand contour exists.</returns>
Image <Bgr, byte> ProcessFrame(Mat colorPicture, MCvScalar skinHsv)
{ //, Mat binPicture) {
    Mat picture = colorPicture.Clone();
    picture = BackgroundSubtraction(picture, skinHsv);
    //picture = binPicture;
    //return new Image<Bgr, byte>(picture.Bitmap);

    //contour stuff
    VectorOfVectorOfPoint contoursss = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(picture, contoursss, null, RetrType.List, ChainApproxMethod.ChainApproxNone);
    VectorOfPoint handContour = FindLargestContour(contoursss);
    // Reject frames whose largest contour is missing, too small, or implausibly large.
    if ((handContour == null || CvInvoke.ContourArea(handContour) < 100 || CvInvoke.ContourArea(handContour) > 200000))
    {
        return (new Image <Bgr, byte>(colorPicture.Bitmap));
    }
    VectorOfVectorOfPoint hulls = new VectorOfVectorOfPoint(1);
    //VectorOfVectorOfPoint hullDefects = new VectorOfVectorOfPoint(1);
    VectorOfInt hullI = new VectorOfInt();
    // Index-based hull (needed by ConvexityDefects) and point-based hull (for drawing).
    CvInvoke.ConvexHull(handContour, hullI, false, false);
    CvInvoke.ConvexHull(handContour, hulls[0], false, true);
    //convexity defects
    Mat defects = new Mat();
    CvInvoke.ConvexityDefects(handContour, hullI, defects);
    try
    {
        Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
        // copy Mat to a matrix...
        defects.CopyTo(m);
        CvInvoke.DrawContours(colorPicture, hulls, -1, new MCvScalar(0, 0, 255), 1);
        Image <Bgr, byte> image = new Image <Bgr, byte>(colorPicture.Bitmap);
        return (DrawPoints(image, m, handContour));
    }
    catch (Exception)
    {
        // The defect matrix can be empty/malformed for degenerate contours;
        // fall back to returning the frame without fingertip annotations.
        return (new Image <Bgr, byte>(colorPicture.Bitmap));
    }
    //CvInvoke.Imshow("picture", colorPicture);
    //CvInvoke.WaitKey();  // Render image and keep window opened until any key is pressed
}
// Complete the contours and fill them (translated from: 补全轮廓并填充).
// Edge-detects the input, draws the convex hull of each external contour,
// then re-detects contours on the drawn hulls and fills them in blue.
private Image <Bgr, byte> ContourFilling(Image <Gray, byte> pic)
{
    Image <Bgr, byte> outpic = new Image <Bgr, byte>(pic.Size);
    pic = pic.Canny(100, 255);
    Image <Gray, byte> outcon = new Image <Gray, byte>(pic.Size);
    VectorOfVectorOfPoint con = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(pic, con, outcon, RetrType.External, ChainApproxMethod.ChainApproxNone);
    Point[][] con1 = con.ToArrayOfArray();
    PointF[][] con2 = Array.ConvertAll(con1, new Converter <Point[], PointF[]>(PointToPointF));
    // NOTE(review): throws when no contours are found (con[0] on an empty vector) — confirm callers guarantee non-empty input.
    PointF[] hull = new PointF[con[0].Size];
    for (int i = 0; i < con.Size; i++)
    {
        hull = CvInvoke.ConvexHull(con2[i], true);
        for (int j = 0; j < hull.Length; j++)
        {
            // Round hull vertices to integer pixels (+0.5 then truncate).
            Point p1 = new Point((int)(hull[j].X + 0.5), (int)(hull[j].Y + 0.5));
            Point p2;
            // Wrap around to the first vertex to close the hull polygon.
            if (j == hull.Length - 1)
            {
                p2 = new Point((int)(hull[0].X + 0.5), (int)(hull[0].Y + 0.5));
            }
            else
            {
                p2 = new Point((int)(hull[j + 1].X + 0.5), (int)(hull[j + 1].Y + 0.5));
            }
            CvInvoke.Line(outpic, p1, p2, new MCvScalar(255, 0, 255, 255), 2, 0, 0);
        }
    }
    // Threshold the drawn hull outlines, re-detect their contours, and fill them.
    Image <Gray, byte> gray = new Image <Gray, byte>(pic.Size);
    gray = outpic.Convert <Gray, byte>();
    gray = gray.ThresholdBinary(new Gray(100), new Gray(255));
    gray = gray.Canny(100, 255);
    VectorOfVectorOfPoint con3 = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(gray, con3, outcon, RetrType.External, ChainApproxMethod.ChainApproxNone);
    for (int i = 0; i < con3.Size; i++)
    {
        CvInvoke.DrawContours(outpic, con3, i, new MCvScalar(255, 0, 0), -1);
    }
    return (outpic);
}
/// <summary>
/// Calculates the convex hull and convexity defects of the biggest contour for
/// accurate finger calculation, drawing debug circles and the hull onto img.
/// </summary>
/// <param name="img">Grayscale debug image that circles and the hull are drawn onto.</param>
/// <param name="biggestContour">Contour to analyze; replaced locally by its polygon approximation.</param>
/// <param name="contours">All contours (unused here — NOTE(review): confirm whether still needed).</param>
/// <returns>Defect matrix (one row per defect), or null when no defects were found.</returns>
private Matrix <int> CalculateConvexityDefects(Image <Gray, Byte> img, VectorOfPoint biggestContour, VectorOfVectorOfPoint contours)
{
    VectorOfPoint currentContour = new VectorOfPoint();
    VectorOfInt hullIndices = new VectorOfInt();
    // Simplify the contour with epsilon proportional to its perimeter (0.5%).
    CvInvoke.ApproxPolyDP(biggestContour, currentContour, CvInvoke.ArcLength(biggestContour, true) * .005, true);
    biggestContour = currentContour;
    // Index-based hull: required input format for ConvexityDefects below.
    CvInvoke.ConvexHull(biggestContour, hullIndices, false, false);
    /// Calcualate convexity defects
    /// Defects is a 4-element integer vector
    /// (start_index, end_index, farthest_pt_index, fixpt_depth)
    /// stored in a matrix where each row is a defect
    Matrix <int> defects = null;
    Mat mat = new Mat();
    CvInvoke.ConvexityDefects(biggestContour, hullIndices, mat);
    if (mat.Rows > 0)
    {
        defects = new Matrix <int>(mat.Rows, mat.Cols, mat.NumberOfChannels);
        mat.CopyTo(defects);
        /// For debugging and training purposes
        /// Draws finger points using convexity defects
        Matrix <int>[] channels = defects.Split();
        /// channel[0] = start_point, channel[1] = end_point, channel[2] = fixpt_depth
        for (int j = 0; j < defects.Rows; ++j)
        {
            // Only the first 5 defects are drawn (one per potential finger).
            if (j < 5)
            {
                CvInvoke.Circle(img, System.Drawing.Point.Round(new System.Drawing.PointF(biggestContour[channels[0][j, 0]].X, biggestContour[channels[0][j, 0]].Y)), 10, new MCvScalar(255, 255, 255), 10);
            }
        }
    }
    /// For debugging and training purposes
    /// Draws convex hull of biggest contour
    VectorOfPoint hullPoints = new VectorOfPoint();
    CvInvoke.ConvexHull(biggestContour, hullPoints, false);
    CvInvoke.Polylines(img, hullPoints.ToArray(), true, new MCvScalar(255, 255, 255), 10);
    return (defects);
}
/// <summary>
/// Finds all contours in the image, optionally draws them, then computes and
/// draws the convex hull of every contour point combined, exposing the hull as
/// a single Contour in the output data list.
/// </summary>
public override void Process(Image <Bgr, byte> image, out Image <Bgr, byte> annotatedImage, out List <object> data)
{
    base.Process(image, out annotatedImage, out data);

    using (var contours = new VectorOfVectorOfPoint())
    {
        // find the contours
        CvInvoke.FindContours(
            image.Convert <Gray, byte>(),
            contours,
            null,
            RetrType.List,
            ChainApproxMethod.ChainApproxSimple);

        // optionally show the contours
        if (_showContours)
        {
            annotatedImage.DrawPolyline(contours.ToArrayOfArray(), false, new Bgr(_contourColor.Color()), _lineThick);
        }

        // flatten every point of every contour into one array
        PointF[] allPoints = contours
                             .ToArrayOfArray()
                             .SelectMany(c => c)
                             .Select(p => new PointF(p.X, p.Y))
                             .ToArray();

        // hull over the combined point set
        PointF[] convexHull = CvInvoke.ConvexHull(allPoints);

        // draw the hull as a closed polyline
        annotatedImage.DrawPolyline(
            convexHull.Select(Point.Round).ToArray(),
            true,
            new Bgr(_annoColor.Color()),
            _lineThick);

        // publish the hull as a single contour
        data = new List <object> {
            new Contour(convexHull)
        };
    }
}
/// <summary>
/// Computes convexity metrics for the blob tagged on the bitmap: whether the
/// contour is convex, plus the area and perimeter of a lightly-approximated
/// convex hull. Results are written back into the bitmap's blob_tag.
/// </summary>
public static Bitmap Run(Bitmap arg, ConfigSetting cfg)
{
    var tag = (beans.blob_tag)arg.Tag;
    var original = tag.contour;

    tag.convexity = CvInvoke.IsContourConvex(original);

    // Hull of the blob contour, then a light polygon approximation of that hull.
    VectorOfPoint hull = new VectorOfPoint();
    CvInvoke.ConvexHull(original, hull, false);
    VectorOfPoint contur = new VectorOfPoint();
    CvInvoke.ApproxPolyDP(hull, contur, 0.001, true);

    tag.convexity_area = new square(CvInvoke.ContourArea(contur));
    tag.convexity_perimetr = new distance(CvInvoke.ArcLength(contur, true));
    return arg;
}
/// <summary>
/// Button handler: edge-detects image1, finds its contours, then draws each
/// contour's convex hull (vertex circles plus connecting lines) and the raw
/// contours onto a canvas shown below the original in imageBox2.
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    Image <Gray, byte> edges = new Image <Gray, byte>(image1.Width, image1.Height);
    Image <Gray, byte> hierarchyImg = new Image <Gray, byte>(image1.Width, image1.Height);
    Image <Bgr, byte> canvas = new Image <Bgr, byte>(image1.Width, image1.Height);

    CvInvoke.Canny(image1, edges, 100, 60);

    VectorOfVectorOfPoint con = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(edges, con, hierarchyImg, RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
    Point[][] intContours = con.ToArrayOfArray();
    PointF[][] floatContours = Array.ConvertAll <Point[], PointF[]>(intContours, new Converter <Point[], PointF[]>(PointToPointF));

    // For every contour: hull vertices as circles, hull edges as lines (wrapping around).
    for (int i = 0; i < con.Size; i++)
    {
        PointF[] hull = CvInvoke.ConvexHull(floatContours[i], true);
        for (int j = 0; j < hull.Length; j++)
        {
            Point current = new Point((int)(hull[j].X + 0.5), (int)(hull[j].Y + 0.5));
            int next = (j == hull.Length - 1) ? 0 : j + 1;
            Point following = new Point((int)(hull[next].X + 0.5), (int)(hull[next].Y + 0.5));
            CvInvoke.Circle(canvas, current, 3, new MCvScalar(0, 255, 255, 255), 6);
            CvInvoke.Line(canvas, current, following, new MCvScalar(255, 255, 0, 255), 3);
        }
    }

    // Overlay the raw contours on top of the hulls.
    for (int i = 0; i < con.Size; i++)
    {
        CvInvoke.DrawContours(canvas, con, i, new MCvScalar(255, 0, 255, 255), 2);
    }

    imageBox2.Image = image1.ConcateVertical(canvas);
}
/// <summary>
/// Extracts shape-based features from the largest contour of the source image.
/// </summary>
/// <param name="src">Source BGR image.</param>
/// <returns>
/// Feature vector in order: area, shape (circularity), perimeter, npoints,
/// width, height, centroideY, centroideX, radius, aspectRatio, extent.
/// Empty array when no contour is found.
/// </returns>
public static float[] shapeFeatures(Image <Bgr, Byte> src)
{
    List <float> vectorOfFeatures = new List <float>();
    VectorOfPoint maxContour = getMaxContour(src);
    if (maxContour != null)
    {
        // Shape features
        Rectangle bounds = CvInvoke.BoundingRectangle(maxContour);
        CircleF circle = CvInvoke.MinEnclosingCircle(maxContour);
        RotatedRect ellipse = CvInvoke.FitEllipse(maxContour);
        float area = Convert.ToSingle(CvInvoke.ContourArea(maxContour, false));
        float perimeter = Convert.ToSingle(CvInvoke.ArcLength(maxContour, true));
        // Circularity: equals 1.0 for a perfect circle, smaller for elongated shapes.
        float shape = Convert.ToSingle((4 * Math.PI * area) / Math.Pow(perimeter, 2));
        int npoints = maxContour.Size;
        float width = bounds.Width;
        float height = bounds.Height;
        float centroideY = circle.Center.Y;
        float centroideX = circle.Center.X;
        float radius = circle.Radius;
        float aspectRatio = width / height;
        float extent = Convert.ToSingle(area / (width * height));
        // NOTE(review): eccentricity and solidity below are computed but never added
        // to the feature vector — confirm whether that omission is intentional.
        float eccentricity = Convert.ToSingle(ellipse.Size.Height / ellipse.Size.Width);
        //float density = Convert.ToSingle(CvInvoke.CountNonZero(mask));
        //float equivDiameter = Convert.ToSingle(Math.Sqrt(4 * area / Math.PI));
        // Solidity: contour area divided by its convex hull's area.
        PointF[] hull = CvInvoke.ConvexHull(maxContour.ToArray().Select(pt => new PointF(pt.X, pt.Y)).ToArray());
        float solidity = 0;
        using (VectorOfPoint hullContourn = new VectorOfPoint(hull.Select(pt => new Point((int)pt.X, (int)pt.Y)).ToArray()))
            solidity = Convert.ToSingle(area / CvInvoke.ContourArea(hullContourn, false));
        vectorOfFeatures.AddRange(new float[] { area, shape, perimeter, npoints, width, height, centroideY, centroideX, radius, aspectRatio, extent });
    }
    return (vectorOfFeatures.ToArray());
}
/// <summary>
/// Finds the largest contour in detectionFrame, draws its convex hull, bounding
/// box and convexity defects onto displayFrame, counts "gestural" defects near
/// the palm center, and toggles detectClick when an open-hand gesture (>= 4
/// counted defects) repeats for 3 consecutive frames.
/// </summary>
/// <param name="detectionFrame">Binary frame used for contour detection.</param>
/// <param name="displayFrame">Frame the annotations are drawn onto; returned.</param>
/// <returns>The annotated display frame.</returns>
private Mat DetectObject(Mat detectionFrame, Mat displayFrame)
{
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        detectGesture = false;
        VectorOfPoint biggestContour = null; // NOTE(review): unused
        IOutputArray hirarchy = null;
        // Build the contour list (translated from: Crear lista de contornos)
        CvInvoke.FindContours(detectionFrame, contours, hirarchy, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        // Select the largest contour (translated from: Selecciona el contour mas grande)
        if (contours.Size > 0)
        {
            double maxArea = 0;
            int chosen = 0;
            VectorOfPoint contour = null;
            for (int i = 0; i < contours.Size; i++)
            {
                contour = contours[i];
                double area = CvInvoke.ContourArea(contour);
                if (area > maxArea)
                {
                    maxArea = area;
                    chosen = i;
                }
            }
            // Draw a frame around the detected object (translated from: Dibuja un frame)
            MarkDetectedObject(displayFrame, contours[chosen], maxArea);
            VectorOfPoint hullPoints = new VectorOfPoint();
            VectorOfInt hullInt = new VectorOfInt();
            // Point-based hull for drawing/bounding box; index-based hull for ConvexityDefects.
            CvInvoke.ConvexHull(contours[chosen], hullPoints, true);
            CvInvoke.ConvexHull(contours[chosen], hullInt, false);
            Mat defects = new Mat();
            if (hullInt.Size > 3)
            {
                detectGesture = true;
            }
            CvInvoke.ConvexityDefects(contours[chosen], hullInt, defects);
            Rectangle box = CvInvoke.BoundingRectangle(hullPoints);
            CvInvoke.Rectangle(displayFrame, box, drawingColor);
            Point center = new Point(box.X + box.Width / 2, box.Y + box.Height / 2);
            VectorOfPoint start_points = new VectorOfPoint(); // NOTE(review): unused
            VectorOfPoint far_points = new VectorOfPoint();   // NOTE(review): unused
            if (!defects.IsEmpty)
            {
                // Mat data cannot be read directly, so convert it to Matrix<>
                // (translated from: Los datos del Mat no se pueden leer directamente...)
                Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
                defects.CopyTo(m);
                gestualNum = 0;
                int x = int.MaxValue, y = int.MaxValue;
                // Each defect row holds (start_index, end_index, farthest_pt_index, fixpt_depth).
                for (int i = 0; i < m.Rows; i++)
                {
                    int startIdx = m.Data[i, 0];
                    int endIdx = m.Data[i, 1];
                    int farIdx = m.Data[i, 2];
                    Point startPoint = contours[chosen][startIdx];
                    Point endPoint = contours[chosen][endIdx];
                    Point farPoint = contours[chosen][farIdx];
                    CvInvoke.Circle(displayFrame, endPoint, 3, new MCvScalar(0, 255, 255));
                    CvInvoke.Circle(displayFrame, startPoint, 3, new MCvScalar(255, 255, 0));
                    //if (true)
                    //{
                    // Track the topmost end point (smallest Y) as the pointer position.
                    if (endPoint.Y < y)
                    {
                        x = endPoint.X;
                        y = endPoint.Y;
                    }
                    //}
                    // Distance from the bounding-box center to the defect's farthest point.
                    double distance = Math.Round(Math.Sqrt(Math.Pow((center.X - farPoint.X), 2) + Math.Pow((center.Y - farPoint.Y), 2)), 1);
                    // Defects close to the palm center count toward the gesture.
                    if (distance < box.Height * 0.3)
                    {
                        CvInvoke.Circle(displayFrame, farPoint, 10, new MCvScalar(255, 0, 0), 4);
                        gestualNum++;
                    }
                    // Draw a line connecting the convexity defect's start and end points
                    // (translated from: dibuja una línea que conecta el punto de inicio...)
                    CvInvoke.Line(displayFrame, startPoint, endPoint, new MCvScalar(0, 255, 255));
                }
                // The open-hand gesture must repeat for 3 frames to toggle a click.
                if (gestualNum >= 4)
                {
                    //Console.WriteLine("numero gestual 4");
                    gestualNumRepite++;
                }
                else
                {
                    gestualNumRepite = 0;
                }
                if (gestualNumRepite == 3)
                {
                    Console.WriteLine("numero gestual 5 Click");
                    gestualNumRepite = 0;
                    if (detectClick == true)
                    {
                        detectClick = false;
                    }
                    else
                    {
                        detectClick = true;
                    }
                }
                Console.WriteLine("numero gestual " + gestualNum);
                //var info = new string[] { $"Puntero", $"Posicion: {x}, {y}" };
                //WriteMultilineText(displayFrame, info, new Point(x + 30, y));
                centerSensor.X = x;
                centerSensor.Y = y;
                CvInvoke.Circle(displayFrame, new Point(x, y), 20, new MCvScalar(255, 0, 255), 2);
                //CvInvoke.Circle(picture, new Point(x * 2, y * 4), 20, new MCvScalar(255, 0, 255), 2);
                return (displayFrame);
            }
            // detectGesture = false;
            // return displayFrame;
        }
        return (displayFrame);
    }
}
/// <summary>
/// Extracts the largest external contour and its convex hull from the skin mask,
/// draws the contour, hull, min-area box and its center onto currentFrame, and
/// stores the contour's convexity defects into the mDefect field.
/// (Original comment: kiem vien va lop ngoai — find contour and outer layer.)
/// </summary>
private void ExtractContourAndHull(Image <Gray, byte> skin)
{
    using (MemStorage storage = new MemStorage())
    {
        VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
        CvInvoke.FindContours(skin, contours, new Mat(), Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
        VectorOfPoint biggestContour = new VectorOfPoint(); // holds the largest contour (translated from: mang point[] chua vien lon nhat)
        Double Result1 = 0;                                 // area under consideration (translated from: area dang xet)
        Result = 0;
        for (int i = 0; i < contours.Size; i++)
        {
            VectorOfPoint contour = contours[i];
            double area = CvInvoke.ContourArea(contour, false); // compute the contour's area (translated from: tinh area)
            Result1 = area;
            if (Result1 > Result)
            {
                Result = Result1;
                biggestContour = contour;
            }
        }
        label8.Text = "Size Rect :" + Result.ToString();
        // NOTE(review): biggestContour is initialized above and never null, so this check always passes.
        if (biggestContour != null)
        {
            // Simplify the contour slightly before hull/defect analysis.
            CvInvoke.ApproxPolyDP(biggestContour, biggestContour, 0.00025, false);
            points = biggestContour.ToArray();
            currentFrame.Draw(points, new Bgr(255, 0, 255), 4);
            VectorOfPoint hull = new VectorOfPoint();
            VectorOfInt convexHull = new VectorOfInt();
            CvInvoke.ConvexHull(biggestContour, hull, false); //~ Hull
            box = CvInvoke.MinAreaRect(hull);
            currentFrame.Draw(new CircleF(box.Center, 5), new Bgr(Color.Black), 4);
            // Index-based hull, required by ConvexityDefects below.
            CvInvoke.ConvexHull(biggestContour, convexHull);
            //PointF[] Vertices = box.GetVertices();
            // handRect = box.MinAreaRect();
            currentFrame.Draw(box, new Bgr(200, 0, 0), 1); // draw the box bounding the hand (translated from: ve khung ban tay khung bao quanh tay)
            currentFrame.DrawPolyline(hull.ToArray(), true, new Bgr(200, 125, 75), 4);
            currentFrame.Draw(new CircleF(new PointF(box.Center.X, box.Center.Y), 3), new Bgr(200, 125, 75));
            // find convexity defects (translated from: tim convex defect)
            CvInvoke.ConvexityDefects(biggestContour, convexHull, defect);
            // convert the defect Mat to a Matrix<> (translated from: chuyen sang Matrix)
            if (!defect.IsEmpty)
            {
                mDefect = new Matrix <int>(defect.Rows, defect.Cols, defect.NumberOfChannels);
                defect.CopyTo(mDefect);
            }
        }
    }
}
/// <summary>
/// Count number of fingers on skinMask and draw debug information
/// </summary>
/// <param name="skinMask">Skin mask to count fingers on</param>
/// <returns>Mat with detection debug information</returns>
public Mat FindFingersCount(Mat skinMask)
{
    Mat contoursImage = Mat.Ones(skinMask.Height, skinMask.Width, DepthType.Cv8U, 3);
    // Require a non-empty single-channel mask.
    if (skinMask.IsEmpty || skinMask.NumberOfChannels != 1)
    {
        return (contoursImage);
    }
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    CvInvoke.FindContours(skinMask, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
    if (contours.Size <= 0)
    {
        return (contoursImage);
    }
    // Pick the contour with the largest area (assumed to be the hand).
    int biggestContourIndex = -1;
    double biggestArea = 0;
    for (int i = 0; i < contours.Size; i++)
    {
        double area = CvInvoke.ContourArea(contours[i], false);
        if (area > biggestArea)
        {
            biggestArea = area;
            biggestContourIndex = i;
        }
    }
    if (biggestContourIndex < 0)
    {
        return (contoursImage);
    }
    // Point-based hull for drawing/bounding box; index-based hull for ConvexityDefects.
    VectorOfPoint hullPoints = new VectorOfPoint();
    VectorOfInt hullInts = new VectorOfInt();
    CvInvoke.ConvexHull(contours[biggestContourIndex], hullPoints, true);
    CvInvoke.ConvexHull(contours[biggestContourIndex], hullInts, false);
    Mat defects = new Mat();
    // ConvexityDefects needs a hull of more than 3 points.
    if (hullInts.Size > 3)
    {
        CvInvoke.ConvexityDefects(contours[biggestContourIndex], hullInts, defects);
    }
    else
    {
        return (contoursImage);
    }
    Rectangle boundingRectangle = CvInvoke.BoundingRectangle(hullPoints);
    Point centerBoundingRectangle = new Point((boundingRectangle.X + boundingRectangle.Right) / 2, (boundingRectangle.Y + boundingRectangle.Bottom) / 2);
    VectorOfPoint startPoints = new VectorOfPoint();
    VectorOfPoint farPoints = new VectorOfPoint();
    // Each defect is 4 ints: (start_index, end_index, farthest_pt_index, fixpt_depth).
    int[,,] defectsData = (int[, , ])defects.GetData();
    for (int i = 0; i < defectsData.Length / 4; i++)
    {
        Point startPoint = contours[biggestContourIndex][defectsData[i, 0, 0]];
        // De-duplicate start points closer than 30px on both axes.
        if (!startPoints.ToArray().ToList().Any(p => Math.Abs(p.X - startPoint.X) < 30 && Math.Abs(p.Y - startPoint.Y) < 30))
        {
            VectorOfPoint startPointVector = new VectorOfPoint(new Point[] { startPoint });
            startPoints.Push(startPointVector);
        }
        Point farPoint = contours[biggestContourIndex][defectsData[i, 0, 2]];
        // Keep only far points close enough to the palm center.
        if (findPointsDistance(farPoint, centerBoundingRectangle) < boundingRectangle.Height * BOUNDING_RECT_FINGER_SIZE_SCALING)
        {
            VectorOfPoint farPointVector = new VectorOfPoint(new Point[] { farPoint });
            farPoints.Push(farPointVector);
        }
    }
    // Merge neighboring candidate points into their medians.
    VectorOfPoint filteredStartPoints = CompactOnNeighborhoodMedian(startPoints, boundingRectangle.Height * BOUNDING_RECT_NEIGHBOR_DISTANCE_SCALING);
    VectorOfPoint filteredFarPoints = CompactOnNeighborhoodMedian(farPoints, boundingRectangle.Height * BOUNDING_RECT_NEIGHBOR_DISTANCE_SCALING);
    VectorOfPoint filteredFingerPoints = new VectorOfPoint();
    if (filteredFarPoints.Size > 1)
    {
        VectorOfPoint fingerPoints = new VectorOfPoint();
        // A start point flanked by two far points at a valid angle counts as a finger.
        for (int i = 0; i < filteredStartPoints.Size; i++)
        {
            VectorOfPoint closestPoints = findClosestOnX(filteredFarPoints, filteredStartPoints[i]);
            if (isFinger(closestPoints[0], filteredStartPoints[i], closestPoints[1], LIMIT_ANGLE_INF, LIMIT_ANGLE_SUP, centerBoundingRectangle, boundingRectangle.Height * BOUNDING_RECT_FINGER_SIZE_SCALING))
            {
                fingerPoints.Push(new Point[] { filteredStartPoints[i] });
            }
        }
        if (fingerPoints.Size > 0)
        {
            while (fingerPoints.Size > 5)
            {
                //Remove extra fingers
                //Convert to list and remove last item
                List <Point> points = new List <Point>(fingerPoints.ToArray());
                points.Remove(points.Last());
                fingerPoints = new VectorOfPoint(points.ToArray());
            }
            // NOTE(review): empty loop — appears to be leftover scaffolding.
            for (int i = 0; i < fingerPoints.Size - 1; i++)
            {
            }
            filteredFingerPoints = fingerPoints;
            this.NumberOfFingersRaised = filteredFingerPoints.Size;
        }
    }
    Bgr colorRed = new Bgr(Color.Red);
    Bgr colorGreen = new Bgr(Color.Green);
    Bgr colorBlue = new Bgr(Color.Blue);
    Bgr colorYellow = new Bgr(Color.Yellow);
    Bgr colorPurple = new Bgr(Color.Purple);
    Bgr colorWhite = new Bgr(Color.White);
    //Debug, draw defects
    defectsData = (int[, , ])defects.GetData();
    for (int i = 0; i < defectsData.Length / 4; i++)
    {
        Point start = contours[biggestContourIndex][defectsData[i, 0, 0]];
        Point far = contours[biggestContourIndex][defectsData[i, 0, 2]];
        Point end = contours[biggestContourIndex][defectsData[i, 0, 1]];
        CvInvoke.Polylines(contoursImage, new Point[] { start, far, end }, true, colorPurple.MCvScalar, DRAW_THICKNESS / 2);
        CvInvoke.Circle(contoursImage, start, 5, colorWhite.MCvScalar);
        CvInvoke.Circle(contoursImage, far, 5, colorRed.MCvScalar, 10);
        CvInvoke.Circle(contoursImage, end, 5, colorBlue.MCvScalar);
    }
    //Draw information about what was detected (Contours, key points, fingers / how many fingers)
    CvInvoke.DrawContours(contoursImage, contours, 0, colorGreen.MCvScalar, DRAW_THICKNESS, LineType.AntiAlias);
    CvInvoke.Polylines(contoursImage, hullPoints, true, colorBlue.MCvScalar, DRAW_THICKNESS);
    CvInvoke.Rectangle(contoursImage, boundingRectangle, colorRed.MCvScalar, DRAW_THICKNESS);
    CvInvoke.Circle(contoursImage, centerBoundingRectangle, 5, colorYellow.MCvScalar, DRAW_THICKNESS);
    drawVectorPoints(contoursImage, filteredStartPoints, colorRed.MCvScalar, true, 3);
    drawVectorPoints(contoursImage, filteredFarPoints, colorWhite.MCvScalar, true, 3);
    drawVectorPoints(contoursImage, filteredFingerPoints, colorYellow.MCvScalar, false, 3);
    CvInvoke.PutText(contoursImage, filteredFingerPoints.Size.ToString(), centerBoundingRectangle, FontFace.HersheyComplex, 2, colorYellow.MCvScalar);
    return (contoursImage);
}
/// <summary>
/// Per-frame camera callback: segments skin by HSV range inside a fixed ROI,
/// finds the largest blob (assumed hand), draws its convex hull and convexity
/// defects, counts fingers from deep defects, and maps the approximate finger
/// count to a media-player command (1 = play, 5 = pause, 2 = volume up).
/// </summary>
private void ProcessFrame(object sender, EventArgs args)
{
    //Get frame
    Mat frame = camera.QueryFrame();
    //Process frame
    Image <Bgr, Byte> img = frame.ToImage <Bgr, Byte>();
    // Restrict processing to a fixed 300x300 region of interest.
    img.ROI = new Rectangle(100, 100, 300, 300);
    Image <Hsv, Byte> HSVimg = img.Convert <Hsv, Byte>();
    Image <Gray, Byte> binary = HSVimg.InRange(new Hsv(minH, minS, minV), new Hsv(maxH, maxS, maxV));
    // Morphological cleanup of the skin mask.
    Image <Gray, Byte> eroded = binary.Erode(erosions);
    Image <Gray, Byte> dilated = eroded.Dilate(dilations);
    //Detect largest blob
    CvBlobDetector blobDetector = new CvBlobDetector();
    CvBlobs blobs = new CvBlobs();
    blobDetector.Detect(dilated, blobs);
    int maxBlobArea = 0;
    CvBlob largestBlob = null;
    foreach (CvBlob blob in blobs.Values)
    {
        if (blob.Area > maxBlobArea)
        {
            maxBlobArea = blob.Area;
            largestBlob = blob;
        }
    }
    // Only treat sufficiently large blobs as a hand.
    if (largestBlob != null && largestBlob.Area >= 10000)
    {
        handContour = largestBlob.GetContour();
        VectorOfInt convexHullIndices = new VectorOfInt();
        VectorOfPoint convexHull = new VectorOfPoint();
        // Point-based hull for drawing; index-based hull for ConvexityDefects.
        CvInvoke.ConvexHull(new VectorOfPoint(handContour), convexHull);
        CvInvoke.ConvexHull(new VectorOfPoint(handContour), convexHullIndices);
        Mat defects = new Mat();
        //img.Draw(handContour, new Bgr(0, 0, 255),3);
        img.Draw(convexHull.ToArray(), new Bgr(255, 0, 0), 3);
        try
        {
            CvInvoke.ConvexityDefects(new VectorOfPoint(handContour), convexHullIndices, defects);
        }
        catch (CvException exc)
        {
            MessageBox.Show(exc.Message);
        }
        if (!defects.IsEmpty)
        {
            // Defect Mat is not directly readable; copy into a Matrix<>.
            Matrix <int> defectsInt = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
            defects.CopyTo(defectsInt);
            int countFingers = 0;
            // Each row: (start_index, end_index, farthest_pt_index, fixpt_depth).
            for (int i = 0; i < defectsInt.Rows; i++)
            {
                int startIdx = defectsInt.Data[i, 0];
                int endIdx = defectsInt.Data[i, 1];
                int farthestIdx = defectsInt.Data[i, 2];
                float distance = defectsInt.Data[i, 3];
                // Deep defects (fixed-point depth >= 15000) count as finger gaps.
                if (distance >= 15000)
                {
                    //distances.Add(distance);
                    Point startPoint = handContour[startIdx];
                    Point endPoint = handContour[endIdx];
                    Point farthestPoint = handContour[farthestIdx];
                    img.Draw(new CircleF(startPoint, 2.0f), new Bgr(0, 255, 0), 2);
                    img.Draw(new CircleF(endPoint, 2.0f), new Bgr(255, 0, 0), 2);
                    img.Draw(new CircleF(farthestPoint, 2.0f), new Bgr(0, 0, 255), 2);
                    CvInvoke.Line(img, startPoint, farthestPoint, new MCvScalar(255, 255, 0));
                    countFingers++;
                }
            }
            // Approximate the finger count and classify: 1 finger = play, 5 fingers = pause
            // (translated from: Approssimo conteggio dita, e classifico : 1 dito = play, 5 dita = pausa)
            if (Math.Abs(countFingers - 1) < Math.Abs(countFingers - 5) && Math.Abs(countFingers - 1) < Math.Abs(countFingers - 2))
            {
                label10.Text = "Play";
                axWindowsMediaPlayer1.Ctlcontrols.play();
            }
            else if (Math.Abs(countFingers - 5) < Math.Abs(countFingers - 1) && Math.Abs(countFingers - 5) < Math.Abs(countFingers - 2))
            {
                label10.Text = "Pause";
                axWindowsMediaPlayer1.Ctlcontrols.pause();
            }
            else if (Math.Abs(countFingers - 2) < Math.Abs(countFingers - 1) && Math.Abs(countFingers - 2) < Math.Abs(countFingers - 5))
            {
                label10.Text = "Volume Up";
                axWindowsMediaPlayer1.Ctlcontrols.pause();
                axWindowsMediaPlayer1.settings.volume++;
            }
        }
    }
    pictureBox1.Image = binary.Bitmap;
}
/// <summary>
/// Builds a skin mask by intersecting three color-range masks (RGB, HSV and
/// chroma-normalized), cleaning it morphologically, then filling the detected
/// contours. Convex hulls of the contours are computed but the hull-based fill
/// is currently commented out.
/// </summary>
/// <param name="Image">Source BGR image.</param>
/// <returns>Single-channel mask of the detected regions.</returns>
public Image <Gray, byte> Mask(Image <Bgr, byte> Image)
{
    // Empirical color thresholds, specified as fractions of full scale.
    var lowR = 0.0228 * 255;
    var highR = 0.8876 * 255;
    var lowG = 0.0515 * 255;
    var highG = 0.9167 * 255;
    var lowB = 0 * 255;
    var highB = 0.3030 * 255;
    var lowH = 0.0228 * 180;
    var highH = 0.8876 * 180;
    var lowS = 0.0515 * 255;
    var highS = 0.9167 * 255;
    var lowV = 0 * 255;
    var highV = 0.3030 * 255;
    var lowRn = 0.2088 * 255;
    var highRn = 0.5097 * 255;
    var lowGn = 0.3726 * 255;
    var highGn = 0.6000 * 255;
    var lowBn = 0 * 255;
    var highBn = 0.3468 * 255;
    #region Color_mask
    var hsvImage = Image.Convert <Hsv, byte>();
    var hsvMask = hsvImage.InRange(new Hsv(lowH, lowS, lowV), new Hsv(highH, highS, highV));
    var cromaImage = ImageHelper.Bgr2Croma(Image);
    var cromaMask = cromaImage.InRange(new Bgr(lowBn, lowGn, lowRn), new Bgr(highBn, highGn, highRn));
    var rgbMask = Image.InRange(new Bgr(lowB, lowG, lowR), new Bgr(highB, highG, highR));
    #endregion Color_mask
    // Intersect the three masks (multiplying binary masks acts as AND).
    var combinedMasks = rgbMask.CopyBlank();
    CvInvoke.Multiply(rgbMask, hsvMask, combinedMasks);
    CvInvoke.Multiply(cromaMask, combinedMasks, combinedMasks);
    // Close small holes: dilate then erode with a 7x7 ellipse.
    CvInvoke.Dilate(combinedMasks, combinedMasks, CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(7, 7), new Point(-1, -1)), new Point(-1, -1), 1, BorderType.Default, new MCvScalar(255));
    CvInvoke.Erode(combinedMasks, combinedMasks, CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(7, 7), new Point(-1, -1)), new Point(-1, -1), 1, BorderType.Default, new MCvScalar(255));
    using (Mat hierarchy = new Mat())
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(combinedMasks, contours, hierarchy, RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
            for (int i = 0; i < contours.Size; i++)
            {
                var contour = contours[i];
                // Convert the integer contour to float points for ConvexHull.
                var vf = new PointF[contour.Size];
                for (int ii = 0; ii < contour.Size; ii++)
                {
                    vf[ii] = new PointF(contour[ii].X, contour[ii].Y);
                }
                VectorOfPointF vvf = new VectorOfPointF(vf);
                var c = new VectorOfPointF();
                CvInvoke.ConvexHull(vvf, c, false, true);
                // Hull converted back to integer points; currently only used by the
                // commented-out FillConvexPoly below.
                var cf = c.ToArray();
                var vp = new Point[c.Size];
                for (int ii = 0; ii < c.Size; ii++)
                {
                    vp[ii] = new Point((int)cf[ii].X, (int)cf[ii].Y);
                }
                var c2 = new VectorOfPoint(vp);
                // Fill the raw contour (not its hull) into the mask.
                CvInvoke.DrawContours(combinedMasks, contours, i, new MCvScalar(255), -1);
                //if (c.Size > 1)
                //    CvInvoke.FillConvexPoly(combinedMasks, c2, new MCvScalar(255), LineType.FourConnected);
            }
        }
    //combinedMasks._ThresholdBinary(new Gray(0), new Gray(255));
    hsvImage.Dispose();
    hsvMask.Dispose();
    cromaImage.Dispose();
    cromaMask.Dispose();
    rgbMask.Dispose();
    return (combinedMasks);
}
/// <summary>
/// Calibration pass: publishes per-channel debug images, finds the contour
/// with the longest perimeter in the thresholded channel sum, hulls its
/// polygonal approximation, and — when the hull has exactly six vertices and
/// the user has requested validation — persists the hexagon's vertices and
/// centre into the global configuration. Returns whichever debug image the
/// current state selects for display.
/// </summary>
private Mat DoCalibration(Image <Bgr, byte> medianBlurImageIn, GlobalState state)
{
    // Expose the individual B/G/R planes and their sum for debugging.
    DebugImages[(int)SelectedImage.InImageB] = medianBlurImageIn[0].Mat;
    DebugImages[(int)SelectedImage.InImageG] = medianBlurImageIn[1].Mat;
    DebugImages[(int)SelectedImage.InImageR] = medianBlurImageIn[2].Mat;
    var InImageSum = medianBlurImageIn[0] + medianBlurImageIn[1] + medianBlurImageIn[2];
    DebugImages[(int)SelectedImage.InImageSum] = InImageSum.Mat;

    // Binarise the channel sum, then edge-detect it for contour search.
    Mat threshold = new Mat();
    CvInvoke.Threshold(InImageSum, threshold, state.VideoParameters.Threshold, 255, ThresholdType.Binary);
    DebugImages[(int)SelectedImage.threshold] = threshold;
    Mat CannyImage = new Mat();
    CvInvoke.Canny(threshold, CannyImage, state.VideoParameters.CannyThreshold1, state.VideoParameters.CannyThreshold2, 3, true);
    DebugImages[(int)SelectedImage.Canny] = CannyImage;
    var contoursImage = medianBlurImageIn.Clone();
    DebugImages[(int)SelectedImage.approxContour] = contoursImage.Mat;

    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(CannyImage, contours, null, RetrType.External, ChainApproxMethod.ChainApproxNone);

        // Keep the contour with the longest closed perimeter.
        VectorOfPoint maxContour = null;
        double arcSize = -1;
        for (int i = 0; i < contours.Size; i++)
        {
            var arc = CvInvoke.ArcLength(contours[i], true);
            if (arc > arcSize)
            {
                arcSize = arc;
                maxContour = contours[i];
            }
        }

        if (maxContour != null)
        {
            using (VectorOfPoint approxContour = new VectorOfPoint())
            {
                // Simplify, then take the convex hull of the simplified polygon.
                CvInvoke.ApproxPolyDP(maxContour, approxContour, state.VideoParameters.ContourEpsilon, true);
                var convexContour = CvInvoke.ConvexHull(approxContour.ToArray().Select((x) => new PointF(x.X, x.Y)).ToArray());
                var pointConvexContour = convexContour.Select((x) => new Point((int)x.X, (int)x.Y)).ToArray();
                var circle = CvInvoke.MinEnclosingCircle(convexContour);

                // Exactly six hull vertices → candidate hexagon; ask the user
                // before persisting it as the new calibration.
                if (convexContour.Length == 6 && validateAsked)
                {
                    validateAsked = false;
                    if (MessageBox.Show("Un hexagonne a été trouvé, mettre à jour ?", "", MessageBoxButtons.YesNo) == DialogResult.Yes)
                    {
                        using (var globState = GlobalState.GetState())
                        {
                            globState.VideoParameters.Center = new Point((int)circle.Center.X, (int)circle.Center.Y);
                            // Anchor vertex = the one with the largest Y (bottom of
                            // the image in pixel coordinates).
                            // NOTE(review): the name "indexSommetHaut" suggests the
                            // *top* vertex was intended — confirm Max vs Min Y.
                            var maxY = convexContour.Max((x) => x.Y);
                            var indexSommetHaut = convexContour.ToList().FindIndex((x) => x.Y >= maxY - 0.1);
                            // NOTE(review): both loops copy index i → i, so together
                            // they are equivalent to one pass over all vertices; the
                            // split looks like it was meant to rotate the numbering
                            // to start at indexSommetHaut — verify intent.
                            for (int i = indexSommetHaut; i < convexContour.Length; i++)
                            {
                                globState.VideoParameters.Points[i] = pointConvexContour[i];
                            }
                            for (int i = 0; i < indexSommetHaut; i++)
                            {
                                globState.VideoParameters.Points[i] = pointConvexContour[i];
                            }
                            grid.Refresh();
                            globState.SaveConfiguration();
                            this.btnValidateCalib.BackgroundImage = global::Ballcuber.Properties.Resources.OK;
                        }
                    }
                }

                // Overlay the hull, its enclosing circle, and the centre cross.
                contoursImage.DrawPolyline(pointConvexContour, true, new Bgr(Color.Green), 3);
                contoursImage.Draw(circle, new Bgr(Color.DarkGreen), 3);
                contoursImage.Draw(new Cross2DF(circle.Center, 10, 10), new Bgr(Color.DarkGreen), 3);
            }
        }
        return(DebugImages[(int)state.VideoParameters.DebugImage]);
    }
}
/// <summary>
/// Detects whether the ball overlaps the hand: rasterises the ball and the
/// hand's convex hull into two blank masks, ANDs them, and reports overlap.
/// Also drives the rule state machine — when the ball leaves the hand the
/// game advances to <c>InitiateRule.SecondRule</c>.
/// </summary>
/// <param name="ball">Detected ball (Radius &lt;= 0 means "not detected").</param>
/// <param name="detectedHand">Hand contour (Size == 0 means "not detected").</param>
/// <param name="image">Reference frame used only for size/depth/channel layout.</param>
/// <returns>True when any ball pixel overlaps the hand hull.</returns>
private bool DetectFirstRule(CircleF ball, VectorOfPoint detectedHand, Mat image)
{
    bool ballInHand;
    // `using` guarantees the native Mats are released each frame; previously
    // circle/hand/res (and the hull vectors) leaked on every call.
    using (Mat circle = new Mat(image.Rows, image.Cols, image.Depth, image.NumberOfChannels))
    using (Mat hand = new Mat(image.Rows, image.Cols, image.Depth, image.NumberOfChannels))
    using (Mat res = new Mat(image.Rows, image.Cols, image.Depth, image.NumberOfChannels))
    {
        circle.SetTo(new MCvScalar(0));
        hand.SetTo(new MCvScalar(0));

        // Smooth the radius with the max observed over the last half second,
        // falling back to the instantaneous radius when there is no history.
        int averageBallRadius = lastHalfSecondRadiuses.Count > 0 ? lastHalfSecondRadiuses.Max() : (int)ball.Radius;
        if (ball.Radius > 0)
        {
            CvInvoke.Circle(circle, System.Drawing.Point.Round(ball.Center), averageBallRadius, new Bgr(System.Drawing.Color.White).MCvScalar, -1);
        }

        if (detectedHand.Size != 0)
        {
            // (The original code allocated a VectorOfVectorOfPoint from the raw
            // contour and immediately overwrote it — that dead allocation is gone.)
            using (VectorOfPoint hull = new VectorOfPoint())
            {
                CvInvoke.ConvexHull(detectedHand, hull, false, true);
                using (var cont = new VectorOfVectorOfPoint(hull))
                {
                    CvInvoke.DrawContours(hand, cont, 0, new Bgr(System.Drawing.Color.White).MCvScalar, -1);
                }
            }
        }

        CvInvoke.BitwiseAnd(circle, hand, res);
        // Collapse to a single channel so CountNonZero can be applied.
        // NOTE(review): the masks were drawn as BGR white, so the Hsv2Bgr step
        // is suspect but preserved as-is — confirm intent before changing.
        CvInvoke.CvtColor(res, res, ColorConversion.Hsv2Bgr);
        CvInvoke.CvtColor(res, res, ColorConversion.Bgr2Gray);
        ballInHand = CvInvoke.CountNonZero(res) > 0;
    }

    if (ballInHand)
    {
        lastFrameBallInHand = ball;
    }
    // Falling edge (was in hand, now is not) advances the rule state machine.
    if (!ballInHand && ballIsInHand)
    {
        rule = InitiateRule.SecondRule;
    }
    ballIsInHand = ballInHand;
    return(ballInHand);
}
/// <summary>
/// Classifies the largest stored contour (via Hu moments + SVM) and returns
/// the region of interest containing the hand. A HAND classification returns
/// the contour's min-area box directly; an ARM classification cuts the box at
/// the deepest convexity defect and re-classifies the cut region. Returns
/// <see cref="Rectangle.Empty"/> when no hand is found.
/// </summary>
private Rectangle LocateROI()
{
    int prediction = -1;
    VectorOfPoint contourOfInterest = new VectorOfPoint();
    int index = 0;
    index = ImgProc.LargestContourIndex(_contour);
    contourOfInterest = _contour[index];
    // Hu moments are the SVM's feature vector for the whole contour.
    MCvMoments moment = CvInvoke.Moments(contourOfInterest);
    double[] huMoment = moment.GetHuMoment();
    prediction = _svm.Compute(huMoment);
    //foreach (VectorOfPoint vp in _listOfContours.GetRange(0, 5))
    //{
    //    MCvMoments moment = CvInvoke.Moments(vp);
    //    double[] huMoment = moment.GetHuMoment();
    //    prediction = _svm.Compute(huMoment);
    //    if (prediction == CLASSIFICATION_ARM || prediction == CLASSIFICATION_HAND)
    //    {
    //        contourOfInterest = vp;
    //        break;
    //    }
    //}
    if (prediction == CLASSIFICATION_REJECT)
    {
        return(Rectangle.Empty);
    }
    else if (prediction == CLASSIFICATION_HAND)
    {
        //Rectangle rectRotRect = rectRot.MinAreaRect();
        //Rectangle init = CvInvoke.MinAreaRect(contoursEval1[largestContourIndexEval1]).MinAreaRect();
        //Point final = new Point(rectRotRect.X + init.X, rectRotRect.Y + init.Y);
        //return new Rectangle(final, init.Size);
        return(CvInvoke.MinAreaRect(contourOfInterest).MinAreaRect());
    }
    else if (prediction == CLASSIFICATION_ARM)
    {
        // The contour is an arm: find where the hand starts by looking at
        // convexity defects, then cut the bounding box there.
        Mat convexityDefect = new Mat();
        VectorOfInt hull = new VectorOfInt();
        CvInvoke.ConvexHull(contourOfInterest, hull, false, false);
        CvInvoke.ConvexityDefects(contourOfInterest, hull, convexityDefect);
        RotatedRect rectRot = CvInvoke.MinAreaRect(contourOfInterest);
        ModifiedRotatedRect rotRectMod = new ModifiedRotatedRect(rectRot);
        int yDel = 0; // NOTE(review): never used — candidate for removal.
        // Box edge lengths: top edge vs left edge.
        double ptLftToRight = Geometry.Distance(rotRectMod.Pul, rotRectMod.Pur);
        double ptUpToDown = Geometry.Distance(rotRectMod.Pul, rotRectMod.Pll);
        if (!convexityDefect.IsEmpty)
        {
            // Copy the defect Mat into an accessible matrix; each row holds
            // (start index, end index, farthest-point index, depth).
            Matrix <int> convex = new Matrix <int>(convexityDefect.Rows, convexityDefect.Cols, convexityDefect.NumberOfChannels);
            convexityDefect.CopyTo(convex);
            List <Point> contourTmp = new List <Point>();
            for (int i = 0; i < contourOfInterest.Size; i++)
            {
                contourTmp.Add(contourOfInterest[i]);
            }
            List <ConvexDefects> convexDefectList = new List <ConvexDefects>();
            for (int i = 0; i < convex.Rows; i++)
            {
                // do not touch
                int startIdx = convex.Data[i, 0];
                int endIdx = convex.Data[i, 1];
                int pointIdx = convex.Data[i, 2];
                Point startPt = contourOfInterest[startIdx];
                Point endPt = contourOfInterest[endIdx];
                Point defectPt = contourOfInterest[pointIdx];
                // do not touch
                convexDefectList.Add(new ConvexDefects(startPt, endPt, defectPt));
            }
            // Portrait-ish box: cut at the lower of the first two defect points.
            // NOTE(review): convexDefectList[0]/[1] are read without checking
            // that at least two defects exist — this throws on a nearly-convex
            // contour. Confirm upstream guarantees or add a Count >= 2 guard.
            if (ptLftToRight <= ptUpToDown)
            {
                Point pc1Tmp = convexDefectList[0].DefectPt;
                Point pc2Tmp = convexDefectList[1].DefectPt;
                Point pc = pc1Tmp.Y > pc2Tmp.Y ? pc1Tmp : pc2Tmp;
                Point ptUpLeft = rotRectMod.Pul;
                Point ptUpRight = rotRectMod.Pur;
                Point ptLowLeft = rotRectMod.Pll;
                Point ptLowRight = rotRectMod.Plr;
                // Two candidate cuts (one each side of the defect point).
                ModifiedRotatedRect rotRectEval1 = ModifiedRotatedRect.Cut(ptUpLeft, ptUpRight, ptLowLeft, ptLowRight, pc);
                ModifiedRotatedRect rotRectEval2 = ModifiedRotatedRect.Cut(ptUpLeft, ptUpRight, ptLowLeft, ptLowRight, pc, true);
                Size sizeFrame = ImageInput.Size;
                Rectangle rectROIEval1 = rotRectEval1.ToRect(sizeFrame);
                Rectangle rectROIEval2 = rotRectEval2.ToRect(sizeFrame);
                // Re-classify the first candidate region only.
                Mat cloneMat1 = ImageInput.Clone().Mat;
                Mat matToBeEval1 = new Mat(cloneMat1, rectROIEval1);
                VectorOfVectorOfPoint contoursEval1 = new VectorOfVectorOfPoint();
                Mat matHierachyEval1 = new Mat();
                CvInvoke.FindContours(matToBeEval1, contoursEval1, matHierachyEval1, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxTc89L1);
                int largestContourIndexEval1 = ImgProc.LargestContourIndex(contoursEval1);
                MCvMoments momentEval1 = CvInvoke.Moments(contoursEval1[largestContourIndexEval1]);
                double[] huMomentsEval1 = momentEval1.GetHuMoment();
                double[] featureVectorSearch = ScaleValues(huMomentsEval1, 5000.0);
                int predictionEval1 = _svm.Compute(featureVectorSearch, MulticlassComputeMethod.Elimination);
                //double[] featureVectorHand = ScaleValues(huMomentsEval1.
                //    .GetRange(0, _svmMachineHand.Inputs).ToArray(), 1000.0);
                if (predictionEval1 == CLASSIFICATION_HAND)
                {
                    // Translate the sub-region rectangle back to frame coordinates.
                    Rectangle rectRotRect = rectRot.MinAreaRect();
                    Rectangle init = CvInvoke.MinAreaRect(contoursEval1[largestContourIndexEval1]).MinAreaRect();
                    Point final = new Point(rectRotRect.X + init.X, rectRotRect.Y + init.Y);
                    return(new Rectangle(final, init.Size));
                }
                else
                {
                    return(Rectangle.Empty);
                }
            }
            else
            {
                return(Rectangle.Empty);
            }
        }
        else
        {
            return(Rectangle.Empty);
        }
    }
    else
    {
        return(Rectangle.Empty);
    }
}
/// <summary>
/// Per-frame pipeline: background subtraction → smoothing → HSV range filter
/// → largest-contour extraction → convexity defects + trained shape matching,
/// then pushes the intermediate images to the UI. `contours`, `idx`,
/// `maxArea` and the drawing targets are class fields reused every tick.
/// </summary>
void timer_Tick(object sender, EventArgs e)
{
    sw.Start();
    currentFrame = capture.QueryFrame().ToImage<Hsv, byte>();
    // Remove the previously captured static background.
    currentFrame = currentFrame.AbsDiff(backgroundFrame);
    // Pyramid down/up + Gaussian blur acts as a cheap denoise.
    smoothedFrame = currentFrame.PyrDown().PyrUp();
    smoothedFrame._SmoothGaussian(3);
    filteredFrame = smoothedFrame.InRange(new Hsv(hueLower, saturationLower, valueLower), new Hsv(hueHigher, saturationHigher, valueHigher));
    outFrame = filteredFrame;//.Canny((cannyThresh), (cannyThreshLink));
    CvInvoke.FindContours(outFrame, contours, hierarchy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
    // Track the largest contour by area (idx/maxArea reset at the end).
    for (int i = 0; i < contours.Size; i++)
    {
        double area = CvInvoke.ContourArea(contours[i], false);
        if (area > maxArea)
        {
            maxArea = area;
            idx = i;
        }
    }
    // Repaint the overlay canvas and draw the winning contour.
    image.SetValue(new MCvScalar(180, 0, 255));
    CvInvoke.DrawContours(image, contours, idx, new MCvScalar(255, 255, 255), 3);
    ContourArea.Text = maxArea.ToString();
    if (maxArea != 0)
    {
        // Convex hull + convexity defects of the largest contour.
        CvInvoke.ConvexHull(contours[idx], cvh, false);
        CvInvoke.ConvexityDefects(contours[idx], cvh, cvd);
        moments = CvInvoke.Moments(contours[idx], true);
        centroid = new System.Drawing.Point((int) moments.GravityCenter.X, (int) moments.GravityCenter.Y);
        CvInvoke.Circle(image, centroid, 5, new MCvScalar(100,50,25), 1);
        if (contours.Size != 0)
        {
            polyline = contours[idx].ToArray();
            if (!cvd.IsEmpty && contours[idx].Size > 10)
            {
                Matrix<int> m = new Matrix<int>(cvd.Rows, cvd.Cols, cvd.NumberOfChannels);
                cvd.CopyTo(m);
                for (int i = 0; i < m.Rows; i++)
                {
                    // Defect row layout: [start, end, farthest, depth].
                    int startIdx = m.Data[i, 0];
                    int endIdx = m.Data[i, 1];
                    int fpIdx = m.Data[i, 2];
                    int depth = m.Data[i, 3];
                    startPoint = polyline[startIdx];
                    endPoint = polyline[endIdx];
                    midPoint = new System.Drawing.Point( (startPoint.X + endPoint.X) / 2, (startPoint.Y + endPoint.Y) / 2);
                    farthestPoint = polyline[fpIdx];
                    // Spoke from chord midpoint to the defect's deepest point,
                    // plus the chord itself.
                    CvInvoke.Line(image, midPoint, farthestPoint, new MCvScalar(180, 255, 0));
                    CvInvoke.Line(image, startPoint, endPoint, new MCvScalar(180, 255, 255));
                }
            }
            // Match against the trained shape gallery; the best match wins if
            // its I3 distance is below 0.25.
            if(trained.Size!=0)
            {
                double match=1000000;
                int d = 0;
                for (int i = 0; i < trained.Size; i++)
                {
                    double curr = CvInvoke.MatchShapes(contours[idx], trained[i], ContoursMatchType.I3);
                    if(curr < match)
                    {
                        d = i;
                        match = curr;
                    }
                }
                if(match<0.25)
                {
                    ContourArea.Text = words[d];
                    image.Draw(words[d], centroid, FontFace.HersheyTriplex, 1, new Hsv(90,100, 100));
                }
            }
        }
    }
    // Publish the frames to the UI and reset per-tick accumulators.
    if (currentFrame != null)
    {
        sw.Stop();
        imgPros.Source = ToBitmapSource(outFrame);
        imgOrig.Source = ToBitmapSource(currentFrame);
        imgSmooth.Source = ToBitmapSource(image);
        sw.Reset();
    }
    maxArea = 0;
    idx = 0;
}
/// <summary>
/// Finds the largest skin contour, draws its convex hull and min-area box on
/// <paramref name="originalImage"/>, and caches the convexity-defect index
/// arrays (startIndex/endIndex/depthIndex fields) for later finger counting.
/// Also sets the currentContour, box and defects fields.
/// </summary>
private void ExtractContourAndHull(Image <Bgr, byte> originalImage, Image <Gray, byte> skin)
{
    var contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(skin, contours, new Mat(), RetrType.List, ChainApproxMethod.ChainApproxSimple);

    // Pick the contour with the most points as "the hand".
    // NOTE(review): point count is only a proxy for size; ContourArea would
    // be a more robust criterion — confirm before changing.
    var result2 = 0;
    VectorOfPoint biggestContour = null;
    if (contours.Size != 0)
    {
        biggestContour = contours[0];
    }
    for (var i = 0; i < contours.Size; i++)
    {
        var result1 = contours[i].Size;
        if (result1 <= result2)
        {
            continue;
        }
        result2 = result1;
        biggestContour = contours[i];
    }
    if (biggestContour == null)
    {
        return;
    }

    // Epsilon 0 keeps every vertex (no simplification).
    currentContour = new VectorOfPoint();
    CvInvoke.ApproxPolyDP(biggestContour, currentContour, 0, true);
    //TODO Get to know why it gives exception
    //ImageFrame.Draw(biggestContour, 3, new Bgr(Color.LimeGreen));
    biggestContour = currentContour;

    // Convex hull of the contour, as float points (for drawing).
    var pointsToFs = new PointF[currentContour.Size];
    for (var i = 0; i < currentContour.Size; i++)
    {
        pointsToFs[i] = new PointF(currentContour[i].X, currentContour[i].Y);
    }
    var hull = CvInvoke.ConvexHull(pointsToFs, true);

    // Minimum-area rotated rectangle around the contour (stored in `box`).
    pointsToFs = new PointF[biggestContour.Size];
    for (var i = 0; i < biggestContour.Size; i++)
    {
        pointsToFs[i] = new PointF(biggestContour[i].X, biggestContour[i].Y);
    }
    box = CvInvoke.MinAreaRect(pointsToFs);
    var points = box.GetVertices();
    var ps = new Point[points.Length];
    for (var i = 0; i < points.Length; i++)
    {
        ps[i] = new Point((int)points[i].X, (int)points[i].Y);
    }

    // Draw the hull outline and the box centre marker.
    var hullToPoints = new Point[hull.Length];
    for (var i = 0; i < hull.Length; i++)
    {
        hullToPoints[i] = Point.Round(hull[i]);
    }
    originalImage.DrawPolyline(hullToPoints, true, new Bgr(200, 125, 75), 2);
    originalImage.Draw(new CircleF(new PointF(box.Center.X, box.Center.Y), 3), new Bgr(200, 125, 75), 2);

    // Convexity defects between the contour and its (index-form) hull.
    var convexHull = new VectorOfInt();
    CvInvoke.ConvexHull(currentContour, convexHull, false, false);
    defects = new Mat();
    CvInvoke.ConvexityDefects(currentContour, convexHull, defects);
    if (!defects.IsEmpty)
    {
        // Each defect row is (start, end, farthest, depth); split into
        // per-channel matrices and cache the first three as fields.
        Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
        defects.CopyTo(m);
        Matrix <int>[] channels = m.Split();
        // BUGFIX: the guard used to be `channels.Length >= 2` while the body
        // reads channels index 2 — an IndexOutOfRangeException whenever
        // exactly two channels were produced. Require at least three.
        if (channels.Length >= 3)
        {
            startIndex = channels.ElementAt(0).Data;
            endIndex = channels.ElementAt(1).Data;
            depthIndex = channels.ElementAt(2).Data;
        }
    }
}
/// <summary>
/// Computes dimension, moment and contour metrics for the image currently
/// shown in imageView and writes a formatted report into textBox1.
/// </summary>
private void ShowMetrics()
{
    Image <Gray, byte> gray = imageView.Mat.ToImage <Gray, byte>();
    Mat thresh = new Mat();
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    // Central point (centre of mass), filled in below when M00 != 0.
    int cX = 0, cY = 0;

    // Image dimensions — read directly from the matrix. (This replaces the
    // former O(rows*cols) counting loops, which produced exactly these values.)
    int width = gray.Cols;
    int height = gray.Rows;
    int totalPixels = width * height;

    // Moments of the binarised image, used for the centre of mass.
    CvInvoke.Threshold(gray, thresh, 127, 255, ThresholdType.Binary);
    Moments m = CvInvoke.Moments(thresh, true);

    // Blur + Canny produce the edge map fed to the contour search.
    CvInvoke.Blur(gray, gray, new Size(3, 3), new Point(-1, -1));
    gray = gray.Canny(50, 100);
    CvInvoke.FindContours(gray, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxSimple);

    // BUGFIX: guard against an empty contour list — previously contours[0]
    // was indexed unconditionally and failed on a blank/feature-less image.
    if (contours.Size == 0)
    {
        textBox1.Text = "No contours found.";
        return;
    }
    VectorOfPoint cnt = contours[0];

    // Contour area and perimeter.
    double area = CvInvoke.ContourArea(cnt);
    double perimeter = CvInvoke.ArcLength(cnt, true);

    // Aspect ratio and extent from the upright bounding rectangle.
    Rectangle rect = CvInvoke.BoundingRectangle(cnt);
    double aspectRatio = (double)(rect.Width) / rect.Height;
    int rect_area = rect.Width * rect.Height;
    double extent = area / rect_area;

    // Solidity = contour area / convex-hull area.
    VectorOfPoint hull = new VectorOfPoint();
    CvInvoke.ConvexHull(cnt, hull);
    double hull_area = CvInvoke.ContourArea(hull);
    double solidity = area / hull_area;

    // Diameter of the circle whose area equals the contour's.
    double eq_diameter = Math.Sqrt(4 * area / Math.PI);

    // Centre of mass (guard against a zero total moment).
    if (Convert.ToInt32(m.M00) != 0)
    {
        cX = Convert.ToInt32(m.M10 / m.M00);
        cY = Convert.ToInt32(m.M01 / m.M00);
    }

    StringBuilder stringBuilder = new StringBuilder();
    stringBuilder.Append("Image Dimensions").Append(Environment.NewLine)
        .Append("Width: ").Append(width).Append(Environment.NewLine)
        .Append("Height: ").Append(height).Append(Environment.NewLine)
        .Append("Total pixels: ").Append(totalPixels).Append(Environment.NewLine)
        .Append("Megapixels: ").Append((double)(totalPixels) / 1000000).Append(" MPs").Append(Environment.NewLine)
        .Append("Channels: ").Append(gray.NumberOfChannels).Append(Environment.NewLine)
        .Append(Environment.NewLine)
        .Append("Moments").Append(Environment.NewLine)
        .Append("Moment m(0,0): ").Append(m.M00).Append(Environment.NewLine)
        .Append("Moment m(0,1): ").Append(m.M01).Append(Environment.NewLine)
        .Append("Moment m(1,0): ").Append(m.M10).Append(Environment.NewLine)
        .Append("Moment m(1,1): ").Append(m.M11).Append(Environment.NewLine)
        .Append(Environment.NewLine)
        .Append("Central Moments").Append(Environment.NewLine)
        .Append("Central Moment mu(20): ").Append(m.Mu20).Append(Environment.NewLine)
        .Append("Central Moment mu(11): ").Append(m.Mu11).Append(Environment.NewLine)
        .Append("Central Moment mu(02): ").Append(m.Mu02).Append(Environment.NewLine)
        .Append("Central Moment mu(30): ").Append(m.Mu30).Append(Environment.NewLine)
        .Append(Environment.NewLine)
        .Append("Center of mass").Append(Environment.NewLine)
        .Append("Central Point: ").Append("(").Append(cX).Append(", ").Append(cY).Append(")").Append(Environment.NewLine)
        .Append(Environment.NewLine)
        .Append("Object Properties").Append(Environment.NewLine)
        .Append("Area: ").Append(Convert.ToInt32(area)).Append(Environment.NewLine)
        .Append("Perimeter: ").Append(Convert.ToInt32(perimeter)).Append(Environment.NewLine)
        .Append(Environment.NewLine)
        .Append("Contour Properties").Append(Environment.NewLine)
        .Append("Aspect Ratio: 1 : ").Append(aspectRatio).Append(Environment.NewLine)
        .Append("Extent: ").Append(extent).Append(Environment.NewLine)
        .Append("Solidity: ").Append(solidity).Append(Environment.NewLine)
        .Append("Equivalent Diameter: ").Append(eq_diameter).Append(Environment.NewLine);
    textBox1.Text = stringBuilder.ToString();
}
/// <summary>
/// Canny-based edge extraction. Finds all contours in the image, keeps those
/// with more than 3000 points, and returns either the convex hull of all the
/// kept points merged together (<paramref name="value"/> == true) or the kept
/// contours themselves (<paramref name="value"/> == false).
/// </summary>
/// <param name="myImage">Source UIImage.</param>
/// <param name="th1">Canny low threshold.</param>
/// <param name="th2">Canny high (linking) threshold.</param>
/// <param name="aperture">Canny Sobel aperture size.</param>
/// <param name="value">True → return the hull; false → return the raw large contours.</param>
public static VectorOfVectorOfPoint DetectEdges(UIImage myImage, double th1, double th2, int aperture, bool value)
{
    //Load the image from file and resize it for display
    Image <Bgr, Byte> img = new Image <Bgr, byte>(myImage.CGImage);

    //Convert the image to grayscale and filter out the noise
    UMat uimage = new UMat();
    CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

    //use image pyr to remove noise
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(uimage, pyrDown);
    CvInvoke.PyrUp(pyrDown, uimage);

    double cannyThreshold = th1;
    double cannyThresholdLinking = th2;
    UMat cannyEdges = new UMat();
    CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking, aperture, true);

    VectorOfVectorOfPoint contourEdges = new VectorOfVectorOfPoint();
    UMat hierarchy = new UMat();
    CvInvoke.FindContours(cannyEdges, contourEdges, hierarchy, 0, ChainApproxMethod.ChainApproxNone);

    // Keep only the "large" contours (more than 3000 points).
    VectorOfVectorOfPoint newContourEdges = new VectorOfVectorOfPoint();
    for (int i = 0; i < contourEdges.Size; i++)
    {
        if (contourEdges [i].Size > 3000)
        {
            newContourEdges.Push(contourEdges [i]);
        }
    }
    contourEdges.Dispose();

    VectorOfVectorOfPoint hull = new VectorOfVectorOfPoint(1);
    // BUGFIX: previously newContourEdges[0] was dereferenced with no guard,
    // throwing whenever no contour exceeded the size threshold. The old code
    // also pushed contour 0's points twice (harmless for a hull, now gone).
    if (newContourEdges.Size > 0)
    {
        // Merge every kept contour into one flat point list and hull it.
        using (VectorOfPoint merged = new VectorOfPoint())
        {
            for (int i = 0; i < newContourEdges.Size; i++)
            {
                merged.Push(newContourEdges [i].ToArray());
            }
            CvInvoke.ConvexHull(merged, hull[0], true);
        }
    }

    return(value ? hull : newContourEdges);
}
/// <summary>
/// Per-frame handler: grabs a frame, binarises/blurs/morphs it according to
/// the UI sliders, finds the largest contour, draws it with its convexity
/// defects onto the output image, and pushes both the processing view and the
/// final view to the WPF image controls.
/// </summary>
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);

    //preprocessing
    Image <Bgr, byte> finalImg = frame.ToImage <Bgr, byte>().Flip(FlipType.Horizontal);
    Image <Gray, byte> processingImg = finalImg.Convert <Gray, byte>();

    // NOTE(review): BeginInvoke runs these closures asynchronously on the UI
    // thread while this method keeps using processingImg — the threshold/blur
    // may be applied late or not at all for this frame (race). Consider
    // reading the slider values synchronously instead — confirm intent.
    BiTonalLevel.Dispatcher.BeginInvoke(new Action(() =>
    {
        if (BiTonalLevel.Value > 0)
        {
            processingImg = processingImg.ThresholdBinary(new Gray(BiTonalLevel.Value), new Gray(255));
        }
    }));
    BlurLevel.Dispatcher.BeginInvoke(new Action(() =>
    {
        if (BlurLevel.Value > 1)
        {
            CvInvoke.Blur(processingImg, processingImg, new System.Drawing.Size((int)BlurLevel.Value, (int)BlurLevel.Value), new System.Drawing.Point(-1, -1));
        }
    }));

    //morphological processing
    // NOTE(review): Image.MorphologyEx returns a new image; the return value
    // is discarded here, so these calls may have no visible effect — verify
    // whether the in-place variant was intended.
    processingImg.MorphologyEx(firstMorphOp, kernel, new System.Drawing.Point(-1, -1), firstMorphSteps, BorderType.Default, new MCvScalar());
    if (doubleMorph)
    {
        processingImg.MorphologyEx(secondMorphOp, kernel2, new System.Drawing.Point(-1, -1), secondMorphSteps, BorderType.Default, new MCvScalar());
    }
    ProcessingVideoBox.Dispatcher.BeginInvoke(new Action(() => ProcessingVideoBox.Source = ToBitmapGrey(processingImg)));

    //edge detection
    Mat edges = new Mat(frame.Size, frame.Depth, 1);
    CvInvoke.Canny(processingImg, edges, lowerTresholdLevel, upperTresholdLevel, cannyKernelSize);

    //contours finding
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    int largest_contour_index = 0;
    double largest_area = 0;
    CvInvoke.FindContours(edges, contours, hierarchy, contouringMode, contouringMethod);
    // Select the contour with the largest area.
    for (int i = 0; i < contours.Size; i++)
    {
        double a = CvInvoke.ContourArea(contours[i], false);
        if (a > largest_area)
        {
            largest_area = a;
            largest_contour_index = i;
        }
    }
    CvInvoke.DrawContours(finalImg, contours, largest_contour_index, redColor, 3, LineType.EightConnected, hierarchy);

    //defects points finding
    VectorOfInt hull = new VectorOfInt();
    Mat defects = new Mat();
    if (contours.Size > 0)
    {
        VectorOfPoint largestContour = new VectorOfPoint(contours[largest_contour_index].ToArray());
        CvInvoke.ConvexHull(largestContour, hull, false, true);
        CvInvoke.ConvexityDefects(largestContour, hull, defects);
        if (!defects.IsEmpty)
        {
            // Defect channels: [0] = start index, [1] = end index.
            Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
            defects.CopyTo(m);
            Matrix <int>[] channels = m.Split();
            // NOTE(review): loop starts at 1, skipping the first defect row —
            // confirm whether that is intentional.
            for (int i = 1; i < defects.Rows; ++i)
            {
                // Chord between the defect's start and end points, plus a
                // filled circle at the start point.
                finalImg.Draw(new System.Drawing.Point[] { largestContour[channels[0][i, 0]], largestContour[channels[1][i, 0]] }, new Bgr(100, 255, 100), 2);
                CvInvoke.Circle(finalImg, new System.Drawing.Point(largestContour[channels[0][i, 0]].X, largestContour[channels[0][i, 0]].Y), 7, new MCvScalar(255, 0, 0), -1);
            }
        }
    }
    MainVideoBox.Dispatcher.BeginInvoke(new Action(() => MainVideoBox.Source = ToBitmapFinal(finalImg)));
}
/// <summary>
/// Kinect polling tick: acquires the latest depth frame, builds a binary
/// near-range mask from it (pixels between 10 and 1000 mm become black, all
/// others white), extracts significant contours, draws their convex hulls,
/// and marks filtered convexity-defect start points as finger candidates.
/// </summary>
public void Tick(Object sender, EventArgs args)
{
    if (mKinect != null)
    {
        MultiSourceFrame frame = mFrameReader.AcquireLatestFrame();
        TrackFingers();
        if (frame != null)
        {
            using (var depthFrame = frame.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    // Lazily allocate the per-frame buffers on first use.
                    if (depthData == null)
                    {
                        depthWidth = depthFrame.FrameDescription.Width;
                        depthHeight = depthFrame.FrameDescription.Height;
                        depthData = new ushort[depthWidth * depthHeight];
                        pixelData = new byte[depthWidth * depthHeight * 3];
                        mFrame = new Mat(depthHeight, depthWidth, DepthType.Cv8U, 1);
                    }
                    // NOTE(review): min/max reliable distances are read but only
                    // used by the commented-out intensity formula below.
                    ushort minDepth = depthFrame.DepthMinReliableDistance;
                    ushort maxDepth = depthFrame.DepthMaxReliableDistance;
                    depthFrame.CopyFrameDataToArray(depthData);
                    Image <Gray, Byte> img = mFrame.ToImage <Gray, Byte>();
                    // Binarise: near-range depths (10..1000) → 0, rest → 255.
                    for (int i = 0; i < depthData.Length; i++)
                    {
                        ushort depth = depthData[i];
                        //byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);
                        byte intensity = (byte)(depth < 1000 && depth > 10 ? 0 : 255);
                        img.Data[i / depthWidth, i % depthWidth, 0] = intensity;
                    }
                    mFrame = img.Mat;
                    // DISPLAY Depth image
                    //(Controls["FrameImageBox"] as ImageBox).Image = img;

                    //*********************
                    // Gaussian Blur
                    //*********************
                    CvInvoke.GaussianBlur(img, img, new Size(5, 5), 0);

                    //*********************
                    // Threshold
                    //*********************
                    //mFrame = img.Mat;
                    //Mat thresholds = new Mat(); ;
                    //CvInvoke.Threshold(mFrame, thresholds, THRESHOLD, THRESHOLD_MAX_VALUE, ThresholdType.Binary);
                    //// DISPLAY Thresholds
                    //(Controls["FrameImageBox"] as ImageBox).Image = img;

                    //*********************
                    // Contours
                    //*********************
                    Mat hierarchy = new Mat();
                    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
                    VectorOfVectorOfPointF significantContours = new VectorOfVectorOfPointF();
                    CvInvoke.FindContours(mFrame, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);
                    Image <Gray, Byte> contourImage = new Image <Gray, Byte>(mFrame.Size);
                    // Keep only contours whose area exceeds 500 px².
                    for (int i = 0; i < contours.Size; i++)
                    {
                        if (CvInvoke.ContourArea(contours[i]) > 500.0)
                        {
                            VectorOfPointF bigContour = new VectorOfPointF();
                            System.Drawing.PointF[] points = new System.Drawing.PointF[contours[i].Size];
                            Point[] intPoints = contours[i].ToArray();
                            // Widen integer contour points to PointF.
                            for (int j = 0; j < intPoints.Length; j++)
                            {
                                points[j] = intPoints[j];
                            }
                            bigContour.Push(points);
                            significantContours.Push(bigContour);
                        }
                    }
                    //if (contours.Size > 0)
                    //{
                    //    CvInvoke.DrawContours(contourImage, significantContours, -1, new MCvScalar(255, 0, 0));
                    //}
                    //(Controls["FrameImageBox"] as ImageBox).Image = contourImage;

                    //*********************
                    // Convex Hulls
                    //*********************
                    for (int i = 0; i < significantContours.Size; i++)
                    {
                        System.Drawing.PointF[] hullPoints;
                        VectorOfPoint contourPoints = new VectorOfPoint(Array.ConvertAll(significantContours[i].ToArray(), Point.Round));
                        VectorOfInt convexHull = new VectorOfInt();
                        // Point-form hull for drawing, index-form hull for
                        // the convexity-defect computation below.
                        hullPoints = CvInvoke.ConvexHull(significantContours[i].ToArray());
                        CvInvoke.ConvexHull(contourPoints, convexHull);
                        CvInvoke.Polylines(mFrame, Array.ConvertAll(hullPoints, Point.Round), true, new MCvScalar(255, 255, 255));
                        // How many defects tho?
                        //VectorOfVectorOfInt defects = new VectorOfVectorOfInt();
                        Mat defects = new Mat();
                        CvInvoke.ConvexityDefects(contourPoints /*significantContours[i]*/, convexHull /*new VectorOfPointF(hullPoints)*/, defects);
                        if (!defects.IsEmpty)
                        {
                            // Defect row layout: [start, end, farthest, depth].
                            Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
                            defects.CopyTo(m);
                            List <Point> validPoints = new List <Point>();
                            // Draw tha defacts
                            for (int d = 0; d < m.Rows; d++)
                            {
                                int startIndex = m.Data[d, 0];
                                int endIndex = m.Data[d, 1];
                                int farthestIndex = m.Data[d, 2];
                                Point farthestPoint = contourPoints[farthestIndex];
                                Point startPoint = contourPoints[startIndex];
                                // Keep a defect start point only if it is not a
                                // near-duplicate and lies outside the hand radius.
                                if (IsDefectUnique(startPoint, validPoints) && IsDefectOutsideHandRadius(startPoint))
                                {
                                    validPoints.Add(startPoint);
                                }
                                //if (true/*endIndex - startIndex > 10*/)
                                //{
                                //    CvInvoke.Circle(mFrame, startPoint, 3, new MCvScalar(255, 0, 0), 2);
                                //}
                            }
                            // Draw valid indices
                            foreach (Point p in validPoints)
                            {
                                CvInvoke.Circle(mFrame, p, 3, new MCvScalar(255, 0, 0), 2);
                            }
                        }
                    }
                    (Controls["FrameImageBox"] as ImageBox).Image = mFrame;
                }
            }
        }
    }
}
/// <summary>
/// Per-frame pipeline: grabs a camera frame, segments skin in YCrCb space,
/// finds the largest contour (assumed to be the hand), counts raised fingers
/// via convexity defects, and optionally drives the mouse cursor from the
/// palm centroid. Wired as a timer / Application.Idle event handler.
/// (The "Fram" typo in the name is kept: the handler is subscribed by name
/// elsewhere, so renaming would break the wiring.)
/// </summary>
/// <param name="Sender">Standard event-handler sender (unused).</param>
/// <param name="agr">Standard event args (unused).</param>
void ProcessFramAndUpdateGUI(object Sender, EventArgs agr)
{
    int Finger_num = 0;
    double area = 0;
    double maxArea = 0;

    // Grab a frame. QueryFrame() can return null when no frame is available,
    // so the guard must run BEFORE any dereference (the original checked
    // currentFrame for null only after reading currentFrame.Size).
    Mat rawFrame = capture.QueryFrame();
    if (rawFrame == null)
    {
        return;
    }
    currentFrame = rawFrame.ToImage<Bgr, byte>();

    // Kept only for the optional ROI crop below.
    int widthROI = currentFrame.Size.Width / 4;
    int heightROI = currentFrame.Size.Height / 4;
    // currentFrame = currentFrame.Copy(new Rectangle(widthROI, heightROI, widthROI * 2, heightROI * 2));

    // Skin segmentation via the YCrCb detector (approach 2; a manual
    // InRange + erode/dilate variant was tried earlier and abandoned).
    IColorSkinDetector skinDetector = new YCrCbSkinDetector();
    Image<Gray, byte> skin = skinDetector.DetectSkin(currentFrame, new Ycc(0, minCr, minCb), new Ycc(255, maxCr, maxCb));

    // Mirror both mask and colour frame so on-screen motion matches the user,
    // then smooth the mask to suppress speckle noise.
    skin = skin.Flip(FlipType.Horizontal);
    skin = skin.SmoothGaussian(9);
    picSkinCam.Image = skin.ToBitmap();
    currentFrame = currentFrame.Flip(FlipType.Horizontal);

    #region Extract contours and hull
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(skin, contours, new Mat(), Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

    // Pick the largest-area contour and assume it is the hand.
    VectorOfPoint biggestContour = new VectorOfPoint();
    for (int i = 0; i < contours.Size; i++)
    {
        VectorOfPoint contour = contours[i];
        area = CvInvoke.ContourArea(contour, false);
        if (area > maxArea)
        {
            maxArea = area;
            biggestContour = contour;
        }
    }

    try
    {
        lblNote.Text = "";
        // NOTE(review): biggestContour is never null here (constructed above);
        // an empty-frame guard would be biggestContour.Size > 0, but the
        // original control flow (letting the outer catch handle empties) is
        // deliberately preserved.
        if (biggestContour != null)
        {
            // Lightly simplify the contour, then draw it.
            CvInvoke.ApproxPolyDP(biggestContour, biggestContour, 0.00025, false);
            contourPoints = biggestContour.ToArray();
            currentFrame.Draw(contourPoints, new Bgr(255, 0, 255), 4);

            // Hull as points (for drawing / min-area box) and as indices
            // (the form ConvexityDefects requires). Both built clockwise.
            VectorOfPoint hull = new VectorOfPoint();
            VectorOfInt convexHull = new VectorOfInt();
            CvInvoke.ConvexHull(biggestContour, hull, true);
            RotatedRect minAreaBox = CvInvoke.MinAreaRect(hull);
            currentFrame.Draw(new CircleF(minAreaBox.Center, 5), new Bgr(Color.Black), 4);
            CvInvoke.ConvexHull(biggestContour, convexHull, true);

            // Bounding box around the hand, hull outline, and palm centre.
            currentFrame.Draw(minAreaBox, new Bgr(200, 0, 0), 1);
            currentFrame.DrawPolyline(hull.ToArray(), true, new Bgr(200, 125, 75), 4);
            currentFrame.Draw(new CircleF(new PointF(minAreaBox.Center.X, minAreaBox.Center.Y), 3), new Bgr(200, 125, 75));

            // Convexity defects; each result row is
            // (start_index, end_index, farthest_pt_index, fixpt_depth).
            Mat defect = new Mat();
            CvInvoke.ConvexityDefects(biggestContour, convexHull, defect);
            if (!defect.IsEmpty)
            {
                mDefect = new Matrix<int>(defect.Rows, defect.Cols, defect.NumberOfChannels);
                defect.CopyTo(mDefect);
            }

            #region Counting finger
            // mDefect is a field: on early frames with no defects yet it can
            // still be null, which previously fell through to the outer catch
            // and showed the error label for no reason.
            if (mDefect == null || mDefect.Rows == 0)
            {
                return;
            }
            PointF[] start = new PointF[mDefect.Rows];
            int num = 0;
            start[0] = new PointF(0, 0);
            try
            {
                for (int i = 0; i < mDefect.Rows; i++)
                {
                    // Column layout per OpenCV convexityDefects:
                    // [0]=start, [1]=end, [2]=farthest point, [3]=depth.
                    // The original labelled [2] as "end"; the indices used
                    // below are unchanged, only named honestly.
                    int startIdx = mDefect.Data[i, 0];
                    int farthestIdx = mDefect.Data[i, 2]; // [1] (end index) is unused here
                    Point startPoint = contourPoints[startIdx];       // fingertip candidate
                    Point farthestPoint = contourPoints[farthestIdx]; // valley between fingers
                    CircleF startCircle = new CircleF(startPoint, 5f);
                    LineSegment2D Line = new LineSegment2D(startPoint, new Point((int)minAreaBox.Center.X, (int)minAreaBox.Center.Y));

                    // A defect counts as a finger when the tip and valley both
                    // sit above the palm centre, the tip is above the valley,
                    // and the tip-to-valley distance is long enough relative
                    // to the hand's bounding box.
                    if ((startPoint.Y < minAreaBox.Center.Y && farthestPoint.Y < minAreaBox.Center.Y) &&
                        (startPoint.Y < farthestPoint.Y) &&
                        (Math.Sqrt(Math.Pow(startPoint.X - farthestPoint.X, 2) +
                                   Math.Pow(startPoint.Y - farthestPoint.Y, 2)) > minAreaBox.Size.Height / 7))
                    {
                        // Reject near-duplicate tips: require a minimum angular
                        // separation from the previously accepted tip.
                        if (getAngle(startPoint, minAreaBox.Center, start[num]) > 10)
                        {
                            Finger_num++;
                            start[num] = startPoint;
                            num++;
                            currentFrame.Draw(Line, new Bgr(Color.Violet), 2);
                            currentFrame.Draw(startCircle, new Bgr(Color.OrangeRed), 5);
                        }
                    }
                }
            }
            catch
            {
                // Best effort: a bad index (contour changed under us) aborts
                // counting for this frame only.
                return;
            }
            #endregion
        }

        #region Tracking
        // Contour moments give the palm centroid: x = M10/M00, y = M01/M00.
        MCvMoments moment = new MCvMoments();
        try
        {
            moment = CvInvoke.Moments(biggestContour, false);
        }
        catch (NullReferenceException)
        {
            return;
        }
        // Read the spatial moments directly from the struct instead of the
        // deprecated cvGetSpatialMoment helper (same values).
        double m_00 = moment.M00;
        double m_10 = moment.M10;
        double m_01 = moment.M01;
        int current_X = Convert.ToInt32(m_10 / m_00) / 10; // X of contour centre
        int current_Y = Convert.ToInt32(m_01 / m_00) / 10; // Y of contour centre
        #endregion

        if (useVirtualMouse)
        {
            // Closed palm (0-1 fingers): move the cursor with the hand.
            if (Finger_num == 0 || Finger_num == 1)
            {
                Cursor.Position = new Point(current_X * 20, current_Y * 20);
            }
            // 3+ fingers: toggle drag — mouse down on first trigger, up on the next.
            if (Finger_num >= 3)
            {
                if (!isDrag)
                {
                    DoMouseDown();
                    isDrag = true;
                }
                else
                {
                    DoMouseUp();
                    isDrag = false;
                }
            }
        }
    }
    catch
    {
        // Any failure in the hand-analysis pipeline resets the count and asks
        // the user for a cleaner background.
        Finger_num = 0;
        lblNote.Text = "Opps! Make sure to have a 'white space'";
        return;
    }
    #endregion

    // Show the annotated frame and the finger count.
    picInputCam.Image = currentFrame.ToBitmap();
    lblNumFinger.Text = Finger_num.ToString();
}