/// <summary>
/// Draws hand-tracking overlays (convex hull, enclosing circle, finger defects) for every
/// sufficiently large contour onto <paramref name="drawing"/>, and — when control mode is on —
/// moves the OS cursor to follow the hand's center.
/// Side effects: increments the static <c>_numberOfFingers</c> counter for each detected
/// finger-like defect, and writes <c>_posX</c>/<c>_posY</c> before calling SetCursorPos.
/// </summary>
/// <param name="contours">All contours found in the frame.</param>
/// <param name="contoursPoly">Output slot list; entry i receives the polygonal approximation of contour i.</param>
/// <param name="drawing">Destination image the overlays are painted on.</param>
private static void DrawContours(IReadOnlyList <Point[]> contours, IList <Point[]> contoursPoly, Mat drawing)
{
    // Per-contour minimal enclosing circle data, filled only for large contours.
    var center = new Point2f[contours.Count];
    var radius = new float[contours.Count];
    for (var i = 0; i < contours.Count; i++)
    {
        // Ignore small blobs; 5000 px^2 is the minimum area treated as a hand candidate.
        if (Cv2.ContourArea(contours[i]) >= 5000)
        {
            // Simplify the contour (epsilon = 3 px, closed) and fit its enclosing circle.
            contoursPoly[i] = Cv2.ApproxPolyDP(contours[i], 3, true);
            Cv2.MinEnclosingCircle(contoursPoly[i], out center[i], out radius[i]);
            var tempContour = contours[i];
            // Single-element jagged arrays because Cv2.DrawContours takes a contour collection.
            var hulls = new Point[1][];
            var hullsI = new int[1][];
            hulls[0] = Cv2.ConvexHull(tempContour);
            hullsI[0] = Cv2.ConvexHullIndices(tempContour);
            Cv2.DrawContours(drawing, hulls, -1, Scalar.Gold, 2);
            if (hullsI[0].Length > 0)
            {
                // Convexity defects mark the valleys between extended fingers.
                var defects = Cv2.ConvexityDefects(tempContour, hullsI[0]);
                if (defects.Length > 0)
                {
                    // NOTE(review): loop starts at j = 1, so defects[0] is never examined —
                    // confirm whether skipping the first defect is intentional.
                    for (var j = 1; j < defects.Length; j++)
                    {
                        // Vec4i layout: [0]=start index, [1]=end index, [2]=farthest point index, [3]=depth.
                        var startIdx = defects[j][0];
                        var ptStart = tempContour[startIdx];
                        var farIdx = defects[j][2];
                        var ptFar = tempContour[farIdx];
                        // Count as a finger when the defect is deep enough, the fingertip lies
                        // above the palm center (smaller Y in image coords), and the hand is large.
                        if (GetDistance(ptStart, ptFar) > 1000 && ptStart.Y < center[i].Y && radius[i] >= 70)
                        {
                            Cv2.Circle(drawing, ptStart, 10, Scalar.Yellow, 2);
                            Cv2.Line(drawing, ptStart, ptFar, Scalar.Pink, 2);
                            _numberOfFingers++;
                        }
                    }
                    if (radius[i] > 50)
                    {
                        // Draw the simplified contour plus the enclosing circle and its center.
                        Cv2.DrawContours(drawing, contoursPoly, i, Scalar.Red);
                        Cv2.Circle(drawing, center[i], (int)radius[i], Scalar.White, 2);
                        Cv2.Circle(drawing, center[i], 5, Scalar.Red, 2);
                        if (Program.ControlMode)
                        {
                            // Map camera coordinates to screen coordinates.
                            // NOTE(review): the 4x scale and -100 offset look tuned for a specific
                            // capture resolution — confirm against the camera setup.
                            _posX = (int)(4 * (center[i].X - 100));
                            _posY = (int)(4 * (center[i].Y - 100));
                            WinApiUtils.SetCursorPos(_posX, _posY);
                        }
                    }
                }
            }
        }
    }
}
// Builds a rough convex hull around the hand contour: nearby hull vertices are
// clustered and each cluster is collapsed to a single representative vertex.
/// <summary>
/// Returns contour indices of a simplified convex hull: hull vertices closer than
/// <paramref name="maxDistance"/> to each other are clustered, and for each cluster
/// only the vertex nearest the cluster's centroid is kept.
/// </summary>
/// <param name="contour">Contour points; may be null, in which case an empty sequence is returned.</param>
/// <param name="maxDistance">Distance threshold under which two hull vertices belong to the same cluster.</param>
/// <returns>Lazy sequence of contour indices, one per cluster.</returns>
private IEnumerable <int> CreateRoughHull(Point[] contour, int maxDistance)
{
    if (contour == null)
    {
        return Enumerable.Empty <int>();
    }

    // Hull vertices paired with their contour index, ordered by descending index
    // so the clustering input is deterministic.
    var pointsIdx = Cv2.ConvexHullIndices(contour)
        .OrderByDescending(i => i)
        .Select(i => new PointContainer(contour[i], i))
        .ToArray();

    var hullPoints = pointsIdx.Select(pc => pc.Point);
    // Cv2.Partition assigns a cluster label per point; labels[i] corresponds to pointsIdx[i].
    Cv2.Partition(hullPoints, out var labels, (p1, p2) => Point.Distance(p1, p2) < maxDistance);

    // Group the containers by cluster label (replaces the original two-pass
    // dictionary build; array .Length instead of Enumerable.Count()).
    var pointGroups = pointsIdx
        .Select((pc, i) => (Label: labels[i], Container: pc))
        .GroupBy(t => t.Label, t => t.Container);

    // Each group is non-empty by construction, so First() is safe.
    return pointGroups.Select(group =>
    {
        var center = CenterPoint(group.Select(pc => pc.Point));
        return group.OrderBy(pc => Point.Distance(pc.Point, center)).First().Idx;
    });
}
public void ConvexHullIndices()
{
    // U-shaped (concave) contour: the notch formed by (3,5)-(6,5) must be
    // excluded from the convex hull, leaving only the four outer corners.
    var contour = new[]
    {
        new Point(0, 0),
        new Point(0, 10),
        new Point(3, 10),
        new Point(3, 5),
        new Point(6, 5),
        new Point(6, 10),
        new Point(10, 10),
        new Point(10, 0),
    };

    var hullIndices = Cv2.ConvexHullIndices(contour);

    // Only the outer corners (indices 7, 6, 1, 0) survive, in OpenCV's output order.
    Assert.Equal(4, hullIndices.Length);
    Assert.Equal(new[] { 7, 6, 1, 0 }, hullIndices);
}
/// <summary>
/// Warps the face described by <paramref name="points1"/> in <paramref name="img1"/> onto the
/// face described by <paramref name="points2"/> in <paramref name="img2"/>, then seamlessly
/// blends it and returns the result as an ImageSharp image.
/// NOTE(review): both <paramref name="img1"/> and <paramref name="img2"/> are converted
/// in place (CV_32F / CV_8UC3), mutating the caller's Mats — confirm callers do not reuse them.
/// NOTE(review): <c>img1Warped</c> (a clone of img2) is never disposed — possible native leak.
/// </summary>
private Image <Rgba32> Swap(Mat img1, IList <Point2f> points1, Mat img2, IList <Point2f> points2)
{
    // Warp target starts as a copy of the destination image.
    var img1Warped = img2.Clone();
    // Float precision for the per-triangle affine warps.
    img1.ConvertTo(img1, MatType.CV_32F);
    img1Warped.ConvertTo(img1Warped, MatType.CV_32F);
    var rect = new Rect(0, 0, img1Warped.Cols, img1Warped.Rows);
    // One set of hull indices (from the destination landmarks) is applied to BOTH
    // landmark sets so hull1 and hull2 correspond point-for-point.
    var hullIndex = Cv2.ConvexHullIndices(points2);
    var hull1 = hullIndex.Select(i => points1[i]).ToList();
    var hull2 = hullIndex.Select(i => points2[i]).ToList();
    // Triangulate the destination hull and warp each source triangle onto it.
    var dt = GetDelaunayTriangulationIndexes(rect, hull2).ToList();
    foreach (var triangleIndexes in dt)
    {
        var t1 = triangleIndexes.Select(i => hull1[i]).ToList();
        var t2 = triangleIndexes.Select(i => hull2[i]).ToList();
        WarpTriangle(img1, img1Warped, t1, t2);
    }
    // Integer hull for the fill/clone mask.
    var hull8U = hull2.Select(p => new Point((int)p.X, (int)p.Y)).ToList();
    using (var mask = Mat.Zeros(img2.Rows, img2.Cols, MatType.CV_8UC3).ToMat())
    {
        Cv2.FillConvexPoly(mask, hull8U, new Scalar(255, 255, 255));
        // Clamp the face bounding box to the image and take its center for seamless cloning.
        var r = Cv2.BoundingRect(hull2).Intersect(rect);
        var center = r.Location + new Point(r.Width / 2, r.Height / 2);
        // Back to 8-bit, as required by SeamlessClone.
        img1Warped.ConvertTo(img1Warped, MatType.CV_8UC3);
        img2.ConvertTo(img2, MatType.CV_8UC3);
        using (var outputMat = new Mat())
        {
            Cv2.SeamlessClone(img1Warped, img2, mask, center, outputMat, SeamlessCloneMethods.NormalClone);
            // Round-trip through encoded bytes to hand the result to ImageSharp.
            return(Image.Load(outputMat.ToBytes()));
        }
    }
}
public void ConvexityDefects()
{
    // U-shaped (concave) contour: the notch between the two prongs is the
    // single convexity defect we expect to detect.
    var contour = new[]
    {
        new Point(0, 0),
        new Point(0, 10),
        new Point(3, 10),
        new Point(3, 5),
        new Point(6, 5),
        new Point(6, 10),
        new Point(10, 10),
        new Point(10, 0),
    };

    var convexHull = Cv2.ConvexHullIndices(contour);
    Assert.Equal(4, convexHull.Length);

    // ConvexityDefects requires integer contours (Point2f is not supported).
    var convexityDefects = Cv2.ConvexityDefects(contour, convexHull);

    // Exactly one defect: start index 1, end index 6, farthest point index 3, depth 1280 (fixed-point).
    Assert.Single(convexityDefects);
    Assert.Equal(new Vec4i(1, 6, 3, 1280), convexityDefects[0]);
}
/// <summary>
/// Swaps the first face found in <paramref name="newImage"/> onto the first face
/// found in <paramref name="image"/> (landmark detection, hull triangulation,
/// per-triangle warp, seamless clone) and returns the blended result.
/// </summary>
/// <param name="image">Target photo ("bradley") that receives the new face.</param>
/// <param name="newImage">Source photo ("mark") providing the face to insert.</param>
/// <returns>The target image with the source face blended in.</returns>
private Bitmap ProcessImage(Bitmap image, Bitmap newImage)
{
    // set up Dlib face detector and 68-landmark shape predictor
    using (var fd = FrontalFaceDetector.GetFrontalFaceDetector())
    using (var sp = new ShapePredictor("shape_predictor_68_face_landmarks.dat"))
    {
        // convert image to dlib format
        var img = image.ToArray2D <RgbPixel>();

        // find bradley's face in the image (first detection)
        var faces = fd.Detect(img);
        var bradley = faces[0];

        // get bradley's 68 landmark points as OpenCV points
        var bradleyShape = sp.Detect(img, bradley);
        var bradleyPoints = (from i in Enumerable.Range(0, (int)bradleyShape.Parts)
                             let p = bradleyShape.GetPart((uint)i)
                             select new OpenCvSharp.Point(p.X, p.Y)).ToArray();

        // convex hull indices of bradley's landmarks; materialized once because the
        // hull is enumerated several times below (triangulation, warps, mask, rect)
        var hull = Cv2.ConvexHullIndices(bradleyPoints);
        var bradleyHull = (from i in hull select bradleyPoints[i]).ToArray();

        // find landmark points in the face to swap in
        var imgMark = newImage.ToArray2D <RgbPixel>();
        var faces2 = fd.Detect(imgMark);
        var mark = faces2[0];
        var markShape = sp.Detect(imgMark, mark);
        var markPoints = (from i in Enumerable.Range(0, (int)markShape.Parts)
                          let p = markShape.GetPart((uint)i)
                          select new OpenCvSharp.Point(p.X, p.Y)).ToArray();

        // Reuse bradley's hull indices for mark's landmarks: both shapes use the
        // identical 68-landmark ordering, so this produces point-for-point
        // corresponding hulls. (The original recomputed the exact same indices
        // from bradleyPoints a second time under a comment claiming it was
        // mark's hull — redundant work and a misleading comment.)
        var markHull = (from i in hull select markPoints[i]).ToArray();

        // calculate Delaunay triangles over bradley's hull
        var triangles = Utility.GetDelaunayTriangles(bradleyHull);

        // get transformations to warp the new face onto Bradley's face
        var warps = Utility.GetWarps(markHull, bradleyHull, triangles);

        // apply the warps to the new face to prep it for insertion into the main image
        var warpedImg = Utility.ApplyWarps(newImage, image.Width, image.Height, warps);

        // prepare a mask covering bradley's face hull for the seamless clone
        var mask = new Mat(image.Height, image.Width, MatType.CV_8UC3);
        mask.SetTo(0);
        Cv2.FillConvexPoly(mask, bradleyHull, new Scalar(255, 255, 255), LineTypes.Link8);

        // find the center of the warped face
        var r = Cv2.BoundingRect(bradleyHull);
        var center = new OpenCvSharp.Point(r.Left + r.Width / 2, r.Top + r.Height / 2);

        // blend the warped face into the main image
        var selfie = BitmapConverter.ToMat(image);
        var blend = new Mat(selfie.Size(), selfie.Type());
        Cv2.SeamlessClone(warpedImg, selfie, mask, center, blend, SeamlessCloneMethods.NormalClone);

        // return the modified main image
        return(BitmapConverter.ToBitmap(blend));
    }
}
/// <summary>
/// Computes the convex hull indices of <paramref name="contour"/> (clockwise: false)
/// and returns them with hull vertices that lie too close together removed.
/// </summary>
public IndexedPoint[] getConvexHull(Point[] contour)
{
    var indices = Cv2.ConvexHullIndices(contour, false);
    var pruned = removeTooClose(indices);
    return pruned.ToArray();
}
/// <summary>
/// Finds the largest contour in the binary <paramref name="input"/> mask, detects finger-like
/// convexity defects, draws the annotated result (contour, bounding box, points, finger count)
/// onto a clone of <paramref name="frame"/> and onto <paramref name="frame"/> itself, and
/// classifies the hand pose into the <c>finalPose</c> field ("rock" / "scissor" / "paper").
/// Side effects: sets <c>fingerCount</c> and <c>finalPose</c>; draws on <paramref name="frame"/>.
/// </summary>
/// <param name="input">Single-channel binary mask of the hand.</param>
/// <param name="frame">Original frame used as the drawing background.</param>
/// <returns>Annotated clone of <paramref name="frame"/> (returned unannotated on early exits).</returns>
public Mat getHandContour(Mat input, Mat frame)
{
    Mat detectImage = frame.Clone();
    if (input.Empty())
    {
        return detectImage;
    }
    // we work only on the 1-channel result; since this function is called inside a
    // loop we are not sure that this is always the case
    if (input.Channels() != 1)
    {
        return detectImage;
    }

    Point[][] contours;
    HierarchyIndex[] hierarchy;
    Cv2.FindContours(input, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
    if (contours.Length <= 0)
    {
        return detectImage;
    }

    // keep only the largest contour — assumed to be the hand
    int biggest_contour_index = -1;
    double biggest_area = 0.0;
    for (int i = 0; i < contours.GetLength(0); i++)
    {
        double area = Cv2.ContourArea(contours[i], false);
        if (area > biggest_area)
        {
            biggest_area = area;
            biggest_contour_index = i;
        }
    }
    if (biggest_contour_index < 0)
    {
        return detectImage;
    }

    Point[] hull_points;
    int[] hull_ints;
    hull_points = Cv2.ConvexHull(contours[biggest_contour_index]);
    hull_ints = Cv2.ConvexHullIndices(contours[biggest_contour_index]);

    // ConvexityDefects needs more than 3 hull indices to be meaningful
    Vec4i[] defects;
    if (hull_ints.Length > 3)
    {
        defects = Cv2.ConvexityDefects(contours[biggest_contour_index], hull_ints);
    }
    else
    {
        return detectImage;
    }

    OpenCvSharp.Rect bounding_rectangle = Cv2.BoundingRect(hull_points);
    // center of the bounding rectangle approximates the center of the hand
    Point center_bounding_rect = new Point(
        (bounding_rectangle.TopLeft.X + bounding_rectangle.BottomRight.X) / 2,
        (bounding_rectangle.TopLeft.Y + bounding_rectangle.BottomRight.Y) / 2
        );

    // separate the defects, keeping only the ones of interest:
    // Item0 = defect start (fingertip candidate), Item2 = farthest point (valley candidate)
    List <Point> start_points = new List <Point>();
    List <Point> far_points = new List <Point>();
    for (int i = 0; i < defects.Length; i++)
    {
        start_points.Add(contours[biggest_contour_index][defects[i].Item0]);
        // filter the far point based on its distance from the hand center
        if (findPointsDistance(contours[biggest_contour_index][defects[i].Item2], center_bounding_rect) < bounding_rectangle.Height * BOUNDING_RECT_FINGER_SIZE_SCALING)
        {
            far_points.Add(contours[biggest_contour_index][defects[i].Item2]);
        }
    }

    // compact clusters of nearby points onto their medians
    List <Point> filtered_start_points = compactOnNeighborhoodMedian(start_points, bounding_rectangle.Height * BOUNDING_RECT_NEIGHBOR_DISTANCE_SCALING);
    List <Point> filtered_far_points = compactOnNeighborhoodMedian(far_points, bounding_rectangle.Height * BOUNDING_RECT_NEIGHBOR_DISTANCE_SCALING);

    // now try to find the fingers
    List <Point> filtered_finger_points = new List <Point>();
    if (filtered_far_points.Count > 1)
    {
        List <Point> finger_points = new List <Point>();
        for (int i = 0; i < filtered_start_points.Count; i++)
        {
            // a start point flanked by two valleys at a finger-like angle is a finger
            List <Point> closest_points = findClosestOnX(filtered_far_points, filtered_start_points[i]);
            if (isFinger(closest_points[0], filtered_start_points[i], closest_points[1], LIMIT_ANGLE_INF, LIMIT_ANGLE_SUP, center_bounding_rect, bounding_rectangle.Height * BOUNDING_RECT_FINGER_SIZE_SCALING))
            {
                finger_points.Add(filtered_start_points[i]);
            }
        }
        if (finger_points.Count > 0)
        {
            // we have at most five fingers usually :)
            while (finger_points.Count > 5)
            {
                finger_points.RemoveAt(0);
            }
            // filter out the points too close to each other on X
            for (int i = 0; i < finger_points.Count - 1; i++)
            {
                if (findPointsDistanceOnX(finger_points[i], finger_points[i + 1]) > bounding_rectangle.Height * BOUNDING_RECT_NEIGHBOR_DISTANCE_SCALING * 1.5)
                {
                    filtered_finger_points.Add(finger_points[i]);
                }
            }
            // handle the last finger point separately
            if (finger_points.Count > 2)
            {
                if (findPointsDistanceOnX(finger_points[0], finger_points[finger_points.Count - 1]) > bounding_rectangle.Height * BOUNDING_RECT_NEIGHBOR_DISTANCE_SCALING * 1.5)
                {
                    filtered_finger_points.Add(finger_points[finger_points.Count - 1]);
                }
            }
            else
            {
                filtered_finger_points.Add(finger_points[finger_points.Count - 1]);
            }
        }
    }

    // draw what was found on the returned image
    Cv2.DrawContours(detectImage, contours, biggest_contour_index, new Scalar(0, 255, 0), 8, LineTypes.Link8, hierarchy);
    Cv2.Rectangle(detectImage, bounding_rectangle.TopLeft, bounding_rectangle.BottomRight, new Scalar(0, 0, 255), 8, LineTypes.Link8);
    Cv2.Circle(detectImage, center_bounding_rect.X, center_bounding_rect.Y, 15, new Scalar(255, 0, 255), 3, LineTypes.Link8);
    drawVectorPoints(detectImage, filtered_start_points, new Scalar(255, 0, 0), true);
    drawVectorPoints(detectImage, filtered_far_points, new Scalar(0, 0, 25), true);
    drawVectorPoints(detectImage, filtered_finger_points, new Scalar(0, 255, 255), false);
    Cv2.PutText(detectImage, filtered_finger_points.Count.ToString(), center_bounding_rect, HersheyFonts.HersheyTriplex, 10, new Scalar(255, 0, 25));

    // and on the starting frame
    Cv2.DrawContours(frame, contours, biggest_contour_index, new Scalar(0, 255, 0), 8, LineTypes.Link8, hierarchy);
    Cv2.Circle(frame, center_bounding_rect.X, center_bounding_rect.Y, 15, new Scalar(255, 0, 255), 8, LineTypes.Link8);
    drawVectorPoints(frame, filtered_finger_points, new Scalar(0, 255, 255), false);
    Cv2.PutText(frame, filtered_finger_points.Count.ToString(), center_bounding_rect, HersheyFonts.HersheyTriplex, 10, new Scalar(255, 0, 255));

    // classify the pose by finger count: 0-1 rock, 2-3 scissor, 4+ paper.
    // FIX: the original condition was `fingerCount == 2 && fingerCount <= 3`,
    // whose second clause is always true when the first holds, so a count of 3
    // left finalPose stale. 2-3 fingers now both map to "scissor".
    fingerCount = filtered_finger_points.Count;
    if (fingerCount <= 1)
    {
        finalPose = "rock";
    }
    if (fingerCount >= 2 && fingerCount <= 3)
    {
        finalPose = "scissor";
    }
    if (fingerCount >= 4)
    {
        finalPose = "paper";
    }
    return detectImage;
}
/// <summary>
/// Webinar demo: swaps the face in <paramref name="newImage"/> onto a face detected in
/// <paramref name="image"/>. The commented-out "WEBINAR STEP" blocks are intentional
/// checkpoints that return intermediate visualizations when uncommented.
/// </summary>
private Bitmap ProcessImage(Bitmap image, Bitmap newImage)
{
    // set up Dlib facedetectors and shapedetectors
    using (var fd = FrontalFaceDetector.GetFrontalFaceDetector())
    using (var sp = new ShapePredictor("shape_predictor_68_face_landmarks.dat"))
    {
        // convert images to opencv format
        var selfie = BitmapConverter.ToMat(image);
        // NOTE(review): `mark` is only used by the commented-out webinar steps below.
        var mark = BitmapConverter.ToMat(newImage);

        // convert image to dlib format
        var img = image.ToArray2D <RgbPixel>();

        // find all faces in image
        var faces = fd.Detect(img);

        // get bradley's landmark points
        // NOTE(review): faces[1] hard-codes the SECOND detected face — throws if
        // fewer than two faces are found; confirm this matches the demo photo.
        var bradleyShape = sp.Detect(img, faces[1]);
        var bradleyPoints = (from i in Enumerable.Range(0, (int)bradleyShape.Parts)
                             let p = bradleyShape.GetPart((uint)i)
                             select new OpenCvSharp.Point(p.X, p.Y)).ToArray();

        // *** WEBINAR STEP 1: draw landmarks on bradley's face
        // Utility.DrawLandmarks(selfie, bradleyPoints);
        // return BitmapConverter.ToBitmap(selfie);

        // get convex hull of bradley's points
        var hull = Cv2.ConvexHullIndices(bradleyPoints);
        var bradleyHull = from i in hull select bradleyPoints[i];

        // *** WEBINAR STEP 2a: draw convex hull for bradley
        // Utility.DrawLandmarks(selfie, bradleyHull);
        // Utility.DrawHull(selfie, bradleyHull);
        // return BitmapConverter.ToBitmap(selfie);

        // find landmark points in face to swap
        var imgMark = newImage.ToArray2D <RgbPixel>();
        var faces2 = fd.Detect(imgMark);
        var markShape = sp.Detect(imgMark, faces2[0]);
        var markPoints = (from i in Enumerable.Range(0, (int)markShape.Parts)
                          let p = markShape.GetPart((uint)i)
                          select new OpenCvSharp.Point(p.X, p.Y)).ToArray();

        // NOTE(review): hull2 is computed from bradleyPoints (not markPoints), so it is
        // identical to `hull` above; applying bradley's hull indices to markPoints makes
        // the two hulls correspond point-for-point — presumably intentional, but the
        // duplicate call is redundant; confirm and reuse `hull`.
        var hull2 = Cv2.ConvexHullIndices(bradleyPoints);
        var markHull = from i in hull2 select markPoints[i];

        // *** WEBINAR STEP 2b: draw convex hull for mark
        // Utility.DrawLandmarks(mark, markHull);
        // Utility.DrawHull(mark, markHull);
        // return BitmapConverter.ToBitmap(mark);

        // calculate Delaunay triangles
        var triangles = Utility.GetDelaunayTriangles(bradleyHull);

        // *** WEBINAR STEP 3: draw delaunay triangles for bradley
        // Utility.DrawTriangles(selfie, triangles);
        // return BitmapConverter.ToBitmap(selfie);

        // get transformations to warp the new face onto Bradley's face
        var warps = Utility.GetWarps(markHull, bradleyHull, triangles);

        // *** WEBINAR STEP 6: demonstrate triangulation with all landmark points
        // triangles = Utility.GetDelaunayTriangles(bradleyPoints);
        // warps = Utility.GetWarps(markPoints, bradleyPoints, triangles);

        // apply the warps to the new face to prep it for insertion into the main image
        var warpedImg = Utility.ApplyWarps(newImage, image.Width, image.Height, warps);

        // *** WEBINAR STEP 4: warp triangles from mark to bradley
        // return BitmapConverter.ToBitmap(warpedImg);

        // prepare a mask for the warped image
        var mask = new Mat(image.Height, image.Width, MatType.CV_8UC3);
        mask.SetTo(0);
        Cv2.FillConvexPoly(mask, bradleyHull, new Scalar(255, 255, 255), LineTypes.Link8);

        // *** WEBINAR STEP 5a: show blend without seamless clone
        // Cv2.Multiply(selfie, new Scalar(1, 1, 1) - mask, selfie);
        // Cv2.Add(selfie, warpedImg, selfie);
        // return BitmapConverter.ToBitmap(selfie);

        // find the center of the warped face
        var r = Cv2.BoundingRect(bradleyHull);
        var center = new OpenCvSharp.Point(r.Left + r.Width / 2, r.Top + r.Height / 2);

        // blend the warped face into the main image
        var blend = new Mat(selfie.Size(), selfie.Type());
        Cv2.SeamlessClone(warpedImg, selfie, mask, center, blend, SeamlessCloneMethods.NormalClone);

        // *** WEBINAR STEP 5b: show blend with seamless clone
        return(BitmapConverter.ToBitmap(blend));
    }
}