/// <summary>
/// Detects the facial landmarks for <paramref name="face"/> inside
/// <paramref name="frame"/> and estimates the head pose from them.
/// </summary>
/// <param name="frame">Full camera frame; its pixel buffer is copied out for the landmark model.</param>
/// <param name="face">Bounding box of the face within the frame.</param>
/// <returns>
/// Tuple of horizontal and vertical rotation estimates;
/// (Unknown, Unknown) when no landmarks could be found.
/// </returns>
public (HeadRotation hRotation, HeadRotation vRotation) GetHeadPosition(Mat frame, Rect face)
{
    // Copy the Mat's native pixel buffer into managed memory so the
    // FaceRecognition library can consume it.
    var bytes = new byte[frame.Rows * frame.Cols * frame.ElemSize()];
    Marshal.Copy(frame.Data, bytes, 0, bytes.Length);
    using var img = FaceRecognition.LoadImage(bytes, frame.Rows, frame.Cols, frame.ElemSize());
    // Run the shared landmark predictor only on the supplied face region.
    var allLandmarks = SharedFaceRecognitionModel.Model.FaceLandmark(img, new[] { new Location(face.Left, face.Top, face.Right, face.Bottom), }, PredictorModel.Large);
    var landmarks = allLandmarks.FirstOrDefault();
    if (landmarks == null)
    {
        return(HeadRotation.Unknown, HeadRotation.Unknown);
    }
    #region DEBUG
#if DEV_MODE
    // Draw every landmark point. NOTE(review): the outer facePart loop
    // variable is unused, so all landmarks are redrawn once per FacePart
    // value — presumably unintended but harmless; verify.
    foreach (var facePart in Enum.GetValues(typeof(FacePart)).Cast <FacePart>())
    {
        foreach (var landmark in landmarks)
        {
            foreach (var p in landmark.Value.ToArray())
            {
                Cv2.Ellipse(frame, new RotatedRect(new Point2f(p.X, p.Y), new Size2f(2, 2), 0), Scalar.Aqua);
            }
        }
    }
#endif
    #endregion
    return(EstimateHorizontalPose(landmarks, face, frame), EstimateVerticalPose(landmarks, face, frame));
}
/// <summary>
/// Estimates horizontal head rotation (Left/Front/Right) from the asymmetry
/// of the chin contour around the nose bridge.
/// </summary>
/// <param name="landmarks">Face landmarks keyed by face part.</param>
/// <param name="face">Detected face rectangle; its width scales the threshold.</param>
/// <param name="frame">Frame the face came from; only drawn on in DEV_MODE.</param>
/// <returns>Estimated horizontal rotation.</returns>
private HeadRotation EstimateHorizontalPose(IDictionary <FacePart, IEnumerable <Point> > landmarks, Rect face, Mat frame)
{
    var noseTop = landmarks[FacePart.NoseBridge].First();

    // FIX: materialize the chin contour once. The original re-enumerated the
    // IEnumerable and recomputed Min()/Max() inside every First() predicate,
    // which is O(n^2) and enumerates the source sequence repeatedly.
    var chin = landmarks[FacePart.Chin].ToList();
    var left = chin.Aggregate((best, p) => p.X < best.X ? p : best);   // left-most chin point (first on ties)
    var right = chin.Aggregate((best, p) => p.X > best.X ? p : best);  // right-most chin point (first on ties)

    #region DEBUG
#if DEV_MODE
    Cv2.Ellipse(frame, new RotatedRect(new Point2f(noseTop.X, noseTop.Y), new Size2f(2, 2), 0), Scalar.Yellow);
    Cv2.Ellipse(frame, new RotatedRect(new Point2f(left.X, left.Y), new Size2f(2, 2), 0), Scalar.Violet);
    Cv2.Ellipse(frame, new RotatedRect(new Point2f(right.X, right.Y), new Size2f(2, 2), 0), Scalar.PaleGreen);
#endif
    #endregion

    // Positive when the nose sits further from the left chin edge than from
    // the right one (head turned); magnitude below the width-scaled
    // threshold counts as facing front.
    int dist = ((noseTop.X - left.X) - (right.X - noseTop.X));
    if (Math.Abs(dist) < _settings.HorizontalPoseThreshold * face.Width)
    {
        return(HeadRotation.Front);
    }
    return(dist > 0 ? HeadRotation.Left : HeadRotation.Right);
}
/// <summary>
/// Estimates head tilt from the slope of the line between the first left-eyebrow
/// landmark and the last right-eyebrow landmark.
/// NOTE(review): despite the name this measures tilt (roll) and returns
/// Left/Right — confirm callers expect that rather than an up/down estimate.
/// </summary>
/// <param name="landmarks">Face landmarks keyed by face part.</param>
/// <param name="face">Detected face rectangle (unused in the computation).</param>
/// <param name="frame">Frame the face came from; only drawn on in DEV_MODE.</param>
/// <returns>Estimated rotation.</returns>
/// <exception cref="InvalidOperationException">When the two eyebrow points are vertically aligned.</exception>
private HeadRotation EstimateVerticalPose(IDictionary <FacePart, IEnumerable <Point> > landmarks, Rect face, Mat frame)
{
    var leftEye = landmarks[FacePart.LeftEyebrow].First();
    var rightEye = landmarks[FacePart.RightEyebrow].Last();
    #region DEBUG
#if DEV_MODE
    Cv2.Ellipse(frame, new RotatedRect(new Point2f(leftEye.X, leftEye.Y), new Size2f(2, 2), 0), Scalar.Red);
    Cv2.Ellipse(frame, new RotatedRect(new Point2f(rightEye.X, rightEye.Y), new Size2f(2, 2), 0), Scalar.Red);
#endif
    #endregion
    // Vertically aligned landmarks would divide by zero below.
    // FIX: was `throw new Exception(...)` — throw the specific standard type
    // instead; any existing `catch (Exception)` handlers still catch it.
    if (leftEye.X - rightEye.X == 0)
    {
        throw new InvalidOperationException("Cyclope exception");
    }
    // Slope between the eyebrow points, converted to degrees.
    double a = (leftEye.Y - rightEye.Y) / (double)(leftEye.X - rightEye.X);
    double angle = Math.Atan(a) * 180.0 / Math.PI;
    if (Math.Abs(angle) < _settings.VerticalPoseThreshold)
    {
        return(HeadRotation.Front);
    }
    return(angle > 0 ? HeadRotation.Left : HeadRotation.Right);
}
/// <summary>
/// Runs Haar-cascade face detection on <paramref name="src"/> and returns a
/// flag plus a copy of the image with an ellipse drawn around each face.
/// </summary>
/// <param name="cascade">Loaded Haar cascade.</param>
/// <param name="src">BGR input image; not modified.</param>
private static (bool FaceDetected, Mat Mat) DetectFace(CascadeClassifier cascade, Mat src)
{
    Mat annotated = src.Clone();
    bool anyFace;
    using (var gray = new Mat())
    {
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);

        // Detect faces on the grayscale copy.
        Rect[] detections = cascade.DetectMultiScale(
            gray, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));
        anyFace = detections.Length > 0;

        // Outline every detection with a magenta ellipse.
        foreach (var rect in detections)
        {
            var midpoint = new Point((int)(rect.X + rect.Width * 0.5), (int)(rect.Y + rect.Height * 0.5));
            var halfAxes = new Size((int)(rect.Width * 0.5), (int)(rect.Height * 0.5));
            Cv2.Ellipse(annotated, midpoint, halfAxes, 0, 0, 360, new Scalar(255, 0, 255), 4);
        }
    }
    return (anyFace, annotated);
}
/// <summary>
/// Draws four concentric filled ellipses (largest first) around
/// <paramref name="center"/>, one per intensity fill color.
/// </summary>
/// <param name="img">Image drawn onto.</param>
/// <param name="center">Common center of all rings.</param>
/// <param name="val1">Base horizontal semi-axis.</param>
/// <param name="val2">Base vertical semi-axis.</param>
/// <param name="angle">Rotation of the ellipses in degrees.</param>
private void DrawEllipses(Mat img, Point center, double val1, double val2, double angle)
{
    // Scale factors paired with their fill colors, drawn outermost first so
    // the smaller rings paint over the larger ones.
    var rings = new[] { (2.0, fill120), (1.5, fill150), (1.0, fill180), (0.5, fill210) };
    foreach (var (scale, color) in rings)
    {
        Cv2.Ellipse(img, center, new Size(val1 * scale, val2 * scale), angle, 0, 360, color, Cv2.FILLED);
    }
}
/// <summary>
/// Plays the test video, re-detecting faces every 5th frame and rendering the
/// face crop preview plus an ellipse overlay into the picture boxes.
/// </summary>
private void bStart_MouseClick(object sender, MouseEventArgs e)
{
    //CvCapture camera = new CvCapture("C:\\rosbank\\facedetect\\test\\media\\test.avi");
    using var capture = new VideoCapture("C:\\rosbank\\facedetect\\test\\media\\test.avi");
    //capture.Set(CaptureProperty.FrameWidth, 320);
    //capture.Set(CaptureProperty.FrameHeight, 240);

    // FIX: the cascade was previously re-loaded from disk on every 5th frame
    // inside the loop — expensive and leaking native handles. Load it once.
    using var cascade = new CascadeClassifier(@"C:\opencv3\opencv\sources\data\haarcascades\haarcascade_frontalface_alt.xml");

    Mat image = new Mat();
    Rect[] faces = null;
    int i = 0;
    while (true)
    {
        i++;
        capture.Read(image);
        if (image.Empty())
        {
            break;
        }
        // Work on a downscaled copy for speed; disposed at end of iteration.
        using Mat small = new Mat();
        Cv2.Resize(image, small, new OpenCvSharp.Size(320, 240), 0, 0, InterpolationFlags.Lanczos4);
        if (i % 5 == 0)
        {
            // Refresh detections only every 5th frame; reuse in between.
            faces = cascade.DetectMultiScale(small, 1.08, 2, HaarDetectionType.ScaleImage, new OpenCvSharp.Size(30, 30));
        }
        if (faces != null && faces.Length > 0)
        {
            foreach (Rect face in faces)
            {
                var center = new OpenCvSharp.Point { X = (int)(face.X + face.Width * 0.5), Y = (int)(face.Y + face.Height * 0.5) };
                var axes = new OpenCvSharp.Size { Width = (int)(face.Width * 0.5), Height = (int)(face.Height * 0.5) };
                // Preview the face crop, then outline it on the frame.
                using Mat f = new Mat(small, face);
                pb1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(f);
                pb1.Refresh();
                Cv2.Ellipse(small, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 2);
            }
        }
        bpMain.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(small);
        bpMain.Refresh();
        Cv2.WaitKey(100);
    }
}
/// <summary>
/// Post-processes the frame: converts to grayscale, binarizes with a fixed
/// threshold, then fits and draws an ellipse for every contour with enough points.
/// </summary>
/// <param name="image">Frame to process; replaced in place via ref.</param>
/// <param name="sourceImage">Unused here — presumably kept for interface symmetry; confirm.</param>
public void PostProcessImage(ref Mat image, Mat sourceImage)
{
    // NOTE(review): each reassignment drops the previous Mat without Dispose —
    // native memory is reclaimed only by finalizers; consider disposing.
    image = image.CvtColor(ColorConversionCodes.BGR2GRAY);
    image = image.Threshold(132, 255, ThresholdTypes.Binary);

    // TODO Check, what works better
    // image = image.CvtColor(ColorConversionCodes.BGR2GRAY);
    // using (var detector = SimpleBlobDetector.Create())
    // {
    //    var keypoints = detector.Detect(hardImage);
    //    Cv2.DrawKeypoints(hardImage, keypoints, image, new Scalar(0, 0, 255), DrawMatchesFlags.DrawRichKeypoints);
    // }

    var contours = Cv2.FindContoursAsArray(image, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);
    foreach (var contour in contours)
    {
        // FitEllipse requires at least 5 points.
        if (contour.Length < 5)
        {
            continue;
        }
        var ellipse = Cv2.FitEllipse(contour);
        // NOTE(review): image is single-channel here, so only the first
        // component (0) of this green Scalar applies — the ellipse is drawn
        // black; confirm that is intended.
        Cv2.Ellipse(image, ellipse, new Scalar(0, 255, 0), 2);
    }
}
/// <summary>
/// Runs Haar-cascade face detection and returns a copy of the source image
/// with each detected face outlined by a translucent white ellipse.
/// </summary>
/// <param name="cascade">Loaded Haar cascade.</param>
/// <param name="src">BGR input image; not modified.</param>
private Mat DetectFace(CascadeClassifier cascade, Mat src)
{
    Mat annotated = src.Clone();
    using (var gray = new Mat())
    {
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);

        // Detect faces (min size 124x124, 3 neighbors).
        OpenCvSharp.Rect[] detections = cascade.DetectMultiScale(
            gray, 1.08, 3, HaarDetectionType.ScaleImage, new Size(124, 124));

        // Outline every detection.
        foreach (var rect in detections)
        {
            var midpoint = new Point((int)(rect.X + rect.Width * 0.5), (int)(rect.Y + rect.Height * 0.5));
            var halfAxes = new Size((int)(rect.Width * 0.5), (int)(rect.Height * 0.5));
            Cv2.Ellipse(annotated, midpoint, halfAxes, 0, 0, 360, new Scalar(255, 255, 255, 128), 4);
        }
    }
    return annotated;
}
/// <summary>
/// Draws all detected face rectangles as ellipses on the camera view image,
/// optionally overlays the right/left selection rectangles, and shows the result.
/// </summary>
/// <param name="rightRect">Rectangle drawn in blue when a face is selected.</param>
/// <param name="leftRect">Rectangle drawn in red when a face is selected.</param>
public void ShowResultImage(System.Drawing.Rectangle rightRect, System.Drawing.Rectangle leftRect)
{
    Trace.Assert(resultWnd != null);
    // Nothing to render without detections or a frame.
    if (DetectedFaceRects == null || cameraViewImageMat == null)
    {
        return;
    }
    foreach (var faceRect in DetectedFaceRects)
    {
        var mid = new Point((int)(faceRect.X + faceRect.Width * 0.5), (int)(faceRect.Y + faceRect.Height * 0.5));
        var half = new Size((int)(faceRect.Width * 0.5), (int)(faceRect.Height * 0.5));
        Cv2.Ellipse(cameraViewImageMat, mid, half, 0, 0, 360, new Scalar(255, 0, 255), 4);
    }
    if (SelectedFaceRect.HasValue)
    {
        Cv2.Rectangle(cameraViewImageMat, GetRectFromRectangle(rightRect), new Scalar(255, 0, 0), 4);
        Cv2.Rectangle(cameraViewImageMat, GetRectFromRectangle(leftRect), new Scalar(0, 0, 255), 4);
    }
    resultWnd.ShowImage(cameraViewImageMat);
}
/// <summary>
/// Draws an ellipse over every detected face rectangle on the given image.
/// No-op when the detector is disabled or nothing was detected.
/// </summary>
/// <param name="resultImage">Image drawn onto; must not be null.</param>
public void DrawResultAsEllipse(Mat resultImage)
{
    // Original checks preserved in order: enabled flag, then detections.
    if (IsEnabled == false || DetectedFaceRects == null)
    {
        return;
    }
    Trace.Assert(resultImage != null);

    foreach (var faceRect in DetectedFaceRects)
    {
        var mid = new Point((int)(faceRect.X + faceRect.Width * 0.5), (int)(faceRect.Y + faceRect.Height * 0.5));
        var half = new Size((int)(faceRect.Width * 0.5), (int)(faceRect.Height * 0.5));
        Cv2.Ellipse(resultImage, mid, half, 0, 0, 360, ResultLineColor, 3);
    }
}
/// <summary>
/// Render detected faces via OpenCV.
/// </summary>
/// <param name="state">Current frame state.</param>
/// <param name="image">Web cam or video frame; not modified.</param>
/// <returns>Returns new image frame with ellipses around the faces.</returns>
private static Mat RenderFaces(FrameState state, Mat image)
{
    Mat result = image.Clone();

    // FIX: the original called Cv2.CvtColor(image, image, BGR2GRAY), which
    // converted the CALLER'S frame to grayscale in place even though the
    // grayscale data was never used afterwards. The destructive, dead
    // conversion has been removed.

    // Render all detected faces, padding the ellipse 10px beyond the box.
    foreach (var face in state.Faces)
    {
        var center = new Point { X = face.Center.X, Y = face.Center.Y };
        var axes = new Size { Width = (int)(face.Size.Width * 0.5) + 10, Height = (int)(face.Size.Height * 0.5) + 10 };
        Cv2.Ellipse(result, center, axes, 0, 0, 360, _faceColorBrush, 4);
    }
    return(result);
}
/// <summary>
/// Loads the Yalta sample image, detects faces with the given cascade and
/// returns a copy of the image with each face circled in magenta.
/// </summary>
/// <param name="cascade">Loaded Haar cascade.</param>
/// <returns>Annotated clone of the sample image.</returns>
private Mat DetectFace(CascadeClassifier cascade)
{
    Mat annotated;
    using (var src = new Mat(FilePath.Image.Yalta, ImreadModes.Color))
    using (var gray = new Mat())
    {
        annotated = src.Clone();
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);

        // Detect faces.
        Rect[] detections = cascade.DetectMultiScale(
            gray, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));

        // Outline every detection.
        foreach (var rect in detections)
        {
            var mid = new Point((int)(rect.X + rect.Width * 0.5), (int)(rect.Y + rect.Height * 0.5));
            var half = new Size((int)(rect.Width * 0.5), (int)(rect.Height * 0.5));
            Cv2.Ellipse(annotated, mid, half, 0, 0, 360, new Scalar(255, 0, 255), 4);
        }
    }
    return annotated;
}
// Finds edge defects on one of the two final ring contours: the XOR of the
// filled contour mask and a fitted-ellipse ("ideal shape") mask highlights
// notches/protrusions, which are then labeled via connected components.
// mode: "inner" analyzes contours_final[0], anything else contours_final[1].
// nLabels/stats receive the connected-component count and per-label stats.
static void FindContour_and_outer_defect(Mat img, List <Point[]> contours_final, ref int nLabels, out int [,] stats, string mode)
{
    // variable
    OpenCvSharp.Point[][] temp = new Point[1][];
    // contours_final: index 0 = inner ring, index 1 = outer ring
    OpenCvSharp.Point[] contour_now;
    if (mode == "inner")
    {
        contour_now = contours_final[0];
    }
    else
    {
        contour_now = contours_final[1];
    }
    // "Convex hull": approximate the ideal shape with a filled fitted ellipse.
    var ellipsecontour = Cv2.FitEllipse(contour_now);
    Mat convex_mask_img = Mat.Zeros(img.Size(), MatType.CV_8UC1);
    Cv2.Ellipse(convex_mask_img, ellipsecontour, 255, -1);
    // Contour: the actual detected shape, filled.
    temp[0] = contour_now;
    Mat contour_mask_img = Mat.Zeros(img.Size(), MatType.CV_8UC1);
    Cv2.DrawContours(contour_mask_img, temp, -1, 255, -1);
    // Symmetric difference = candidate defect regions.
    Mat diff_image = contour_mask_img ^ convex_mask_img;
    // Opening removes small specks; the kernel size controls how small a
    // concavity is still discarded.
    Mat kernel = Mat.Ones(4, 4, MatType.CV_8UC1);
    diff_image = diff_image.MorphologyEx(MorphTypes.Open, kernel);
    //========================= erode away the boundary (disabled) ===========
    //temp[0] = contour_now;
    //Cv2.DrawContours(diff_image, temp, -1, 0, 3);
    //================================================================
    // Debug dumps of the intermediate masks.
    convex_mask_img.SaveImage("./" + mode + "convex" + ".jpg");
    contour_mask_img.SaveImage("./" + mode + "contour" + ".jpg");
    diff_image.SaveImage("./" + mode + "mask" + ".jpg");
    // Connected components: one label per defect blob.
    var labelMat = new MatOfInt();
    var statsMat = new MatOfInt();    // rows: number of labels, columns: 5 stats per label
    var centroidsMat = new MatOfDouble();
    nLabels = Cv2.ConnectedComponentsWithStats(diff_image, labelMat, statsMat, centroidsMat);
    var labels = labelMat.ToRectangularArray();
    stats = statsMat.ToRectangularArray();
    var centroids = centroidsMat.ToRectangularArray();
}
/// <summary>
/// Continuously captures camera frames, runs Haar-cascade face detection and
/// switches the displayed image, until IsExitCapture is set.
/// (Original comments translated from Japanese.)
/// </summary>
public virtual void Capture(object state)
{
    var haarCascade = new CascadeClassifier("data/haarcascades/haarcascade_frontalface_default.xml");
    var camera = new VideoCapture(0 /* use device 0 */)
    {
        // Capture frame size (frame rate setting left disabled).
        FrameWidth = 480,
        FrameHeight = 270,
        // Fps = 60
    };
    using (var img = new Mat()) // receives each captured frame
    using (camera)
    {
        while (true)
        {
            if (this.IsExitCapture)
            {
                this.Dispatcher.Invoke(() => this._Image.Source = null);
                break;
            }
            camera.Read(img); // read from the webcam (blocks until a frame is buffered)
            if (img.Empty())
            {
                break;
            }
            // NOTE(review): result is cloned every frame and never disposed —
            // native memory is reclaimed only by finalizers; consider using/Dispose.
            var result = img.Clone();
            using (var gray = new Mat())
            {
                Cv2.CvtColor(img, gray, ColorConversionCodes.BGR2GRAY);
                var faces = haarCascade.DetectMultiScale(
                    gray, 1.08, 2, HaarDetectionType.FindBiggestObject, new OpenCvSharp.Size(50, 50)
                    );
                foreach (var face in faces)
                {
                    var center = new OpenCvSharp.Point
                    {
                        X = (int)(face.X + face.Width * 0.5),
                        Y = (int)(face.Y + face.Height * 0.5)
                    };
                    var axes = new OpenCvSharp.Size
                    {
                        Width = (int)(face.Width * 0.5),
                        Height = (int)(face.Height * 0.5)
                    };
                    Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
                }
            }
            this.Dispatcher.Invoke(() =>
            {
                this._Image.Source = result.ToWriteableBitmap(); // show the frame in the WPF Image control
            });
        }
    }
}
/// <summary>
/// Detects faces and, within each face, eyes; returns a copy of the source
/// with ellipses drawn around both.
/// </summary>
/// <param name="src">BGR input image; not modified.</param>
/// <returns>Annotated clone of the input.</returns>
private Mat DetectFace(Mat src)
{
    Mat result;
    using (Mat gray = new Mat())
    {
        result = src.Clone();
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
        // FIX: EqualizeHist was previously called BEFORE CvtColor populated
        // `gray` (i.e. on an empty Mat); it must run on the grayscale image.
        Cv2.EqualizeHist(gray, gray);

        // Detect faces
        OpenCvSharp.Rect[] faces = faceCascade.DetectMultiScale(
            gray, faceScale, faceMinNeightbors, faceDetectType, new Size(faceSize.x, faceSize.y));

        // Render all detected faces
        foreach (OpenCvSharp.Rect face in faces)
        {
            var faceCenter = new Point
            {
                X = (int)(face.X + face.Width * 0.5),
                Y = (int)(face.Y + face.Height * 0.5)
            };
            var faceAxes = new Size
            {
                Width = (int)(face.Width * 0.4),
                Height = (int)(face.Height * 0.5)
            };
            using (Mat faceROI = new Mat(gray, face))
            {
                // Detect eyes within the face region only.
                OpenCvSharp.Rect[] eyes = eyeCascade.DetectMultiScale(
                    faceROI, eyeScale, eyeMinNeightbors, eyeDetectType, new Size(eyeSize.x, eyeSize.y));

                // Render all eyes (offset back into full-image coordinates).
                foreach (OpenCvSharp.Rect eye in eyes)
                {
                    var eyeCenter = new Point
                    {
                        X = (int)(face.X + eye.X + eye.Width * 0.5),
                        Y = (int)(face.Y + eye.Y + eye.Height * 0.5)
                    };
                    var eyeAxes = new Size
                    {
                        Width = (int)(eye.Width * 0.5),
                        Height = (int)(eye.Height * 0.5)
                    };
                    // draw ellipse at the eyes
                    Cv2.Ellipse(result, eyeCenter, eyeAxes, 0, 0, 360, new Scalar(0, 0, 255), 4);
                }
            }
            // draw ellipse at the face
            Cv2.Ellipse(result, faceCenter, faceAxes, 0, 0, 360, new Scalar(255, 0, 255), 4);
        }
    }
    return(result);
}
/// <summary>
/// Creates a binary image containing an ellipse.
/// </summary>
/// <param name="ellipse">Ellipse's parameters</param>
/// <param name="size">Image's size</param>
/// <returns>Binary image containing an ellipse</returns>
private static Mat DrawEllipse(Ellipse ellipse, Size size)
{
    Mat canvas = Mat.Zeros(size, MatType.CV_8U);

    // Round the ellipse parameters to integer pixel coordinates.
    var center = new Point((int)Math.Round(ellipse.Center.X), (int)Math.Round(ellipse.Center.Y));
    var axes = new Size((int)Math.Round(ellipse.Axes.Width), (int)Math.Round(ellipse.Axes.Height));

    // Draw the outline first, then the filled interior, both with value 1.
    Cv2.Ellipse(canvas, center, axes, ellipse.Angle, 0, 360, 1, 1);
    Cv2.Ellipse(canvas, center, axes, ellipse.Angle, 0, 360, 1, Cv2.FILLED);

    return canvas;
}
/// <summary>
/// Demonstrates the basic OpenCvSharp drawing primitives on a loaded image
/// and shows the result in a window until a key is pressed.
/// </summary>
public void Drawing_method()
{
    using Mat canvas = new Mat("../../opencv.png", ImreadModes.AnyColor);

    // Line, circle and rectangle primitives.
    Cv2.Line(canvas, new Point(10, 10), new Point(630, 10), Scalar.Blue);
    Cv2.Circle(canvas, new Point(100, 100), 40, Scalar.Blue);
    Cv2.Rectangle(canvas, new Rect(20, 20, 40, 40), Scalar.Red);

    // Rotated ellipse and a text label.
    Cv2.Ellipse(canvas, new RotatedRect(new Point2f(120, 120), new Size2f(200, 100), 10), Scalar.Yellow);
    Cv2.PutText(canvas, "OpenCVSharp", new Point(600, 600), HersheyFonts.Italic, 5, Scalar.Black);

    Cv2.ImShow("draw", canvas);
    Cv2.WaitKey();
}
/// <summary>
/// Drawing demo: renders lines, shapes, polygons and text onto a blank BGR
/// canvas and displays it until a key is pressed.
/// </summary>
static void Main(string[] args)
{
    // Blank 1366x768 BGR canvas.
    Mat canvas = new Mat(new Size(1366, 768), MatType.CV_8UC3);

    Cv2.Line(canvas, new Point(100, 100), new Point(1200, 100), new Scalar(0, 0, 255), 3, LineTypes.AntiAlias);
    Cv2.Circle(canvas, new Point(300, 300), 50, new Scalar(0, 255, 0), Cv2.FILLED, LineTypes.Link4);
    Cv2.Rectangle(canvas, new Point(500, 200), new Point(1000, 400), new Scalar(255, 0, 0), 5);
    Cv2.Ellipse(canvas, new Point(1200, 300), new Size(100, 50), 0, 90, 180, new Scalar(255, 255, 0), 2);

    // Two polygons drawn as closed polylines.
    var polylines = new List <List <Point> >
    {
        new List <Point> { new Point(100, 500), new Point(300, 500), new Point(200, 600) },
        new List <Point> { new Point(400, 500), new Point(500, 500), new Point(600, 700), new Point(500, 650) },
    };
    Cv2.Polylines(canvas, polylines, true, new Scalar(0, 255, 255), 2);

    // One filled triangle.
    var filled = new Point[][]
    {
        new Point[] { new Point(700, 500), new Point(800, 500), new Point(700, 600) },
    };
    Cv2.FillPoly(canvas, filled, new Scalar(255, 0, 255), LineTypes.AntiAlias);

    Cv2.PutText(canvas, "OpenCV", new Point(900, 600), HersheyFonts.HersheyComplex | HersheyFonts.Italic, 2.0, new Scalar(255, 255, 255), 3);

    Cv2.ImShow("img", canvas);
    Cv2.WaitKey(0);
    Cv2.DestroyAllWindows();
}
/// <summary>
/// Decodes a Unity texture into a Mat, detects faces with the given cascade
/// and returns a copy with ellipses drawn around them.
/// Uses the instance fields src/result/gray/faces/center/axes as scratch state.
/// </summary>
/// <param name="cascade">Loaded Haar cascade.</param>
/// <param name="t">Texture to analyze.</param>
/// <returns>Annotated copy of the decoded image.</returns>
private Mat DetectFace(CascadeClassifier cascade, Texture2D t)
{
    src = Mat.FromImageData(t.EncodeToPNG(), ImreadModes.Color);
    result = src.Clone();
    Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
    // FIX: the source Mat was previously only set to null, leaking its native
    // buffer; release it explicitly before dropping the reference.
    src.Dispose();
    src = null;

    // Detect faces
    faces = cascade.DetectMultiScale(gray, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));

    // Render all detected faces
    for (int i = 0; i < faces.Length; i++)
    {
        center.X = (int)(faces[i].X + faces[i].Width * 0.5);
        center.Y = (int)(faces[i].Y + faces[i].Height * 0.5);
        axes.Width = (int)(faces[i].Width * 0.5);
        axes.Height = (int)(faces[i].Height * 0.5);
        Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
    }
    return(result);
}
/// <summary>
/// Decodes the image in <paramref name="ms"/>, draws an ellipse around every
/// detected face, and returns the annotated image re-encoded as JPEG bytes.
/// </summary>
/// <param name="ms">Stream containing an encoded source image.</param>
/// <returns>JPEG-encoded annotated image.</returns>
private byte[] ImageCompleted(MemoryStream ms)
{
    Mat src = OpenCvSharp.Extensions.BitmapConverter.ToMat(new System.Drawing.Bitmap(ms));
    Mat gray = new Mat();
    CascadeClassifier haarCascade = new CascadeClassifier("./haarcascade_frontalface_default.xml");
    var result = src.Clone();
    Cv2.CvtColor(src, gray, ColorConversion.BgrToGray);

    // Face detection.
    OpenCvSharp.CPlusPlus.Rect[] faces = haarCascade.DetectMultiScale(gray);

    // Draw an ellipse at each detected face position.
    foreach (OpenCvSharp.CPlusPlus.Rect face in faces)
    {
        var center = new OpenCvSharp.CPlusPlus.Point
        {
            X = (int)(face.X + face.Width * 0.5),
            Y = (int)(face.Y + face.Height * 0.5)
        };
        var axes = new OpenCvSharp.CPlusPlus.Size
        {
            Width = (int)(face.Width * 0.5),
            Height = (int)(face.Height * 0.5)
        };
        Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 2);
    }

    System.Drawing.Bitmap dstBitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(result);
    byte[] imageData = null;
    using (MemoryStream ms2 = new MemoryStream())
    {
        dstBitmap.Save(ms2, System.Drawing.Imaging.ImageFormat.Jpeg);
        // FIX: GetBuffer() returns the stream's internal buffer including
        // unused trailing capacity, yielding an oversized/padded byte array;
        // ToArray() returns exactly the bytes written.
        imageData = ms2.ToArray();
    }
    return(imageData);
}
/// <summary>
/// Draws the given squares as polylines and, for each square, an ellipse
/// inscribed in the centered half-size region of its bounding rect.
/// </summary>
/// <param name="image">Image drawn onto.</param>
/// <param name="squares">Square contours to render.</param>
public static void DrawSquares(Mat image, List <List <Point> > squares)
{
    for (int i = 0; i < squares.Count; i++)
    {
        int shift = 1;
        // Shrink the bounding rect to its centered half-size region.
        Rect r = Cv2.BoundingRect(InputArray.Create <Point>(squares[i]));
        r.X = r.X + r.Width / 4;
        r.Y = r.Y + r.Height / 4;
        r.Width = r.Width / 2;
        r.Height = r.Height / 2;
        // FIX: removed unused locals (`p`, `n`) and the unused, undisposed
        // ROI Mat that leaked native memory on every iteration.
        Scalar color = Scalar.Red;
        Cv2.Polylines(image, squares, true, color, 2, LineTypes.AntiAlias, shift);
        var center = new Point(r.X + r.Width / 2, r.Y + r.Height / 2);
        Cv2.Ellipse(image, center, new Size(r.Width / 2, r.Height / 2), 0, 0, 360, color, 2, LineTypes.AntiAlias);
    }
}
// Heuristic test for "white noise" speckle: fills the contour, masks the
// source with an ellipse inscribed in the contour's min-area rectangle and
// inspects the intensity inside it. Small, bright regions count as noise.
// (Original comments translated from Chinese.)
static bool Whitenoise(Mat Src, OpenCvSharp.Point[] contour)
{
    OpenCvSharp.Point[][] temp = new Point[1][];
    Mat now_defect_image = Mat.Zeros(Src.Size(), MatType.CV_8UC1);
    // Epsilon 0 keeps the polygon effectively unchanged.
    OpenCvSharp.Point[] approx = Cv2.ApproxPolyDP(contour, 0.000, true);
    temp[0] = approx;
    Cv2.DrawContours(now_defect_image, temp, -1, 255, -1);
    // Draw the bounding (min-area) rectangle as an elliptical mask.
    RotatedRect BoundingRectangle = Cv2.MinAreaRect(approx);
    Mat mask_image = Mat.Zeros(Src.Size(), MatType.CV_8UC1);
    Cv2.Ellipse(mask_image, BoundingRectangle, 255, -1, LineTypes.AntiAlias);
    //Console.WriteLine(BoundingRectangle.Size.Height* BoundingRectangle.Size.Width);
    // Area too large — definitely not white noise.
    if (BoundingRectangle.Size.Height * BoundingRectangle.Size.Width > 700)
    {
        return(false);
    }
    // Intensity statistics of the source inside the elliptical mask.
    double mean_in_area = 0, min_in_area = 0, max_in_area = 0;
    mean_in_area = Src.Mean(mask_image)[0];
    Src.MinMaxLoc(out min_in_area, out max_in_area, out _, out _, mask_image);
    //Console.WriteLine("mean: " + mean_in_area + " min: "+ min_in_area + " max: " + max_in_area);
    //mask_image.SaveImage("./contour2.jpg");
    // Bright interior (mean > 130) => classify as white noise.
    if (mean_in_area > 130)
    {
        return(true);
    }
    else
    {
        return(false);
    }
}
/// <summary>
/// Warps the face in <paramref name="src"/> to a canonical position using an
/// affine transform fitted between three detected anchor landmarks (left/right
/// eye extremes and a bottom-lip point) and the matching rows of MINMAX_TEMPLATE.
/// </summary>
/// <param name="src">Image containing the face.</param>
/// <param name="faceRect">Face bounding box; its size defines the output size.</param>
/// <param name="landmarks">Optional precomputed landmarks; detected from src when null.</param>
/// <returns>A new Mat of faceRect.Width x faceRect.Height with the aligned face.</returns>
public Mat NormalizePosition(Mat src, Rect faceRect, List <IDictionary <FacePart, IEnumerable <Point> > > landmarks = null)
{
    if (landmarks == null)
    {
        landmarks = GetLandmarks(src);
    }

    // Flatten every landmark point of the first face into one list; the index
    // into this list is later matched against MINMAX_TEMPLATE rows.
    var points = new List <Point>();
    foreach (var landmark in landmarks.First())
    {
        foreach (var p in landmark.Value.ToArray())
        {
            points.Add(p);
        }
    }

    var innerEyesAndBottomLip = new int[3];

    // Bottom-lip anchor. NOTE(review): the First(...) predicate compares
    // against ElementAt(0).X, so this effectively selects the first
    // bottom-lip point — presumably intentional; confirm.
    var bml = landmarks.First()[FacePart.BottomLip].First(v => v.X == landmarks.First()[FacePart.BottomLip].ElementAt(0).X);
    innerEyesAndBottomLip[2] = Enumerable.Select(points, (p, i) => new { p, i })
        .Where((v, i) => v.p.X == bml.X && v.p.Y == bml.Y)
        .Select(v => v.i).First();

    // Left-most point of the left eye.
    var el = landmarks.First()[FacePart.LeftEye]
        .First(v => v.X == landmarks.First()[FacePart.LeftEye].Min(p => p.X));
    innerEyesAndBottomLip[0] = Enumerable.Select(points, (p, i) => new { p, i })
        .Where((v, i) => v.p.X == el.X && v.p.Y == el.Y)
        .Select(v => v.i).First();

    // Right-most point of the right eye.
    var er = landmarks.First()[FacePart.RightEye]
        .First(v => v.X == landmarks.First()[FacePart.RightEye].Max(p => p.X));
    innerEyesAndBottomLip[1] = Enumerable.Select(points, (p, i) => new { p, i })
        .Where((v, i) => v.p.X == er.X && v.p.Y == er.Y)
        .Select(v => v.i).First();

#if DEV_MODE
    // Visualize the three anchor points.
    foreach (var i in innerEyesAndBottomLip)
    {
        Cv2.Ellipse(src, new RotatedRect(new Point2f(points[i].X, points[i].Y), new Size2f(5, 5), 0), Scalar.Red);
    }
#endif

    // Source triangle (detected anchors) and destination triangle (template
    // coordinates scaled to the face rectangle).
    var p1 = points.Where((p, i) => innerEyesAndBottomLip.Contains(i));
    var p2 = new[] {
        new Point2f(MINMAX_TEMPLATE[innerEyesAndBottomLip[0], 0], MINMAX_TEMPLATE[innerEyesAndBottomLip[0], 1]),
        new Point2f(MINMAX_TEMPLATE[innerEyesAndBottomLip[1], 0], MINMAX_TEMPLATE[innerEyesAndBottomLip[1], 1]),
        new Point2f(MINMAX_TEMPLATE[innerEyesAndBottomLip[2], 0], MINMAX_TEMPLATE[innerEyesAndBottomLip[2], 1]),
    };
    var h = Cv2.GetAffineTransform(p1.Select(p => new Point2f((p.X), (p.Y))), p2.Select(p => new Point2f(p.X * faceRect.Width, p.Y * faceRect.Height)));
    var ret = new Mat();
    Cv2.WarpAffine(src, ret, h, new Size(faceRect.Width, faceRect.Height),
                   InterpolationFlags.Linear, BorderTypes.Constant);
    return(ret);
}
/// <summary>
/// Detects motion between two frames via absolute difference + threshold +
/// morphology, then fits and draws an ellipse (and its center) around each
/// moving contour on a copy of the second frame.
/// </summary>
/// <param name="frame1">Earlier frame.</param>
/// <param name="frame2">Later frame; the returned image is a clone of it.</param>
/// <returns>Annotated clone of frame2.</returns>
private Mat MoveDetect(Mat frame1, Mat frame2)
{
    Mat result = frame2.Clone();
    var gray1 = new Mat();
    var gray2 = new Mat();
    Cv2.CvtColor(frame1, gray1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(frame2, gray2, ColorConversionCodes.BGR2GRAY);

    // Frame difference -> binary mask.
    var diff = new Mat();
    Cv2.Absdiff(gray1, gray2, diff);
    Cv2.ImShow("absdiss", diff);
    Cv2.Threshold(diff, diff, 45, 255, ThresholdTypes.Binary);
    Cv2.ImShow("threshold", diff);

    // Erode to kill speckle, then dilate heavily to merge moving blobs.
    Mat element = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));
    Mat element2 = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(25, 25));
    Cv2.Erode(diff, diff, element);
    Cv2.ImShow("erode", diff);
    Cv2.Dilate(diff, diff, element2);
    Cv2.ImShow("dilate", diff);

    Point[][] contours = null;
    HierarchyIndex[] hierarcy = null;
    // Draw an ellipse and its center per contour.
    Cv2.FindContours(diff, out contours, out hierarcy, RetrievalModes.External, ContourApproximationModes.ApproxNone);
    foreach (Point[] pts in contours)
    {
        // FitEllipse requires at least 5 points.
        if (pts.Length < 5)
        {
            continue;
        }
        var box1 = Cv2.FitEllipse(pts);
        Cv2.Ellipse(result, box1, new Scalar(0, 255, 0), 2);
        Point point;
        point.X = (int)box1.Center.X;
        // FIX: was `(int)box1.Center.X` — a copy-paste bug that placed the
        // center marker on the diagonal instead of at the ellipse center.
        point.Y = (int)box1.Center.Y;
        result.Circle(point, 1, -1);
    }
    return(result);
}
/// <summary>
/// Draws an axis-aligned ellipse inscribed in the rectangle spanned by points
/// <paramref name="a"/> and <paramref name="b"/> directly into the
/// WriteableBitmap's back buffer via a non-copying OpenCV Mat wrapper.
/// </summary>
/// <param name="Image">Target bitmap; assumed BGRA matching CV_8UC4 — TODO confirm.</param>
/// <param name="a">One corner of the bounding rectangle.</param>
/// <param name="b">Opposite corner of the bounding rectangle.</param>
/// <param name="c">Stroke color.</param>
/// <param name="thickness">Stroke thickness in pixels.</param>
/// <param name="drawMode">Selects the OpenCV line type; see note below.</param>
public static void DrawEllipse(WriteableBitmap Image, System.Windows.Point a, System.Windows.Point b, Color c, int thickness, Model.DrawMode drawMode)
{
    // Wrap the bitmap's back buffer without copying. NOTE(review): callers
    // presumably bracket this with Image.Lock()/AddDirtyRect()/Unlock();
    // writing to BackBuffer without that will not invalidate the bitmap — verify.
    Mat m = new Mat(Image.PixelHeight, Image.PixelWidth, MatType.CV_8UC4, Image.BackBuffer);
    // (1 << (drawMode + 2)) yields 4, 8 or 16, i.e. LineTypes Link4/Link8/
    // AntiAlias — assumes DrawMode members are numbered 0,1,2 in that order;
    // TODO confirm against Model.DrawMode.
    Cv2.Ellipse(m, new RotatedRect(new Point2f((float)(a.X + b.X) / 2, (float)(a.Y + b.Y) / 2), new Size2f(Math.Abs(a.X - b.X), Math.Abs(a.Y - b.Y)), 0), new Scalar(c.B, c.G, c.R, c.A), thickness, (LineTypes)(1 << (int)(drawMode + 2)));
}
/// <summary>
/// Reads one frame from the capture, builds a motion mask from the difference
/// with the running previous frame, fits a temporally-smoothed ellipse to the
/// moving region and writes per-pixel intensities of the motion area into
/// <paramref name="buf"/>.
/// </summary>
/// <param name="width">Working/output width in pixels.</param>
/// <param name="height">Working/output height in pixels.</param>
/// <param name="buf">Receives width*height values; non-zero where motion was detected.</param>
public void GetDiffFrame(int width, int height, out double[] buf)
{
    buf = new double[width * height];
    var frame = new Mat();
    var diff = new Mat();
    var rotatedrect = new RotatedRect();
    if (capture.Read(frame))
    {
        frame = frame.Resize(new Size(width, height));
        Cv2.CvtColor(frame, frame, ColorConversionCodes.BGR2GRAY);
        if (PrevFrame != null)
        {
            Cv2.Absdiff(frame, PrevFrame, diff);
            double weight = 1;
            Mat[] contours;
            // Two refinement passes: threshold the diff, measure how much of
            // the image changed, and keep only larger-than-average contours.
            for (int r = 0; r < 2; r++)
            {
                Cv2.Threshold(diff, diff, byte.MaxValue / 8, byte.MaxValue, ThresholdTypes.Otsu);
                var nonzerocnt = Cv2.CountNonZero(diff);
                // weight drops to 0 as more than 25% of pixels change
                // (treated as global motion / noise).
                weight = (0.25 - ((double)nonzerocnt) / (width * height)) / (0.25);
                weight = weight < 0 ? 0 : weight;
                if (weight > 0.5)
                {
                    Mat h = new Mat();
                    Cv2.FindContours(diff, out contours, new Mat(), RetrievalModes.External, ContourApproximationModes.ApproxTC89KCOS);
                    diff = new Mat(new Size(width, height), MatType.CV_8UC1, new Scalar(0));
                    if (contours.Length > 0)
                    {
                        // Redraw only contours whose area exceeds the average.
                        var areaave = contours.Average(x => Cv2.ContourArea(x));
                        for (int i = 0; i < contours.Length; i++)
                        {
                            if (Cv2.ContourArea(contours[i]) > areaave)
                            {
                                Cv2.DrawContours(diff, contours, i, new Scalar(byte.MaxValue), -1);
                            }
                        }
                    }
                }
                else
                {
                    // Too much change: discard the mask entirely.
                    diff = new Mat(new Size(width, height), MatType.CV_8UC1, new Scalar(0));
                }
            }
            Point[][] contourspoint;
            HierarchyIndex[] hierarchyIndexes;
            Cv2.FindContours(diff.Clone(), out contourspoint, out hierarchyIndexes, RetrievalModes.External, ContourApproximationModes.ApproxTC89KCOS);
            if (contourspoint.Length > 0)
            {
                // Collect the points of all top-level contours.
                var points = new List <Point>();
                for (int idx_cnt = 0; idx_cnt < contourspoint.GetLength(0); ++idx_cnt)
                {
                    if (hierarchyIndexes[idx_cnt].Parent != -1)
                    {
                        continue;
                    }
                    points.AddRange(contourspoint[idx_cnt]);
                }
                if (points.Count > 5)
                {
                    diff = new Mat(new Size(width, height), MatType.CV_8UC1, new Scalar(0));
                    rotatedrect = Cv2.FitEllipse(points);
                    // Exponentially smooth the ellipse against last frame's fit.
                    float rho = 0.25f;
                    rotatedrect.Angle = (rho * rotatedrect.Angle + (1 - rho) * PrevRect.Angle);
                    rotatedrect.Size.Width = (rho * rotatedrect.Size.Width + (1 - rho) * PrevRect.Size.Width);
                    rotatedrect.Size.Height = (rho * rotatedrect.Size.Height + (1 - rho) * PrevRect.Size.Height);
                    Cv2.Ellipse(diff, rotatedrect, new Scalar(byte.MaxValue), -1);
                }
            }
            // Blend with the previous diff frame for temporal smoothing.
            double w = 0.8;
            Cv2.AddWeighted(PrevDiffFrame, w, diff, 1 - w, 0, diff);
            Mat result = diff.Clone();
            //Cv2.Threshold(diff, result, byte.MaxValue / 8, byte.MaxValue, ThresholdTypes.Binary);
            Cv2.Dilate(result, result, new Mat(), new Point(-1, -1), 8);
            //frame.CopyTo(result, result);
            // Copy frame intensity (plus a 0.25 floor) wherever the motion
            // mask exceeds 25%.
            unsafe
            {
                byte *rslt = (byte *)result.Data;
                byte *f = (byte *)frame.Data;
                for (int i = 0; i < width * height; i++)
                {
                    double r = (double)rslt[i] / byte.MaxValue;
                    if (r > 0.25)
                    {
                        buf[i] = ((double)f[i] / byte.MaxValue) + 0.25;
                    }
                }
            }
        }
        if (PrevFrame == null)
        {
            // First frame: seed the running state.
            PrevFrame = frame.Clone();
            PrevDiffFrame = new Mat(PrevFrame.Size(), PrevFrame.Type(), new Scalar(0));
            PrevRect = new RotatedRect();
        }
        else
        {
            // Running average of frames: a slowly-updating background model.
            double weight = 0.5;
            Cv2.AddWeighted(PrevFrame, weight, frame, 1 - weight, 0, PrevFrame);
            PrevDiffFrame = diff.Clone();
            PrevRect = rotatedrect;
        }
    }
}
// Station-1 defect detection: extracts the ring ROI from a grayscale input,
// finds inner/outer contour defects and canny-based surface defects, draws
// them onto an RGB visualization and saves it into result/OK or result/NG.
// (Original comments translated from Chinese.)
static void Stop1_Detect(Mat Src, string fileindex)
{
    int OK_NG_flag = 0; // 0 = OK, 1 = NG (defect found)
    Mat Src_copy = Mat.Zeros(Src.Size(), MatType.CV_8UC1);
    Src.CopyTo(Src_copy);
    // RGB copy used only for visualization overlays.
    Mat vis_rgb = Src.CvtColor(ColorConversionCodes.GRAY2RGB);
    //Console.WriteLine(vis_rgb.Size()+" "+vis_rgb.Channels());
    var watch = new System.Diagnostics.Stopwatch();
    watch.Start();
    //========================== preprocessing to extract the ROI ===============================
    // Mask the inner-part noise of Src.
    // Crops the ring ROI out of Src (everything outside and inside the ring is
    // removed) and stores it back; returns the inner & outer circle contours.
    List <Point[]> contours_final = Mask_innercicle(ref Src);
    //========================= contour search ===================================================
    // Find outer defects; stats receives the regions that should be drawn.
    int nLabels_outer = 0; // number of labels
    int[,] stats_outer = null;
    FindContour_and_outer_defect(Src, contours_final, ref nLabels_outer, out stats_outer, "outer");
    int nLabels_inner = 0; // number of labels
    int[,] stats_inner = null;
    FindContour_and_outer_defect(Src, contours_final, ref nLabels_inner, out stats_inner, "inner");
    //=======================================================================================================
    // Overlay the fitted inner-ring ellipse for reference.
    var ellipsecontour = Cv2.FitEllipse(contours_final[0]);
    Cv2.Ellipse(vis_rgb, ellipsecontour, Scalar.Red, 2);
    //==================== adaptive threshold / canny based inner defects ==============================
    //List<Point[][]> Apaptive_Defect = AdaptiveThreshold_Based_Extract_Defect(Src, contours_final);
    List <Point[][]> canny_defect = canny_test(Src, contours_final, fileindex);
    foreach (Point[][] temp in canny_defect)
    {
        Cv2.DrawContours(vis_rgb, temp, -1, Scalar.Red, -1);
        OK_NG_flag = 1;
    }
    /*
     * //=====================================================================================================
     * //=================image_crop
     *
     * var biggestContourRect = Cv2.BoundingRect(contours_final[0]);
     * var expand_rect = new Rect(biggestContourRect.TopLeft.X - 200, biggestContourRect.TopLeft.Y - 200, biggestContourRect.Width + 200, biggestContourRect.Height + 200);
     * Src = new Mat(Src, expand_rect);
     * OpenCvSharp.Point offset_bounding_rec = expand_rect.TopLeft;
     * //output_mat.SaveImage("rec.jpg");
     * //=============================
     *
     * //==========1================================================================
     * List<Point[][]> MSER_Big = My_MSER(8, 800, 20000, 4, Src, ref vis_rgb, 1);// 8 3
     * List<Point[][]> MSER_Small = My_MSER(6, 120, 800, 4, Src, ref vis_rgb, 0);//6 2.5
     *
     * //OK or NG
     * // draw outer defect by stats
     *
     * foreach(Point[][] temp in MSER_Big)
     * {
     *     Cv2.DrawContours(vis_rgb, temp, -1, Scalar.Red, 3,offset: offset_bounding_rec);
     *     OK_NG_flag = 1;
     * }
     * foreach (Point[][] temp in MSER_Small)
     * {
     *     Cv2.DrawContours(vis_rgb, temp, -1, Scalar.Red, 3, offset: offset_bounding_rec);
     *     OK_NG_flag = 1;
     * }
     *
     * foreach (Point[][] temp in Apaptive_Defect)
     * {
     *     Cv2.DrawContours(vis_rgb, temp, -1, Scalar.Blue, 3);
     *     OK_NG_flag = 1;
     * }
     */
    // Inner burrs (rough edges): flag component areas within the configured range.
    for (int i = 0; i < nLabels_inner; i++)
    {
        int area = stats_inner[i, 4];
        if (area < 200000 && area < stop1_out_defect_size_max && area > stop1_out_defect_size_min)
        {
            //Console.WriteLine(area);
            vis_rgb.Rectangle(new Rect(stats_inner[i, 0], stats_inner[i, 1], stats_inner[i, 2], stats_inner[i, 3]), Scalar.Green, 3);
            OK_NG_flag = 1;
        }
    }
    // Outer burrs.
    for (int i = 0; i < nLabels_outer; i++)
    {
        int area = stats_outer[i, 4];
        if (area < 200000 && area < stop1_out_defect_size_max && area > stop1_out_defect_size_min)
        {
            //Console.WriteLine(area);
            vis_rgb.Rectangle(new Rect(stats_outer[i, 0], stats_outer[i, 1], stats_outer[i, 2], stats_outer[i, 3]), Scalar.Green, 3);
            OK_NG_flag = 1;
        }
    }
    Console.WriteLine(OK_NG_flag == 1 ? "NG" : "OK");
    //Src.SaveImage("./result/test" + fileindex + ".jpg");
    // Save the visualization into the OK or NG folder and bump the counter.
    if (OK_NG_flag == 1)
    {
        vis_rgb.SaveImage("./result/NG/test" + fileindex);
        NG_count++;
    }
    else
    {
        vis_rgb.SaveImage("./result/OK/test" + fileindex);
        OK_count++;
    }
    //==================================================================
    watch.Stop();
    // Print elapsed time.
    Console.WriteLine($"Execution Time: {watch.ElapsedMilliseconds} ms");
}
/// <summary>
/// Computes beam-like ellipse parameters (ISO 11146-style second moments) for
/// each large contour in <paramref name="img"/> and draws centroid + ellipse
/// onto <paramref name="dst"/>. When tracking is enabled, also nudges the TEC
/// current setpoint to steer the beam toward a target x-position.
/// </summary>
/// <param name="img">Input image; thresholded and resized in place.</param>
/// <param name="dst">Overlay target; resized in place and returned.</param>
/// <returns>The annotated dst image.</returns>
public Mat Iso11146(Mat img, Mat dst)
{
    Cv2.Resize(img, img, new OpenCvSharp.Size(960, 687), 0, 0, InterpolationFlags.Linear);
    Cv2.Resize(dst, dst, new OpenCvSharp.Size(960, 687), 0, 0, InterpolationFlags.Linear);
    OpenCvSharp.Point[][] contours;
    HierarchyIndex[] hierarchy;
    Cv2.Threshold(img, img, 50, 255, ThresholdTypes.Binary);
    Cv2.FindContours(img, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxTC89L1);
    foreach (OpenCvSharp.Point[] p in contours)
    {
        // Ignore small blobs.
        if (Cv2.ContourArea(p) < 1000)
        {
            continue;
        }
        Moments moments = Cv2.Moments(p, true);
        if (moments.M00 != 0)
        {
            // Centroid from first-order moments.
            int cX = (int)(moments.M10 / moments.M00);
            int cY = (int)(moments.M01 / moments.M00);
            // Normalized central second moments. NOTE(review): truncating to
            // int loses sub-pixel precision and the sign of Mu11 used in the
            // orientation formula below — confirm this is acceptable.
            int cX2 = (int)(moments.Mu20 / moments.M00);
            int cXY = (int)(moments.Mu11 / moments.M00);
            int cY2 = (int)(moments.Mu02 / moments.M00);
            double a = Math.Pow(((cX2 + cY2) + 2 * Math.Abs(cXY)), 0.5);
            // Beam widths: d = 2*sqrt(2)*sqrt((<x^2>+<y^2>) +/- 2|<xy>|).
            int dX = (int)(2 * Math.Pow(2, 0.5) * Math.Pow(((cX2 + cY2) + 2 * Math.Abs(cXY)), 0.5));
            int dY = (int)(2 * Math.Pow(2, 0.5) * Math.Pow(((cX2 + cY2) - 2 * Math.Abs(cXY)), 0.5));
            double t;
            if ((cX2 - cY2) != 0)
            {
                t = 2 * cXY / (cX2 - cY2);
            }
            else
            {
                t = 0;
            }
            // NOTE(review): radians-to-degrees normally multiplies by 180/PI;
            // the /PI factor is missing here — confirm whether intentional.
            double theta = 0.5 * Math.Atan(t) * 180;
            OpenCvSharp.Point center = new OpenCvSharp.Point(cX, cY);
            OpenCvSharp.Size axis = new OpenCvSharp.Size(dX, dY);
            Cv2.Circle(dst, cX, cY, 1, Scalar.Black);
            if (trackingLD1)
            {
                if (tracking == false)
                {
                    tracking = true;
                    // Nudge the TEC current setpoint by 0.005 toward the
                    // target x-position (axis_x + axis_scale/2), with a
                    // +/-10 px dead band.
                    if ((cX - (axis_x + axis_scale / 2)) > 10)
                    {
                        itc.getTecCurrSetpoint(0, out temp);
                        itc.setTecCurrSetpoint(temp - 0.005);
                    }
                    else if ((cX - (axis_x + axis_scale / 2)) < -10)
                    {
                        itc.getTecCurrSetpoint(0, out temp);
                        itc.setTecCurrSetpoint(temp + 0.005);
                    }
                }
            }
            if (dX > 0 && dY > 0)
            {
                Cv2.Ellipse(dst, center, axis, theta, 0, 360, Scalar.White);
            }
        }
    }
    return(dst);
}
/// <summary>
/// Entry point: captures from the default webcam, detects faces with a Haar
/// cascade, exponentially smooths the first face's rectangle and shows the
/// annotated frames in a window until any key is pressed.
/// (Original comments translated from Russian.)
/// </summary>
static void Main(string[] args)
{
    try
    {
        using var capture = new VideoCapture(0);
        // NOTE(review): Mat(rows, cols, ...) — FrameWidth is passed as the row
        // count here; likely swapped, though capture.Read reallocates src
        // anyway — verify.
        using var src = new Mat(capture.FrameWidth, capture.FrameHeight, MatType.CV_8UC4);
        using var dst = new Mat(capture.FrameWidth, capture.FrameHeight, MatType.CV_8UC4);
        using var gray = new Mat();
        using var classifier = new CascadeClassifier("haarcascade_frontalface_alt.xml");
        //using var classifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
        int sleepTime = (int)Math.Round(1000 / capture.Fps); // ms per frame
        Rect[] faces;
        using (var window = new Window("capture"))
        {
            // Exponential smoothers (alpha = 0.2) for the face box parameters.
            const double a = 0.2;
            var smothX = new Smoth(a);
            var smothY = new Smoth(a);
            var smothWd = new Smoth(a);
            var smothHg = new Smoth(a);
            bool init = false;
            while (true)
            {
                capture.Read(src);
                // Face detection.
                Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
                faces = classifier.DetectMultiScale(gray, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));
                // Initialization: seed the smoothers from the first detection.
                if (!init && faces.Length > 0)
                {
                    init = true;
                    smothX.Reset(faces[0].X);
                    smothY.Reset(faces[0].Y);
                    smothWd.Reset(faces[0].Width);
                    smothHg.Reset(faces[0].Height);
                }
                // Render the results.
                src.CopyTo(dst);
                foreach (Rect face in faces)
                {
                    {
                        smothX.AddNewValue(face.X);
                        smothY.AddNewValue(face.Y);
                        smothWd.AddNewValue(face.Width);
                        smothHg.AddNewValue(face.Height);
                    }
                    {
                        // Thin blue ellipse per raw detection.
                        var center = new Point { X = (int)(face.X + face.Width * 0.5), Y = (int)(face.Y + face.Height * 0.5) };
                        var axes = new Size { Width = (int)(face.Width * 0.5), Height = (int)(face.Height * 0.5) };
                        Cv2.Ellipse(dst, center, axes, 0, 0, 360, new Scalar(255, 0, 0), 1);
                    }
                }
                {
                    // Red dot at the smoothed face center.
                    var x = smothX.Value;
                    var y = smothY.Value;
                    var wd = smothWd.Value;
                    var hg = smothHg.Value;
                    var center = new Point { X = (int)(x + wd * 0.5), Y = (int)(y + hg * 0.5) };
                    var axes = new Size { Width = (int)(wd * 0.5), Height = (int)(hg * 0.5) };
                    //Cv2.Ellipse(dst, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
                    Cv2.Ellipse(dst, center, new Size { Width = 5, Height = 5 }, 0, 0, 360, new Scalar(0, 0, 255), 3);
                }
                window.ShowImage(dst);
                // Exit on any key press.
                if (Cv2.WaitKey(sleepTime) != -1)
                {
                    break;
                }
            }
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): WriteLine(string, object) treats Message as a format
        // string; a Message containing '{' would throw — consider interpolation.
        Console.WriteLine(ex.Message, ex.StackTrace);
        Console.ReadLine();
    }
}