/// <summary>
/// Detects faces on the sample image with the given cascade and draws an ellipse around each detection.
/// </summary>
/// <param name="cascade">Cascade classifier used for face detection.</param>
/// <returns>A copy of the source image with the detected faces marked.</returns>
private Mat DetectFace(CascadeClassifier cascade)
{
    Mat result;

    using (var src = new Mat(FilePath.Image.Yalta, ImreadModes.Color))
    using (var gray = new Mat())
    {
        result = src.Clone();
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);

        // Detect faces
        Rect[] faces = cascade.DetectMultiScale(
            gray, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));

        // Render all detected faces
        foreach (Rect face in faces)
        {
            var center = new Point
            {
                X = (int)(face.X + face.Width * 0.5),
                Y = (int)(face.Y + face.Height * 0.5)
            };
            var axes = new Size
            {
                Width = (int)(face.Width * 0.5),
                Height = (int)(face.Height * 0.5)
            };
            Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
        }
    }

    return result;
}
private int OpenCVFaceDetector(string path)
{
    // uses the OpenCV library
    OpenCvSharp.CascadeClassifier faceClassifier =
        new OpenCvSharp.CascadeClassifier(@"./haarcascade/haarcascade_frontalface_alt.xml");
    OpenCvSharp.Mat result;
    Rect[] faces = new Rect[0];

    using (var src = new OpenCvSharp.Mat(path, OpenCvSharp.ImreadModes.Color))
    using (var gray = new OpenCvSharp.Mat())
    {
        result = src.Clone();
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);

        // Detect faces
        faces = faceClassifier.DetectMultiScale(
            gray, 1.08, 2, OpenCvSharp.HaarDetectionType.ScaleImage);

        List<System.Drawing.Rectangle> rfaces = new List<System.Drawing.Rectangle>();
        foreach (Rect face in faces)
        {
            System.Drawing.Rectangle r =
                new System.Drawing.Rectangle(face.X, face.Y, face.Width, face.Height);
            this.GetLandmarks(gray, face, rfaces);
            rfaces.Add(r);
        }

        DrawOnImage?.Invoke(rfaces.ToArray(), new System.Drawing.Size(result.Width, result.Height));
    }

    result.Dispose();
    return faces.Length;
}
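// The DrawOnImage event raised above is not shown in this snippet. A minimal sketch of a matching
// declaration and handler, assuming the delegate simply receives the detected rectangles plus the
// frame size (the handler name and its body are hypothetical, not the original implementation):
public event Action<System.Drawing.Rectangle[], System.Drawing.Size> DrawOnImage;

private void OnDrawOnImage(System.Drawing.Rectangle[] faces, System.Drawing.Size frameSize)
{
    // e.g. scale the rectangles to the on-screen control and paint them in the UI layer
    Console.WriteLine($"Detected {faces.Length} face(s) in a {frameSize.Width}x{frameSize.Height} frame.");
}

// Subscription, e.g. in the owning form or view-model:
// detector.DrawOnImage += detector.OnDrawOnImage;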
public byte[] Mark(Mat srcImage)
{
    try
    {
        var grayImage = new Mat();
        Cv2.CvtColor(srcImage, grayImage, ColorConversionCodes.BGRA2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);

        var cascade = new CascadeClassifier(
            $@"{AppDomain.CurrentDomain.BaseDirectory}/Services/Data/haarcascade_frontalface_alt.xml");
        var faces = cascade.DetectMultiScale(
            grayImage, 1.1, 3,
            HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage);

        if (faces.Length < 1)
        {
            return null;
        }

        var face = faces.FirstOrDefault();
        var image = new Image();
        var file = image.ConvertToByte(srcImage);
        return Crop(file, face.X, face.Y, face.Width, face.Height);
    }
    catch (Exception)
    {
        return null;
    }
}
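// The Crop helper called above is not included in this snippet. A minimal sketch, assuming it
// receives the encoded image bytes plus the face rectangle and returns the cropped region
// re-encoded as JPEG (the bounds clamping and the ".jpg" choice are assumptions, not the
// original implementation):
private static byte[] Crop(byte[] imageData, int x, int y, int width, int height)
{
    using (var mat = Cv2.ImDecode(imageData, ImreadModes.Color))
    {
        // Clamp the requested rectangle to the image bounds before taking the ROI
        int x0 = Math.Max(0, x);
        int y0 = Math.Max(0, y);
        int w = Math.Min(width, mat.Width - x0);
        int h = Math.Min(height, mat.Height - y0);

        using (var cropped = new Mat(mat, new Rect(x0, y0, w, h)))
        {
            Cv2.ImEncode(".jpg", cropped, out byte[] buffer);
            return buffer;
        }
    }
}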
/*
public TargetDetector(string fileName)
{
    Cascade = new CascadeClassifier(fileName);
    Scale = 1.04;
    ScaleFactor = 1.3;
    MinNeighbors = 2;
}
*/

public TargetDetector(Mat mask)
{
    Cascade = new CascadeClassifier(App.FaceCascadeName);
    EyeCascade = new CascadeClassifier(App.EyeCascadeName);
    SetMask(mask);
    Scale = 1.04;
    ScaleFactor = 1.3;
    MinNeighbors = 2;
}
public void Run()
{
    // Load the cascades
    var haarCascade = new CascadeClassifier(FilePath.Text.HaarCascade);
    var lbpCascade = new CascadeClassifier(FilePath.Text.LbpCascade);

    // Detect faces
    Mat haarResult = DetectFace(haarCascade);
    Mat lbpResult = DetectFace(lbpCascade);

    Cv2.ImShow("Faces by Haar", haarResult);
    Cv2.ImShow("Faces by LBP", lbpResult);
    Cv2.WaitKey(0);
    Cv2.DestroyAllWindows();
}
public static List<OpenCvSharp.Rect> DetectFaces(OpenCvSharp.Mat image)
{
    List<OpenCvSharp.Rect> faces = new List<OpenCvSharp.Rect>();
    var facesCascade = HttpContext.Current.Server.MapPath("~/face.xml");

    using (OpenCvSharp.CascadeClassifier face = new OpenCvSharp.CascadeClassifier(facesCascade))
    using (OpenCvSharp.Mat ugray = new OpenCvSharp.Mat())
    {
        Cv2.CvtColor(image, ugray, ColorConversionCodes.BGRA2GRAY);
        Cv2.EqualizeHist(ugray, ugray);

        var facesDetected = face.DetectMultiScale(
            image: ugray,
            scaleFactor: 1.1,
            minNeighbors: 10,
            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            minSize: new OpenCvSharp.Size(20, 20));

        faces.AddRange(facesDetected);
    }

    return faces;
}
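// Possible caller for the helper above (a sketch, not part of the original code): detect the
// faces, outline each one on the input image and return the annotated frame as a JPEG.
public static byte[] MarkFaces(OpenCvSharp.Mat image)
{
    foreach (var faceRect in DetectFaces(image))
    {
        // Draw a 2 px red box around each detection (the image is assumed to be BGR/BGRA)
        Cv2.Rectangle(image, faceRect, new OpenCvSharp.Scalar(0, 0, 255), 2);
    }

    Cv2.ImEncode(".jpg", image, out byte[] buffer);
    return buffer;
}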
/// <summary>
/// Processor initializer
/// </summary>
/// <param name="facesCascadeData">String with cascade XML for face detection, must be defined</param>
/// <param name="eyesCascadeData">String with cascade XML for eyes detection, can be null</param>
/// <param name="shapeData">Binary data with trained shape predictor for 68-point face landmarks recognition, can be empty or null</param>
public virtual void Initialize(string facesCascadeData, string eyesCascadeData, byte[] shapeData = null)
{
    // face detector - the key thing here
    if (null == facesCascadeData || facesCascadeData.Length == 0)
    {
        throw new Exception(
            "FaceProcessor.Initialize: No face detector cascade passed, this parameter is required");
    }

    FileStorage storageFaces = new FileStorage(facesCascadeData, FileStorage.Mode.Read | FileStorage.Mode.Memory);
    cascadeFaces = new CascadeClassifier();
    if (!cascadeFaces.Read(storageFaces.GetFirstTopLevelNode()))
    {
        throw new Exception("FaceProcessor.Initialize: Failed to load faces cascade classifier");
    }

    // eyes detector
    if (null != eyesCascadeData)
    {
        FileStorage storageEyes = new FileStorage(eyesCascadeData, FileStorage.Mode.Read | FileStorage.Mode.Memory);
        cascadeEyes = new CascadeClassifier();
        if (!cascadeEyes.Read(storageEyes.GetFirstTopLevelNode()))
        {
            throw new Exception("FaceProcessor.Initialize: Failed to load eyes cascade classifier");
        }
    }

    // shape detector
    if (null != shapeData && shapeData.Length > 0)
    {
        shapeFaces = new ShapePredictor();
        shapeFaces.LoadData(shapeData);
    }
}
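// Example of feeding the initializer above (a sketch with assumed file names and paths; the
// cascades are passed as raw XML strings because they are loaded through an in-memory FileStorage):
var processor = new FaceProcessor();   // assumes a concrete, constructible FaceProcessor type
string facesXml = System.IO.File.ReadAllText("haarcascade_frontalface_default.xml");
string eyesXml = System.IO.File.ReadAllText("haarcascade_eye.xml");   // optional, may be null
byte[] shape = System.IO.File.Exists("shape_predictor_68_face_landmarks.dat")
    ? System.IO.File.ReadAllBytes("shape_predictor_68_face_landmarks.dat")
    : null;
processor.Initialize(facesXml, eyesXml, shape);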
private void Init()
{
    new Thread(() =>
    {
        var eye_casc = new cv.CascadeClassifier("eye.xml");
        var left_eye_casc = new cv.CascadeClassifier("left_eye.xml");
        var right_eye_casc = new cv.CascadeClassifier("right_eye.xml");
        var face_casc = new cv.CascadeClassifier("fface_default.xml");

        cap = new cv.VideoCapture(0);
        while (true)
        {
            if (released)
            {
                break;
            }

            var img = new cv.Mat();
            cap.Read(img);

            var gray = img.CvtColor(cv.ColorConversionCodes.BGR2GRAY);
            var gaus = gray.AdaptiveThreshold(255, cv.AdaptiveThresholdTypes.GaussianC,
                cv.ThresholdTypes.Binary, 115, 1);
            img = gaus;

            var faces = face_casc.DetectMultiScale(gray, 1.3, 5);
            RenderTargetBitmap eyes_lay = null;
            foreach (var face in faces)
            {
                var rect = new cv.Rect(face.Location, face.Size);
                //img.Rectangle(rect, new cv.Scalar(255, 0, 0));
                var sub_ing = gray[rect];
                var sub_ing_rgb = img[rect];

                var eyes = eye_casc.DetectMultiScale(sub_ing, 1.3, 2);

                // left eye
                int count = 0;
                foreach (var eye in eyes)
                {
                    count++;
                    if (count > 2)
                    {
                        count = 0;
                        break;
                    }

                    var rect_eye = new cv.Rect(eye.Location, eye.Size);
                    if (eye.X + eye.Width < face.Width / 2)
                    {
                        //sub_ing_rgb.Rectangle(rect_eye, new cv.Scalar(0, 255, 0));
                        Dispatcher.Invoke(() =>
                        {
                            eyes_lay = DrawImg(cv.Extensions.BitmapSourceConverter.ToBitmapSource(img),
                                eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height,
                                eye_l, scale_w, scale_h);
                        });
                    }
                }

                // right eye
                count = 0;
                foreach (var eye in eyes)
                {
                    count++;
                    if (count > 2)
                    {
                        break;
                    }

                    var rect_eye = new cv.Rect(eye.Location, eye.Size);
                    if (eye.X + eye.Width > face.Width / 2)
                    {
                        Dispatcher.Invoke(() =>
                        {
                            if (eyes_lay != null)
                            {
                                eyes_lay = DrawImg(eyes_lay,
                                    eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height,
                                    eye_r, scale_w, scale_h);
                            }
                            else
                            {
                                eyes_lay = DrawImg(cv.Extensions.BitmapSourceConverter.ToBitmapSource(img),
                                    eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height,
                                    eye_r, scale_w, scale_h);
                            }
                        });
                    }
                }
            }

            Dispatcher.Invoke(() =>
            {
                if (eyes_lay != null)
                {
                    OutImg.Source = eyes_lay;
                }
                else
                {
                    OutImg.Source = cv.Extensions.BitmapSourceConverter.ToBitmapSource(img);
                }
            });

            //Thread.Sleep(100);
            GC.Collect();
        }
    })
    { IsBackground = true }.Start();
}