DetectMultiScale() 공개 메소드

Detects objects of different sizes in the input image. The detected objects are returned as a list of rectangles.
public DetectMultiScale ( OpenCvSharp.Mat image, double scaleFactor = 1.1, int minNeighbors = 3, HaarDetectionType flags = 0, Size? minSize = null, Size? maxSize = null ) : Rect[]
image OpenCvSharp.Mat Matrix of the type CV_8U containing an image where objects are detected.
scaleFactor double Parameter specifying how much the image size is reduced at each image scale.
minNeighbors int Parameter specifying how many neighbors each candidate rectangle should have to retain it.
flags HaarDetectionType Parameter with the same meaning for an old cascade as in the function cvHaarDetectObjects. It is not used for a new cascade.
minSize Size Minimum possible object size. Objects smaller than that are ignored.
maxSize Size Maximum possible object size. Objects larger than that are ignored.
리턴 Rect[]
예제 #1
1
        /// <summary>
        /// Runs Haar-cascade face detection on the Yalta sample image and
        /// outlines every detected face with a magenta ellipse.
        /// </summary>
        /// <param name="cascade">Trained cascade classifier used for detection.</param>
        /// <returns>A clone of the source image with the detections drawn on it.</returns>
        private Mat DetectFace(CascadeClassifier cascade)
        {
            Mat annotated;

            using (var source = new Mat(FilePath.Image.Yalta, ImreadModes.Color))
            using (var grayscale = new Mat())
            {
                annotated = source.Clone();
                Cv2.CvtColor(source, grayscale, ColorConversionCodes.BGR2GRAY);

                // Detect faces on the grayscale image.
                var detections = cascade.DetectMultiScale(
                    grayscale, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));

                // Draw an ellipse centred on each detection rectangle.
                foreach (var region in detections)
                {
                    var center = new Point(
                        (int)(region.X + region.Width * 0.5),
                        (int)(region.Y + region.Height * 0.5));
                    var axes = new Size(
                        (int)(region.Width * 0.5),
                        (int)(region.Height * 0.5));
                    Cv2.Ellipse(annotated, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
                }
            }
            return annotated;
        }
예제 #2
0
        /// <summary>
        /// Detects faces in the image at <paramref name="path"/> with an OpenCV
        /// Haar cascade, collects landmarks for each face, and raises the
        /// DrawOnImage event with the detected rectangles.
        /// </summary>
        /// <param name="path">Path of the image file to analyse.</param>
        /// <returns>The number of faces detected.</returns>
        private int OpenCVFaceDetector(string path)
        {
            Rect[] faces = new Rect[0];

            // CascadeClassifier and Mat wrap native resources; dispose them all
            // deterministically (the original leaked the classifier, and leaked
            // `result` on any exception before its manual Dispose call).
            using (var faceClassifier = new OpenCvSharp.CascadeClassifier(@"./haarcascade/haarcascade_frontalface_alt.xml"))
            using (var src = new OpenCvSharp.Mat(path, OpenCvSharp.ImreadModes.Color))
            using (var gray = new OpenCvSharp.Mat())
            using (var result = src.Clone())
            {
                Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);

                // Detect faces
                faces = faceClassifier.DetectMultiScale(gray, 1.08, 2, OpenCvSharp.HaarDetectionType.ScaleImage);
                var rfaces = new List<System.Drawing.Rectangle>();
                foreach (Rect face in faces)
                {
                    var r = new System.Drawing.Rectangle(face.X, face.Y, face.Width, face.Height);
                    // Landmark extraction appends additional rectangles into rfaces.
                    this.GetLandmarks(gray, face, rfaces);
                    rfaces.Add(r);
                }
                DrawOnImage?.Invoke(rfaces.ToArray(), new System.Drawing.Size(result.Width, result.Height));
            }
            return faces.Length;
        }
예제 #3
0
        /// <summary>
        /// Detects the first face in <paramref name="srcImage"/> and returns the
        /// source image cropped to that face as a byte array.
        /// </summary>
        /// <param name="srcImage">Source image; converted with BGRA2GRAY for detection
        /// (assumes a 4-channel input — confirm against callers).</param>
        /// <returns>The cropped face bytes, or null when no face is found or detection fails.</returns>
        public byte[] Mark(Mat srcImage)
        {
            try
            {
                // Mat and CascadeClassifier hold native memory; the original never
                // disposed either — release them deterministically here.
                using (var grayImage = new Mat())
                using (var cascade = new CascadeClassifier($@"{AppDomain.CurrentDomain.BaseDirectory}/Services/Data/haarcascade_frontalface_alt.xml"))
                {
                    // Equalised grayscale input improves cascade detection stability.
                    Cv2.CvtColor(srcImage, grayImage, ColorConversionCodes.BGRA2GRAY);
                    Cv2.EqualizeHist(grayImage, grayImage);

                    var faces = cascade.DetectMultiScale(
                        grayImage,
                        1.1,
                        3, HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage
                        );

                    if (faces.Length < 1)
                    {
                        return null;
                    }

                    // Length was checked above, so taking the first element is safe.
                    var face  = faces[0];
                    var image = new Image();

                    var file = image.ConvertToByte(srcImage);
                    return Crop(file, face.X, face.Y, face.Width, face.Height);
                }
            }
            catch (Exception)
            {
                // Best-effort contract: any detection/conversion failure yields null.
                return null;
            }
        }
예제 #4
0
        /// <summary>
        /// Detects all faces in the given image using the Haar cascade file mapped
        /// from "~/face.xml" and returns their bounding rectangles.
        /// </summary>
        /// <param name="image">Input image; converted with BGRA2GRAY before detection.</param>
        /// <returns>The list of detected face rectangles (empty when none are found).</returns>
        public static List<OpenCvSharp.Rect> DetectFaces(OpenCvSharp.Mat image)
        {
            var result = new List<OpenCvSharp.Rect>();
            var cascadePath = HttpContext.Current.Server.MapPath("~/face.xml");

            using (var classifier = new OpenCvSharp.CascadeClassifier(cascadePath))
            using (var grayscale = new OpenCvSharp.Mat())
            {
                // Equalised grayscale input improves cascade detection stability.
                Cv2.CvtColor(image, grayscale, ColorConversionCodes.BGRA2GRAY);
                Cv2.EqualizeHist(grayscale, grayscale);

                var detected = classifier.DetectMultiScale(
                    image: grayscale,
                    scaleFactor: 1.1,
                    minNeighbors: 10,
                    flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                    minSize: new OpenCvSharp.Size(20, 20));
                result.AddRange(detected);
            }
            return result;
        }
예제 #5
0
        /// <summary>
        /// Starts a background thread that loops over webcam frames: detects a
        /// face, then detects eyes inside it, and renders eye overlays onto
        /// OutImg through the WPF dispatcher. Loops until <c>released</c> is set.
        /// </summary>
        private void Init()
        {
            new Thread(() =>
            {
                // NOTE(review): the classifiers, capture, and per-frame Mats are
                // never disposed; the loop relies on GC.Collect() below to reclaim
                // native memory. Consider deterministic Dispose() calls instead.
                var eye_casc       = new cv.CascadeClassifier("eye.xml");
                var left_eye_casc  = new cv.CascadeClassifier("left_eye.xml");
                var right_eye_casc = new cv.CascadeClassifier("right_eye.xml");
                var face_casc      = new cv.CascadeClassifier("fface_default.xml");

                // Device index 0: default system camera.
                cap = new cv.VideoCapture(0);

                while (true)
                {
                    if (released)
                    {
                        break;
                    }
                    var img = new cv.Mat();
                    cap.Read(img);
                    var gray  = img.CvtColor(cv.ColorConversionCodes.BGR2GRAY);
                    var gaus  = gray.AdaptiveThreshold(255, cv.AdaptiveThresholdTypes.GaussianC, cv.ThresholdTypes.Binary, 115, 1);
                    // From here on `img` is the binarised frame, so the displayed
                    // output (and the overlay base) is the thresholded image.
                    img       = gaus;
                    // Faces are detected on the plain grayscale frame, not the
                    // thresholded one.
                    var faces = face_casc.DetectMultiScale(gray, 1.3, 5);
                    RenderTargetBitmap eyes_lay = null;

                    foreach (var face in faces)
                    {
                        var rect = new cv.Rect(face.Location, face.Size);
                        //img.Rectangle(rect, new cv.Scalar(255, 0, 0));

                        // Sub-images restricted to the face region; eye search
                        // happens only inside the face rectangle.
                        var sub_ing     = gray[rect];
                        var sub_ing_rgb = img[rect];

                        //left eye
                        // NOTE(review): only eye_casc is used; left_eye_casc and
                        // right_eye_casc are created but never consulted.
                        var eyes  = eye_casc.DetectMultiScale(sub_ing, 1.3, 2);
                        int count = 0;
                        foreach (var eye in eyes)
                        {
                            // Consider at most two eye candidates per face.
                            count++;
                            if (count > 2)
                            {
                                count = 0;
                                break;
                            }
                            var rect_eye = new cv.Rect(eye.Location, eye.Size);

                            // Left half of the face rectangle => treat as left eye.
                            if (eye.X + eye.Width < face.Width / 2)
                            {
                                //sub_ing_rgb.Rectangle(rect_eye, new cv.Scalar(0, 255, 0));
                                // Drawing must happen on the UI thread; eye
                                // coordinates are offset back into full-frame space.
                                Dispatcher.Invoke(() =>
                                {
                                    eyes_lay = DrawImg(cv.Extensions.BitmapSourceConverter.ToBitmapSource(img), eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_l, scale_w, scale_h);
                                });
                            }
                        }

                        //right eye (second pass over the same candidates)
                        count = 0;
                        foreach (var eye in eyes)
                        {
                            count++;
                            if (count > 2)
                            {
                                break;
                            }
                            var rect_eye = new cv.Rect(eye.Location, eye.Size);

                            // Right half of the face rectangle => treat as right eye.
                            if (eye.X + eye.Width > face.Width / 2)
                            {
                                Dispatcher.Invoke(() =>
                                {
                                    // Reuse the overlay started by the left-eye pass
                                    // when it exists, otherwise start from the frame.
                                    if (eyes_lay != null)
                                    {
                                        eyes_lay = DrawImg(eyes_lay, eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_r, scale_w, scale_h);
                                    }
                                    else
                                    {
                                        eyes_lay = DrawImg(cv.Extensions.BitmapSourceConverter.ToBitmapSource(img), eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_r, scale_w, scale_h);
                                    }
                                });
                            }
                        }
                    }



                    // Publish the composed overlay (or the raw thresholded frame
                    // when no eyes were drawn) to the UI.
                    Dispatcher.Invoke(() =>
                    {
                        if (eyes_lay != null)
                        {
                            OutImg.Source = eyes_lay;
                        }
                        else
                        {
                            OutImg.Source = cv.Extensions.BitmapSourceConverter.ToBitmapSource(img);
                        }
                    });
                    //Thread.Sleep(100);
                    // NOTE(review): GC.Collect() each frame is a workaround for the
                    // undisposed Mats above; avoid in production code.
                    GC.Collect();
                }
            })
            {
                // Background thread so it does not keep the process alive on exit.
                IsBackground = true
            }.Start();
        }