Detect() 공개 메소드

Detect HaarCascade object in the given image, using predefined parameters
public MCvAvgComp[] Detect ( Image&lt;Gray, Byte&gt; image )
image (Image&lt;Gray, Byte&gt;): The image where the objects are to be detected from
리턴 Emgu.CV.Structure.MCvAvgComp[]
예제 #1
0
        /// <summary>
        /// Demo handler: loads a sample photo, detects faces with a Haar cascade,
        /// and shows the full picture plus a crop of the first detected face.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            var sourceImage = new Bitmap("C:\\Steve_Wozniak.jpg");

            string haarcascade = "haarcascade_frontalface_default.xml";

            using (HaarCascade face = new HaarCascade(haarcascade))
            {
                var image = new Image<Rgb, Byte>(sourceImage);

                using (var gray = image.Convert<Gray, Byte>())
                {
                    var detectedFaces = face.Detect(
                                            gray,
                                            1.1,    // scale step between scan windows
                                            10,     // minimum neighbours per detection
                                            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                            new System.Drawing.Size(20, 20));

                    // Always show the source photo, even when no face was found.
                    MainImage.Source = ToBitmapSource(sourceImage);

                    // BUG FIX: indexing [0] on an empty result used to throw
                    // IndexOutOfRangeException when no face was detected.
                    if (detectedFaces.Length == 0)
                    {
                        return;
                    }

                    var firstFace = detectedFaces[0];
                    System.Drawing.Bitmap bmpImage = image.Bitmap;
                    System.Drawing.Bitmap bmpCrop = bmpImage.Clone(firstFace.rect,
                                                                    bmpImage.PixelFormat);

                    var cropedImage = new Image<Rgb, Byte>(bmpCrop);

                    DetectedFaceImage.Source = ToBitmapSource(cropedImage.Bitmap);
                }
            }
        }
예제 #2
0
        /// <summary>
        /// Detects exactly one face in the given pixel buffer and returns it cropped
        /// and resized to <paramref name="outputSize"/>; returns null unless exactly
        /// one face is found.
        /// </summary>
        /// <param name="pixels">Raw pixel data of the input image.</param>
        /// <param name="initialSize">Dimensions of the input image.</param>
        /// <param name="outputSize">Dimensions of the returned face image.</param>
        /// <param name="haarcascadePath">Path to the Haar cascade XML file.</param>
        /// <returns>The trimmed grayscale face, or null when 0 or >1 faces were found.</returns>
        public static Image<Gray, byte> DetectAndTrimFace(int[] pixels, Size initialSize, Size outputSize, String haarcascadePath)
        {
            // BUG FIX: the height was previously passed as initialSize.Width,
            // which silently broke non-square input images.
            var inBitmap = ConvertToBitmap(pixels, initialSize.Width, initialSize.Height);

            //for testing purposes the received picture can be saved to a folder
            //inBitmap.Save(@"E:\data\phototest\received.bmp");

            // Dispose the frame and the cascade: both wrap native resources and
            // previously leaked on the early-return path.
            using (var grayframe = new Image<Gray, byte>(inBitmap))
            using (var haar = new HaarCascade(haarcascadePath))
            {
                var faces = haar.Detect(grayframe,
                    1.2,
                    3,
                    HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(30, 30));

                // Require an unambiguous result: exactly one face.
                if (faces.Length != 1)
                {
                    return null;
                }

                return grayframe.Copy(faces[0].rect).Resize(outputSize.Width, outputSize.Height, INTER.CV_INTER_CUBIC);
            }
        }
예제 #3
0
        /// <summary>
        /// Runs the configured Haar cascade over a grayscale image and returns the
        /// raw detection results.
        /// </summary>
        /// <param name="grayImage">The grayscale image to scan for faces.</param>
        /// <returns>All detected face regions; empty when nothing matched.</returns>
        protected virtual MCvAvgComp[] GetFacesVector(Image<Gray, byte> grayImage)
        {
            // FIX: dispose the cascade — it wraps native OpenCV memory and
            // previously leaked on every call. The inline comments were also
            // shifted by one line relative to the arguments they described.
            using (var haarCascade = new HaarCascade(this.HaarCascadePath))
            {
                return haarCascade.Detect(grayImage,
                    this.ScanFactor,   // factor by which the search window is scaled between scans
                    this.Neighbours,   // minimum number of neighbour rectangles per detection
                    HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    Size.Empty,        // no minimum window size
                    Size.Empty);       // no maximum window size
            }
        }
예제 #4
0
        /// <summary>
        /// Detects objects in an image with the given cascade file, preferring the
        /// CUDA GPU classifier when available and falling back to the CPU Haar
        /// cascade otherwise.
        /// </summary>
        /// <param name="image">The colour image to scan.</param>
        /// <param name="cascadeFile">Cascade name resolved via CascadeManager.</param>
        /// <param name="scaleFactor">Scale step between scan windows.</param>
        /// <param name="minNeighbors">Minimum neighbour rectangles per detection.</param>
        /// <param name="detectionType">Pruning strategy (CPU path only).</param>
        /// <param name="minSize">Minimum window edge; 0 means unbounded.</param>
        /// <param name="maxSize">Maximum window edge; 0 means unbounded (CPU path only).</param>
        /// <returns>The bounding rectangles of all detected objects.</returns>
        public static Rectangle[] Detect(Image<Bgr, Byte> image, string cascadeFile,
            double scaleFactor = 1.3, int minNeighbors = 10,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE detectionType = Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            int minSize = 20, int maxSize = 0)
        {
            string cascadeFilePath = CascadeManager.GetCascade(cascadeFile);

            // A size of 0 is the "no limit" sentinel.
            Size minimumSize = minSize == 0 ? Size.Empty : new Size(minSize, minSize);
            Size maximumSize = maxSize == 0 ? Size.Empty : new Size(maxSize, maxSize);

            if (GpuInvoke.HasCuda)
            {
                // GPU path. Note: DetectMultiScale takes neither a detection type
                // nor a maximum size, so those parameters only affect the CPU path.
                using (GpuCascadeClassifier cascade = new GpuCascadeClassifier(cascadeFilePath))
                using (GpuImage<Bgr, Byte> gpuImage = new GpuImage<Bgr, byte>(image))
                using (GpuImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
                {
                    return cascade.DetectMultiScale(gpuGray, scaleFactor, minNeighbors, minimumSize);
                }
            }

            // CPU path.
            using (HaarCascade cascade = new HaarCascade(cascadeFilePath))
            using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
            {
                // Normalize brightness / increase contrast before scanning.
                gray._EqualizeHist();

                MCvAvgComp[] hits = cascade.Detect(gray,
                    scaleFactor, minNeighbors,
                    detectionType,
                    minimumSize, maximumSize);

                return hits.Select(x => x.rect).ToArray();
            }
        }
        /// <summary>
        /// Walks every sub-directory of <paramref name="mainDirectory"/>, detects the single
        /// face in each original image (file names without "_"), crops and resizes it to
        /// newSize x newSize and saves it next to the original. Optional equalized ("_N"),
        /// rotated ("_R") and flipped ("_F") variants are saved as well.
        /// </summary>
        /// <param name="mainDirectory">Root directory whose sub-directories hold the images.</param>
        /// <param name="newSize">Edge length (pixels) of the square output faces.</param>
        /// <param name="equalize">When true, histogram-equalize the cropped face.</param>
        /// <param name="rotation">Rotation angle in degrees; 0 disables the rotated variant.</param>
        /// <param name="flip">When true, also save a horizontally flipped variant.</param>
        public static void CreateTrainingSet(String mainDirectory, int newSize, bool equalize, int rotation, bool flip)
        {
            string[] subdirEntries = Directory.GetDirectories(mainDirectory);

            // FIX: load the cascade once instead of once per file, and dispose it —
            // HaarCascade wraps native OpenCV memory and previously leaked.
            using (var haar = new HaarCascade(FileAccessUtil.GetHaarCascade()))
            {
                foreach (var directory in subdirEntries)
                {
                    string[] fileEntries = Directory.GetFiles(directory);

                    // Files containing "_" are outputs of a previous run — skip them.
                    foreach (var file in fileEntries.Where(x => !x.Contains("_")))
                    {
                        // The source images are big — work on a half-size copy.
                        using (var fullImage = new Image<Gray, byte>(file))
                        using (var image = fullImage.Resize(0.5, INTER.CV_INTER_CUBIC))
                        {
                            var faces = haar.Detect(image);

                            // Only accept unambiguous detections: exactly one face.
                            if (faces.Length != 1)
                            {
                                continue;
                            }

                            var face = faces[0];

                            // Crop the face and normalize its size.
                            var faceImg = image.Copy(face.rect).Resize(newSize, newSize, INTER.CV_INTER_CUBIC);
                            String imgName = file.Insert(file.IndexOf("."), "_" + newSize.ToString());
                            if (equalize)
                            {
                                // Inserting at file.IndexOf(".") places "_N" before the
                                // size suffix inserted above (e.g. "x_N_100.jpg").
                                imgName = imgName.Insert(file.IndexOf("."), "_N");
                                faceImg = EqualizeHist(faceImg);
                            }

                            faceImg.Save(imgName);

                            // Save the rotated variant if it was requested.
                            if (rotation != 0)
                            {
                                var rotated = faceImg.Rotate(rotation, new Gray(0.3));
                                rotated.Save(imgName.Insert(file.IndexOf("."), "_R"));
                            }

                            // Save the mirrored variant if it was requested.
                            if (flip)
                            {
                                var fliped = faceImg.Flip(FLIP.HORIZONTAL);
                                fliped.Save(imgName.Insert(file.IndexOf("."), "_F"));
                            }
                        }
                    }
                }
            }
        }
        /// <summary>
        /// Tries each configured cascade in order and returns the hits of the first
        /// cascade that finds anything.
        /// </summary>
        /// <param name="grayImage">The grayscale image to scan.</param>
        /// <returns>The first non-empty detection result; an empty array when every
        /// cascade found nothing; null only if HaarCascadePaths is empty.</returns>
        protected MCvAvgComp[] DetectVarious(Image<Gray, byte> grayImage)
        {
            MCvAvgComp[] eyes = null;

            foreach (var haarcascadePath in this.HaarCascadePaths)
            {
                // FIX: dispose each cascade — it wraps native OpenCV memory and
                // previously leaked per iteration. The inline comments were also
                // shifted by one line relative to the arguments they described.
                using (var haarCascade = new HaarCascade(haarcascadePath))
                {
                    eyes = haarCascade.Detect(grayImage,
                        this.ScanFactor,   // factor by which the search window is scaled between scans
                        this.Neighbours,   // minimum number of neighbour rectangles per detection
                        HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                        Size.Empty,        // no minimum window size
                        Size.Empty);       // no maximum window size
                }

                if (eyes.Any()) break;
            }

            return eyes;
        }
예제 #7
0
        /// <summary>
        /// Detects exactly one face in a square pixel buffer and returns it cropped
        /// and resized to outputSize x outputSize; returns null unless exactly one
        /// face is found.
        /// </summary>
        /// <param name="pixels">Raw pixel data of a square input image.</param>
        /// <param name="initialSize">Edge length of the input image in pixels.</param>
        /// <param name="outputSize">Edge length of the returned face image.</param>
        /// <returns>The trimmed grayscale face, or null when 0 or >1 faces were found.</returns>
        public static Image<Gray, byte> DetectAndTrimFace(int[] pixels, int initialSize, int outputSize)
        {
            var inBitmap = ConvertToBitmap(pixels, initialSize);
            //inBitmap.Save(@"E:\data\phototest\received.bmp");

            // FIX: dispose the frame and the cascade — both wrap native resources
            // and were never released before.
            using (var grayframe = new Image<Gray, byte>(inBitmap))
            using (var haar = new HaarCascade(GetHaarCascade()))
            {
                var faces = haar.Detect(grayframe,
                    1.2,
                    3,
                    HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(30, 30));

                // Require an unambiguous result: exactly one face.
                if (faces.Length != 1)
                {
                    return null;
                }

                return grayframe.Copy(faces[0].rect).Resize(outputSize, outputSize, INTER.CV_INTER_CUBIC);
            }
        }
예제 #8
0
        /// <summary>
        /// Background worker: scans a directory for *.jpg files, finds the biggest
        /// face in each, resizes it to 100x100 and stores it in the face database.
        /// Progress is reported once per processed file.
        /// </summary>
        /// <param name="sender">The BackgroundWorker raising the event.</param>
        /// <param name="e">e.Argument holds the directory path to scan.</param>
        private void addImagesWorker_DoWork( object sender, System.ComponentModel.DoWorkEventArgs e )
        {
            String dir = (String)e.Argument;
            FileInfo [] files = new DirectoryInfo(dir).GetFiles("*.jpg");

            // FIX: dispose the cascade — it wraps native OpenCV memory.
            using (var haar = new HaarCascade("haarcascade_frontalface_default.xml"))
            {
                int count = 0;

                foreach (FileInfo file in files)
                {
                    // The image is loaded as grayscale already; the extra
                    // Convert<Gray, byte>() copy the old code made was redundant.
                    using (var img = new Image<Gray, Byte>(dir + "\\" + file.Name))
                    {
                        var faces = haar.Detect(img, 1.4, 4, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                                new Size(img.Width / 8, img.Height / 8));

                        if (faces.Length > 0)
                        {
                            // Find the biggest face among the detections.
                            Rectangle rect = faces[0].rect;
                            for (int i = 1; i < faces.Length; i++)
                            {
                                if (faces[i].rect.Width > rect.Width &&
                                    faces[i].rect.Height > rect.Height)
                                {
                                    rect = faces[i].rect;
                                }
                            }

                            img.ROI = rect;
                            Image<Gray, Byte> face = img.Clone();
                            Image<Gray, Byte> faceResized = face.Resize(100, 100,
                                            Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);

                            if (faceResized != null)
                            {
                                // File names are expected to look like "First Last 42.jpg".
                                var splitted = file.Name.Substring(0, file.Name.Length - 4).Split(' ');
                                int tmp;
                                // BUG FIX: guard the array length before indexing splitted[2] —
                                // a name with fewer parts used to throw IndexOutOfRangeException.
                                // Also reuse the already-parsed value instead of re-parsing.
                                if (splitted.Length >= 3 && int.TryParse(splitted[2], out tmp))
                                {
                                    faceResized.Save(Learn.createFileName(splitted[0], splitted[1], tmp));
                                }
                            }
                        }
                    }
                    count++;
                    addImagesWorker.ReportProgress(count, DateTime.Now);
                }
            }
        }
        /// <summary>
        /// Loads an X-ray image and runs two humerus Haar-cascade passes over it
        /// (SCALE_IMAGE and DO_CANNY_PRUNING). When neither pass flags a region,
        /// the image is edge-filtered and re-scanned with a second cascade.
        /// Flagged regions are drawn onto imgOriginal in a colour per pass
        /// (blue/red for the first cascade, orange/green for the fallback).
        /// </summary>
        /// <param name="FileName">Path of the image to analyse.</param>
        /// <param name="comm">Communicator handed through to StartDye.</param>
        /// <returns>1 when any pass flagged a region (Vote), otherwise 0.</returns>
        public int LoadAndProcessImage(string FileName, Communicator comm)
        {
            int Vote = 0;
            imgOriginal = new Image<Bgr, Byte>(FileName);
            imgGray = imgOriginal.Convert<Gray, Byte>();
            //BitAnalysis.StartDye(0, 0, imgGray.Height, imgGray.Width, imgGray);

            // NOTE(review): the cascade file name appears to encode the detection
            // parameters reused below (4.8 / 18) — confirm when retraining.
            hcHumerus = new HaarCascade(".\\haarHumerus_03112013_4.8_18.xml");
            ibImage.Image = imgBlank;

            // First pass: the same cascade run with two different pruning strategies.
            acHumerus = hcHumerus.Detect(imgGray,
                        4.8,
                        18,
                        HAAR_DETECTION_TYPE.SCALE_IMAGE,
                        Size.Empty,
                        Size.Empty);
            acHumerus1 = hcHumerus.Detect(imgGray,
                                        4.8,
                                        18,
                                        HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                        Size.Empty,
                                        Size.Empty);

            int count1 = 0, count2 = 0;
            foreach (MCvAvgComp acHum in acHumerus)
            {
                StartDye(acHum.rect.X, acHum.rect.Y, acHum.rect.Width, acHum.rect.Height, imgGray,comm);
                // NOTE(review): Flag is presumably set by StartDye for the region
                // just dyed — confirm against its implementation.
                if (Flag)  // region coordinates (x, y, width, height) indicate a hit
                {
                    imgOriginal.Draw(acHum.rect, new Bgr(Color.Blue), 2);
                    count1++;
                    Vote = 1;
                }
                imgGray.Draw(acHum.rect, new Gray(0.0), 1);
            }
            // Fall back to the Canny-pruned hits only when the first pass found nothing.
            if (count1 ==0)
            {
                foreach (MCvAvgComp acHum1 in acHumerus1)
                {
                    StartDye(acHum1.rect.X, acHum1.rect.Y, acHum1.rect.Width, acHum1.rect.Height, imgGray,comm);
                    if (Flag)  // region coordinates (x, y, width, height) indicate a hit
                    {
                        imgOriginal.Draw(acHum1.rect, new Bgr(Color.Red), 2);
                        count2++;
                        Vote = 1;
                    }
                    imgGray.Draw(acHum1.rect, new Gray(0.0), 1);
                }
            }
            // Last resort: enhance edges and retry with the second cascade.
            if (count1 == 0 &&  count2 == 0 )
            {
                imgGray = imgGray.AddWeighted(imgGray, 1.0, 0.0, 0.0);
                imgGray = imgGray.ThresholdToZero(new Gray(100));
                imgGray = imgGray.SmoothGaussian(9);
                imgGray = imgGray.Canny(0, 80);

                hcHumerus = new HaarCascade(".\\HaarHumerus_03172013_2.8_3.xml");

                acHumerus = hcHumerus.Detect(imgGray,
                   2.8,
                   3,
                    HAAR_DETECTION_TYPE.SCALE_IMAGE,
                    Size.Empty,
                    Size.Empty);
                acHumerus1 = hcHumerus.Detect(imgGray,
                    2.8,
                    3,
                    HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    Size.Empty,
                    Size.Empty);
                foreach (MCvAvgComp acHum in acHumerus)
                {
                    StartDye(acHum.rect.X, acHum.rect.Y, acHum.rect.Width, acHum.rect.Height, imgGray,comm);
                    if (Flag)  // region coordinates (x, y, width, height) indicate a hit
                    {
                        imgOriginal.Draw(acHum.rect, new Bgr(Color.Orange), 2);
                        Vote = 1;
                    }
                    imgGray.Draw(acHum.rect, new Gray(0.0), 1);
                }
                foreach (MCvAvgComp acHum1 in acHumerus1)
                {
                    StartDye(acHum1.rect.X, acHum1.rect.Y, acHum1.rect.Width, acHum1.rect.Height, imgGray,comm);
                    if (Flag)  // region coordinates (x, y, width, height) indicate a hit
                    {
                        imgOriginal.Draw(acHum1.rect, new Bgr(Color.Green), 2);
                        Vote = 1;
                    }
                    imgGray.Draw(acHum1.rect, new Gray(), 1);
                }

            }

            return Vote;
        }
예제 #10
0
        /// <summary>
        /// Loads an X-ray image and runs two humerus Haar-cascade passes
        /// (SCALE_IMAGE and DO_CANNY_PRUNING). When neither pass flags a region,
        /// the image is edge-filtered and re-scanned with a second cascade.
        /// Flagged regions are drawn onto imgOriginal in a colour per pass
        /// (blue/red for the first cascade, orange/green for the fallback).
        /// Progress is reported via the PB progress bar.
        /// </summary>
        /// <param name="FileName">Path of the image to analyse.</param>
        /// <returns>True when any pass flagged a region as broken.</returns>
        public bool LoadAndProcessImage(string FileName)
        {
            bool Broken = false;
            imgOriginal = new Image<Bgr, Byte>(FileName);
            imgGray = imgOriginal.Convert<Gray, Byte>();
            //BitAnalysis.StartDye(0, 0, imgGray.Height, imgGray.Width, imgGray);

            // The cascade file name encodes the parameters reused below (4.8 / 18).
            hcHumerus = new HaarCascade(@"c:\haarHumerus_03112013_4.8_18.xml"); //haarHumerus_03112013_4.8_18
            ibImage.Image = imgBlank;

            // First pass: the same cascade run with two different pruning strategies.
            acHumerus = hcHumerus.Detect(imgGray,
                        4.8,
                        18,
                        HAAR_DETECTION_TYPE.SCALE_IMAGE,
                        Size.Empty,
                        Size.Empty);
            acHumerus1 = hcHumerus.Detect(imgGray,
                                        4.8,
                                        18,
                                        HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                        Size.Empty,
                                        Size.Empty);

            int count1 = 0, count2 = 0;
            PB.Value += 10;
            foreach (MCvAvgComp acHum in acHumerus)
            {
                if (GetCoordination(acHum))  // region coordinates indicate a break
                {
                    imgOriginal.Draw(acHum.rect, new Bgr(Color.Blue), 2);
                    count1++;
                    Broken = true;
                }
                imgGray.Draw(acHum.rect, new Gray(0.0), 1);
            }
            PB.Value += 10;
            // Fall back to the Canny-pruned hits only when the first pass found nothing.
            if (count1 == 0)
            {
                foreach (MCvAvgComp acHum1 in acHumerus1)
                {
                    if (GetCoordination(acHum1))  // region coordinates indicate a break
                    {
                        imgOriginal.Draw(acHum1.rect, new Bgr(Color.Red), 2);
                        count2++;
                        Broken = true;
                    }
                    imgGray.Draw(acHum1.rect, new Gray(0.0), 1);
                }
            }
            // Last resort: enhance edges and retry with the second cascade.
            if (count1 == 0  && count2 == 0 )
            {
                imgGray = imgGray.AddWeighted(imgGray, 1.0, 0.0, 0.0);
                imgGray = imgGray.ThresholdToZero(new Gray(100));
                imgGray = imgGray.SmoothGaussian(9);
                imgGray = imgGray.Canny(0, 80);

                hcHumerus = new HaarCascade(@"c:\HaarHumerus_03172013_2.8_3.xml");

                acHumerus = hcHumerus.Detect(imgGray,
                   2.8,
                   3,
                    HAAR_DETECTION_TYPE.SCALE_IMAGE,
                    Size.Empty,
                    Size.Empty);
                acHumerus1 = hcHumerus.Detect(imgGray,
                    2.8,
                    3,
                    HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    Size.Empty,
                    Size.Empty);
                foreach (MCvAvgComp acHum in acHumerus)
                {
                    if (GetCoordination(acHum))  // region coordinates indicate a break
                    {
                        imgOriginal.Draw(acHum.rect, new Bgr(Color.Orange), 2);
                        Broken = true;
                    }
                    imgGray.Draw(acHum.rect, new Gray(0.0), 1);
                }
                foreach (MCvAvgComp acHum1 in acHumerus1)
                {
                    if (GetCoordination(acHum1))  // region coordinates indicate a break
                    {
                        imgOriginal.Draw(acHum1.rect, new Bgr(Color.Green), 2);
                        Broken = true;
                    }
                    imgGray.Draw(acHum1.rect, new Gray(), 1);
                }

            }
            // BUG FIX: this line was "PB.Value = +20;", which assigned 20 to the
            // progress bar instead of advancing it by 20 like the steps above.
            PB.Value += 20;

            return Broken;
        }
예제 #11
0
        //cannyEdgeDetect ends
        /// <summary>
        /// Detects faces in the given colour image using the default frontal-face
        /// Haar cascade. Faces smaller than 1/12 or larger than 1/2 of the image
        /// are ignored.
        /// </summary>
        /// <param name="image">The colour image to scan.</param>
        /// <returns>The detected face regions, or null when detection failed.</returns>
        private MCvAvgComp[] faceDetection(Image<Bgr, Byte> image)
        {
            Console.Out.WriteLine("Finding faces in the images...");
            grayScale_Image = convertToGrayScale(image);
            try
            {
                // FIX: dispose the cascade — it wraps native OpenCV memory and
                // previously leaked on every call.
                //Face is detected by using haar-like structure training. The trained data set is already defined in the XML file
                using (var facehaar = new HaarCascade(@"haarcascade_frontalface_default.xml"))
                {
                    //Use Canny pruning to skip regions unlikely to contain a face.
                    return facehaar.Detect(grayScale_Image, 1.1, 4, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(grayScale_Image.Width / 12, grayScale_Image.Height / 12), new Size(grayScale_Image.Width / 2, grayScale_Image.Height / 2));
                }
            }
            catch (Exception ex)
            {
                // BUG FIX: the old message claimed "File Not Found" for ANY failure,
                // which was misleading when the cascade loaded but detection failed.
                Console.Out.WriteLine("Face detection failed (missing or invalid cascade?): " + ex.Message);
            }

            return null;
        }
예제 #12
0
        /// <summary>
        /// Demo entry point: detects faces and eyes in "lena.jpg", using the CUDA
        /// GPU classifiers when available and the CPU Haar cascades otherwise,
        /// draws the results (faces blue, eyes red) and displays the image with
        /// the elapsed detection time.
        /// </summary>
        static void Run()
        {
            Image<Bgr, Byte> image = new Image<Bgr, byte>("lena.jpg"); //Read the files as an 8-bit Bgr image

             Stopwatch watch;
             String faceFileName = "haarcascade_frontalface_default.xml";
             String eyeFileName = "haarcascade_eye.xml";

             if (GpuInvoke.HasCuda)
             {
            using (GpuCascadeClassifier face = new GpuCascadeClassifier(faceFileName))
            using (GpuCascadeClassifier eye = new GpuCascadeClassifier(eyeFileName))
            {
               watch = Stopwatch.StartNew();
               using (GpuImage<Bgr, Byte> gpuImage = new GpuImage<Bgr, byte>(image))
               using (GpuImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
               {
                  Rectangle[] faceRegion = face.DetectMultiScale(gpuGray, 1.1, 10, Size.Empty);
                  foreach (Rectangle f in faceRegion)
                  {
                     //draw the face detected in the 0th (gray) channel with blue color
                     image.Draw(f, new Bgr(Color.Blue), 2);
                     //search for eyes only inside the detected face region
                     using (GpuImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                     {
                        //For some reason a clone is required.
                        //Might be a bug of GpuCascadeClassifier in opencv
                        using (GpuImage<Gray, Byte> clone = faceImg.Clone())
                        {
                           Rectangle[] eyeRegion = eye.DetectMultiScale(clone, 1.1, 10, Size.Empty);

                           foreach (Rectangle e in eyeRegion)
                           {
                              //translate the eye rectangle from face-local to image coordinates
                              Rectangle eyeRect = e;
                              eyeRect.Offset(f.X, f.Y);
                              image.Draw(eyeRect, new Bgr(Color.Red), 2);
                           }
                        }
                     }
                  }
               }
               watch.Stop();
            }
             }
             else
             {
            //Read the HaarCascade objects
            using(HaarCascade face = new HaarCascade(faceFileName))
            using(HaarCascade eye = new HaarCascade(eyeFileName))
            {
               watch = Stopwatch.StartNew();
               using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>()) //Convert it to Grayscale
               {
                  //normalizes brightness and increases contrast of the image
                  gray._EqualizeHist();

                  //Detect the faces  from the gray scale image and store the locations as rectangle
                  //The first dimensional is the channel
                  //The second dimension is the index of the rectangle in the specific channel
                  MCvAvgComp[] facesDetected = face.Detect(
                     gray,
                     1.1,
                     10,
                     Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                     new Size(20, 20));

                  foreach (MCvAvgComp f in facesDetected)
                  {
                     //draw the face detected in the 0th (gray) channel with blue color
                     image.Draw(f.rect, new Bgr(Color.Blue), 2);

                     //Set the region of interest on the faces so eyes are only searched there
                     gray.ROI = f.rect;
                     MCvAvgComp[] eyesDetected = eye.Detect(
                        gray,
                        1.1,
                        10,
                        Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                        new Size(20, 20));
                     gray.ROI = Rectangle.Empty;

                     foreach (MCvAvgComp e in eyesDetected)
                     {
                        //translate the eye rectangle from face-local to image coordinates
                        Rectangle eyeRect = e.rect;
                        eyeRect.Offset(f.rect.X, f.rect.Y);
                        image.Draw(eyeRect, new Bgr(Color.Red), 2);
                     }
                  }
               }
               watch.Stop();
            }
             }

             //display the image
             ImageViewer.Show(image, String.Format(
            "Completed face and eye detection using {0} in {1} milliseconds",
            GpuInvoke.HasCuda ? "GPU": "CPU",
            watch.ElapsedMilliseconds));
        }
예제 #13
0
        /// <summary>
        /// Captures one webcam frame, runs face detection on it, and raises a
        /// balloon-tip alert when at least one face ("shoulder surfer") is seen.
        /// The whole pass is timed with _stopwatch.
        /// </summary>
        public void ProcessFaces()
        {
            this._stopwatch.Reset();
            this._stopwatch.Start();
            // get the current frame from our webcam
            Image<Bgr, Byte> frame = _capture.QueryFrame();
            //Read the HaarCascade objects
            using (HaarCascade face = new HaarCascade(FACEFILENAME))
            // FIX: dispose the grayscale copy — it was never released before.
            using (Image<Gray, Byte> gray = frame.Convert<Gray, Byte>())
            {
                //normalizes brightness and increases contrast of the image
                gray._EqualizeHist();
                //Detect the faces from the gray scale image and store the locations as rectangles
                MCvAvgComp[] facesDetected = face.Detect(
                     gray,
                     1.1,
                     10,
                     Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                     new System.Drawing.Size(20, 20));
                if (facesDetected != null && facesDetected.Length > 0)
                {
                    this.notifyIcon.ShowBalloonTip(5000, "Alert!", "Shoulder surfer detected!",ToolTipIcon.Warning);
                    Console.WriteLine("Shoulder surfer detected!");
                }
            }
            this._stopwatch.Stop();
            Console.WriteLine("Elapsed {0}s", this._stopwatch.Elapsed.TotalSeconds);
        }
예제 #14
0
파일: MainForm.cs 프로젝트: jmdbo/SS
        /// <summary>
        /// Frame handler: grabs a frame from the capture device, detects faces and
        /// draws a green rectangle around each before showing the frame.
        /// </summary>
        private void ProcessFrame(object sender, EventArgs e)
        {
            if (viewer != null && capture != null)
            {
                img = capture.QueryFrame();

                // FIX: the cascade and the grayscale copy were created on every
                // frame and never disposed, leaking native memory at video rate.
                using (var haar = new HaarCascade("../../haarcascade_frontalface_default.xml"))
                using (Image<Gray, byte> grayframe = img.Convert<Gray, byte>())
                {
                    var faces = haar.Detect(grayframe, 1.2, 3, HAAR_DETECTION_TYPE.DO_ROUGH_SEARCH, new Size(20, 20), new Size(img.Width / 2, img.Height / 2));
                    foreach (var face in faces)
                    {
                        img.Draw(face.rect, new Bgr(0, double.MaxValue, 0), 3);
                    }
                }
                viewer.Image = img;
            }
        }
예제 #15
0
파일: DetectFace.cs 프로젝트: Raptek/STEM
 /// <summary>
 /// Detects faces and, within each face, eyes, using the two given Haar cascade
 /// files. Detected faces are appended to facesList and eye regions (offset to
 /// whole-image coordinates) to eyesList; detectionTime receives the elapsed
 /// detection time in milliseconds.
 /// </summary>
 public static void detectFaceHaar(Image<Bgr, Byte> image, String faceFileName, String eyesFileName, List<MCvAvgComp> facesList, List<MCvAvgComp> eyesList, out long detectionTime)
 {
     Stopwatch timer;
     using (HaarCascade faceDetector = new HaarCascade(faceFileName))
     using (HaarCascade eyesDetector = new HaarCascade(eyesFileName))
     {
         timer = Stopwatch.StartNew();
         using (Image<Gray, Byte> grayImage = image.Convert<Gray, Byte>())
         {
             //grayImage._EqualizeHist();
             MCvAvgComp[] facesRegion = faceDetector.Detect(grayImage, 1.4, 3, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.FIND_BIGGEST_OBJECT, new Size(image.Width / 8, image.Height / 8) /*new Size(24, 24)*/, Size.Empty);
             facesList.AddRange(facesRegion);
             foreach (MCvAvgComp faceHit in facesList)
             {
                 // Restrict the eye search to the face region, then restore the full view.
                 grayImage.ROI = faceHit.rect;
                 MCvAvgComp[] eyesDetected = eyesDetector.Detect(grayImage, 1.15, 3, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20) /*new Size(24, 24)*/, Size.Empty);
                 grayImage.ROI = Rectangle.Empty;
                 foreach (MCvAvgComp eyeHit in eyesDetected)
                 {
                     // MCvAvgComp is a value type: offsetting this copy translates
                     // the eye rectangle to image coordinates without touching the
                     // detector's own result.
                     MCvAvgComp shifted = eyeHit;
                     shifted.rect.Offset(faceHit.rect.X, faceHit.rect.Y);
                     eyesList.Add(shifted);
                 }
             }
         }
         timer.Stop();
     }
     detectionTime = timer.ElapsedMilliseconds;
 }