DetectMultiScale() публичный Метод

Finds rectangular regions in the given image that are likely to contain objects the cascade has been trained for and returns those regions as a sequence of rectangles. The function scans the image several times at different scales. Each time it considers overlapping regions in the image. It may also apply some heuristics to reduce the number of analyzed regions, such as Canny pruning. After it has proceeded and collected the candidate rectangles (regions that passed the classifier cascade), it groups them and returns a sequence of average rectangles for each large enough group.
public DetectMultiScale ( Image&lt;Gray, Byte&gt; image, double scaleFactor, int minNeighbors, Size minSize, Size maxSize ) : System.Drawing.Rectangle[]
image Image&lt;Gray, Byte&gt; The image where the objects are to be detected from
scaleFactor double The factor by which the search window is scaled between the subsequent scans, for example, 1.1 means increasing window by 10%
minNeighbors int Minimum number (minus 1) of neighbor rectangles that makes up an object. All the groups of a smaller number of rectangles than min_neighbors-1 are rejected. If min_neighbors is 0, the function does not do any grouping at all and returns all the detected candidate rectangles, which may be useful if the user wants to apply a customized grouping procedure
minSize System.Drawing.Size Minimum window size. Use Size.Empty for default, where it is set to the size of samples the classifier has been trained on (~20x20 for face detection)
maxSize System.Drawing.Size Maximum window size. Use Size.Empty for default, where the parameter will be ignored.
Результат System.Drawing.Rectangle[]
Пример #1
1
        //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::


        //::::::::::::Detection of the hand in a gray image::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
        /// <summary>
        /// Runs the hand Haar cascade over a grayscale frame, draws every detected
        /// region on the frame and returns [detections, frame].
        /// </summary>
        /// <param name="frame">Grayscale frame to scan; may be null (then only the frame is returned).</param>
        /// <returns>
        /// A two-element list: element 0 is a Rectangle[] of detections (empty array
        /// when nothing was found), element 1 is the annotated frame.
        /// </returns>
        public List<Object> Detection(Image<Gray, Byte> frame)
        {
            List<Object> listReturn = new List<object>(2);
            // NOTE(review): reloading the cascade from an absolute, machine-specific
            // path on every call is expensive; consider caching it and making the
            // path configurable.
            haar = new CascadeClassifier(@"C:\Users\America\Documents\MySystemV1\classifier\cascade.xml");

            if (frame != null)
            {
                System.Drawing.Rectangle[] hands = haar.DetectMultiScale(frame, 1.1, 4, new System.Drawing.Size(frame.Width / 8, frame.Height / 8), new System.Drawing.Size(frame.Width / 3, frame.Height / 3));

                // BUG FIX: the original called Inflate on the foreach iteration
                // variable (a value-type copy), so the adjusted rectangles were drawn
                // but the *unadjusted* ones were returned. Adjust the array elements
                // in place so drawing and the returned list agree (same approach as
                // the overload taking a PointF).
                for (int i = 0; i < hands.Length; i++)
                {
                    // Stretch the roi vertically: the full finger length is sometimes
                    // cut off by the raw detection.
                    hands[i].Inflate(-5, 17);
                    frame.Draw(hands[i], new Gray(255), 3);
                }

                // Length instead of the LINQ Count() extension for a plain array.
                if (hands.Length == 0)
                {
                    listReturn.Add(new Rectangle[0]);
                }
                else
                {
                    listReturn.Add(hands);
                }
            }

            listReturn.Add(frame);

            return listReturn;
            // Both values are returned when the frame is non-null, which the caller
            // (data poll) is expected to guarantee.
        }//finaliza detection()
Пример #2
0
        /// <summary>
        /// Implements the car detector filter and adds the details to the advertDetails object.
        /// Tries the side-view cascade first; only when no side view is found are the
        /// front and back cascades tried, and the view with the most detections wins.
        /// </summary>
        /// <param name="_advertDetails">The advertDetails object where information about the advert is stored</param>
        public virtual void pump(ref AdvertDetails _advertDetails)
        {
            Rectangle rect = new Rectangle();
            String view = "Unknown";

            Image<Gray, Byte> image = _advertDetails.Image.Convert<Gray, byte>();

            Rectangle[] rectangleList = new Rectangle[0];

            // Side-view pass.
            CascadeClassifier classifier = new CascadeClassifier(sideClassifier);
            Rectangle[] temp = classifier.DetectMultiScale(image, scaleFac, numNeighbours, side_minSize, maxSize);
            if (temp.Length > rectangleList.Length)
            {
                rectangleList = temp;
                view = "Side";
            }

            if (view != "Side")
            {
                // BUG FIX: the original constructed the front classifier, immediately
                // overwrote it with the side classifier, and then ran the "Front" pass
                // with the side cascade still loaded. Load the front cascade here.
                classifier = new CascadeClassifier(frontClassifier);
                temp = classifier.DetectMultiScale(image, scaleFac, numNeighbours, fb_minSize, maxSize);
                if (temp.Length > rectangleList.Length)
                {
                    rectangleList = temp;
                    view = "Front";
                }

                // Back-view pass.
                classifier = new CascadeClassifier(backClassifier);
                temp = classifier.DetectMultiScale(image, scaleFac, numNeighbours, fb_minSize, maxSize);
                if (temp.Length > rectangleList.Length)
                {
                    rectangleList = temp;
                    view = "Back";
                }
            }

            if (rectangleList.Length > 0)
            {
                // Keep only the largest candidate rectangle as the car.
                rect = getLargest(rectangleList);
                _advertDetails.Rect = rect;
                _advertDetails.CarFound = true;
                _advertDetails.View = view;
                _advertDetails.CarRating = 1;
            }
            else
            {
                _advertDetails.CarFound = false;
                _advertDetails.Error = "No car found.";
            }
        }
Пример #3
0
        /// <summary>
        /// Detects faces in a color image with a Haar cascade, collecting both the
        /// face rectangles and the corresponding sub-images.
        /// </summary>
        /// <param name="image">Input BGR image.</param>
        /// <param name="faceFileName">Path to the Haar cascade XML file.</param>
        /// <param name="recFaces">Receives the detected face rectangles.</param>
        /// <param name="imgFaces">Receives the face sub-images cut from the input image.</param>
        /// <param name="detectionTime">Elapsed detection time in milliseconds.</param>
        public static void Detect(Image<Bgr, Byte> image, String faceFileName, List<Rectangle> recFaces, List<Image<Bgr, Byte>> imgFaces, out long detectionTime)
        {
            Stopwatch timer;

            // Load the cascade definition from disk.
            using (CascadeClassifier faceClassifier = new CascadeClassifier(faceFileName))
            {
                timer = Stopwatch.StartNew();
                // The detector operates on a grayscale copy of the input.
                using (Image<Gray, Byte> grayImage = image.Convert<Gray, Byte>())
                {
                    // Normalize brightness / boost contrast before detection.
                    grayImage._EqualizeHist();

                    // Multi-scale pass; each hit is one face rectangle.
                    Rectangle[] detected = faceClassifier.DetectMultiScale(grayImage, 1.1, 10, new Size(20, 20), Size.Empty);
                    recFaces.AddRange(detected);

                    // Capture the sub-image of every hit for later display.
                    foreach (Rectangle region in detected)
                    {
                        imgFaces.Add(image.GetSubRect(region));
                    }
                }
                timer.Stop();
            }
            detectionTime = timer.ElapsedMilliseconds;
        }
Пример #4
0
        /// <summary>
        /// CPU face + eye detection: finds faces in the image, then searches each
        /// face region for eyes, reporting eye rectangles in image coordinates.
        /// </summary>
        public static void detectFaceCPU(Image<Bgr, Byte> image, String faceFileName, String eyesFileName, List<Rectangle> facesList, List<Rectangle> eyesList, out long detectionTime)
        {
            Stopwatch timer;
            using (CascadeClassifier faceDetector = new CascadeClassifier(faceFileName))
            using (CascadeClassifier eyeDetector = new CascadeClassifier(eyesFileName))
            {
                timer = Stopwatch.StartNew();
                using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
                {
                    Rectangle[] faceRegions = faceDetector.DetectMultiScale(gray, 1.1, 10, new Size(image.Width / 8, image.Height / 8), Size.Empty);
                    facesList.AddRange(faceRegions);

                    foreach (Rectangle faceRect in faceRegions)
                    {
                        // Restrict the eye search to the current face rectangle.
                        gray.ROI = faceRect;
                        Rectangle[] eyeRegions = eyeDetector.DetectMultiScale(gray, 1.1, 10, new Size(image.Width / 8, image.Height / 8), Size.Empty);
                        gray.ROI = Rectangle.Empty;

                        foreach (Rectangle eyeRegion in eyeRegions)
                        {
                            // Translate ROI-relative coordinates back to image space.
                            Rectangle shifted = eyeRegion;
                            shifted.Offset(faceRect.X, faceRect.Y);
                            eyesList.Add(shifted);
                        }
                    }
                }
                timer.Stop();
            }
            detectionTime = timer.ElapsedMilliseconds;
        }
Пример #5
0
   // Unity entry point: runs once at scene start.
   // Loads the "lena" texture, extracts the face-cascade XML from Resources to a
   // readable file path, detects faces on the image and shows the annotated
   // result in a GUITexture.
   void Start()
   {  
      Texture2D lenaTexture = Resources.Load<Texture2D>("lena");    

      //updateTextureWithString("load lena ok");
      Image<Bgr, Byte> img = TextureConvert.Texture2dToImage<Bgr, byte>(lenaTexture);
      //updateTextureWithString("convert to image ok");

      // Alternative cascades kept for experimentation:
      //String fileName = "haarcascade_frontalface_default";
      //String fileName = "lbpcascade_frontalface";
      String fileName = "haarcascade_frontalface_alt2";
      String filePath = Path.Combine(Application.persistentDataPath, fileName + ".xml");
      // The existence check is disabled (commented out), so the cascade XML is
      // re-written on every start; CascadeClassifier needs a real file path.
      //if (!File.Exists(filePath))
      {
         //updateTextureWithString("start move cascade xml");
         TextAsset cascadeModel = Resources.Load<TextAsset>(fileName);
         
#if UNITY_METRO
         UnityEngine.Windows.File.WriteAllBytes(filePath, cascadeModel.bytes);
#else
         File.WriteAllBytes(filePath, cascadeModel.bytes);
#endif
         //updateTextureWithString("File size: " + new FileInfo(filePath).Length);
      }

      
      using (CascadeClassifier classifier = new CascadeClassifier(filePath))
      using (Image<Gray, Byte> gray = img.Convert<Gray, byte>())
      {
         //updateTextureWithString("classifier create ok");

         Rectangle[] faces = null;
         try
         {
            // Parameterless overload: Emgu's default scale factor / neighbours / size.
            faces = classifier.DetectMultiScale(gray);

            //updateTextureWithString("face detected");
            foreach (Rectangle face in faces)
            {
               CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
            }
         }
         catch (Exception e)
         {
            // NOTE(review): the exception is swallowed silently ('e' is unused) and
            // Start() aborts — consider logging before returning.
            //updateTextureWithString(e.Message);
            return;
         }
         
         //updateTextureWithString(String.Format("{0} face found on image of {1} x {2}", faces.Length, img.Width, img.Height));
      }

      // Convert back to a texture (flipped: texture rows are bottom-up) and show it.
      Texture2D texture = TextureConvert.ImageToTexture2D(img, FlipType.Vertical);

      this.GetComponent<GUITexture>().texture = texture;
      this.GetComponent<GUITexture>().pixelInset = new Rect(-img.Width / 2, -img.Height / 2, img.Width, img.Height);
   }
 /// <summary>
 /// Detects eye regions inside a (grayscale) face image using the given Haar
 /// cascade file and the instance's size/scale/neighbour settings.
 /// </summary>
 /// <param name="eyeFileName">Path to the eye Haar cascade XML file.</param>
 /// <param name="imageFace">Grayscale face image to search.</param>
 /// <returns>Detected eye rectangles, relative to <paramref name="imageFace"/>.</returns>
 public Rectangle[] FindEyes(string eyeFileName, Image<Gray, Byte> imageFace)
 {
     using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
     {
         // FIX: removed an unused Mat (eyeRegionMat) that the original allocated
         // and disposed without ever reading or writing it.
         return eye.DetectMultiScale(imageFace, eyescale, eyeneighbors, new Size(eyeminsize, eyeminsize), new Size(eyemaxsize, eyemaxsize));
     }
 }
 /// <summary>
 /// Runs the VW-logo cascade over the advert image, restricted to the detected
 /// car rectangle when one exists.
 /// </summary>
 /// <param name="_advertDetails">Advert being processed; supplies the image and, optionally, the car rectangle.</param>
 // NOTE(review): the detection result is computed but never stored on
 // _advertDetails, and the classifier is never disposed — confirm whether this
 // filter is unfinished or intentionally a no-op.
 public void pump(ref AdvertDetails _advertDetails)
 {
     CascadeClassifier cc = new CascadeClassifier(VWclassifier);
     // CarFound is already a bool; the conditional is redundant but preserved.
     bool carExitst = _advertDetails.CarFound?true:false;
     Image<Gray, byte> image;
     if(carExitst)
         // Search only inside the previously found car region.
         image = _advertDetails.Image.GetSubRect(_advertDetails.Rect).Convert<Gray, byte>();
     else
         image = _advertDetails.Image.Convert<Gray, byte>();
     Rectangle[] logosFound = cc.DetectMultiScale(image, 1.05, 1, new Size(20,20), new Size(40,40));
 }
Пример #8
0
   // Unity entry point: runs once at scene start.
   // UMat variant of the face-detection demo: loads "lena", extracts the cascade
   // XML to a readable path, detects faces and displays the annotated texture.
   void Start()
   {  
      Texture2D lenaTexture = Resources.Load<Texture2D>("lena");    

      UMat img = new UMat();
      TextureConvert.Texture2dToOutputArray(lenaTexture, img);
      // Texture rows are bottom-up; flip so OpenCV sees the image upright.
      CvInvoke.Flip(img, img, FlipType.Vertical);

      // Alternative cascades kept for experimentation:
      //String fileName = "haarcascade_frontalface_default";
      //String fileName = "lbpcascade_frontalface";
      String fileName = "haarcascade_frontalface_alt2";
      String filePath = Path.Combine(Application.persistentDataPath, fileName + ".xml");
      // The existence check is disabled (commented out), so the cascade XML is
      // re-written on every start; CascadeClassifier needs a real file path.
      //if (!File.Exists(filePath))
      {
         TextAsset cascadeModel = Resources.Load<TextAsset>(fileName);
         
#if UNITY_METRO
         UnityEngine.Windows.File.WriteAllBytes(filePath, cascadeModel.bytes);
#else
         File.WriteAllBytes(filePath, cascadeModel.bytes);
#endif
      }

      using (CascadeClassifier classifier = new CascadeClassifier(filePath))
      using (UMat gray = new UMat())
      {
         CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);

         Rectangle[] faces = null;
         try
         {
            // Parameterless overload: Emgu's default scale factor / neighbours / size.
            faces = classifier.DetectMultiScale(gray);

            foreach (Rectangle face in faces)
            {
               CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
            }
         }
         catch (Exception e)
         {
            Debug.Log(e.Message);
            
            return;
         }
      }

      // Convert back to a texture (flipped again for display) and show it.
      Texture2D texture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);

      this.GetComponent<GUITexture>().texture = texture;
      Size s = img.Size;
      this.GetComponent<GUITexture>().pixelInset = new Rect(-s.Width / 2, -s.Height / 2, s.Width, s.Height);
   }
Пример #9
0
        /// <summary>
        /// Package start-up: wires the recognizer engine, the capture device and the
        /// face cascade, then spawns a background loop that detects and recognizes
        /// faces every 5 seconds while the package runs.
        /// </summary>
        public override void OnStart()
        {
            PackageHost.WriteInfo("Package starting - IsRunning: {0} - IsConnected: {1}", PackageHost.IsRunning, PackageHost.IsConnected);

            IDBAccess dataStore = new DBAccess("facesDB.db");
            recoEngine = new RecognizerEngine(
                Path.Combine(Environment.CurrentDirectory, "data/facesDB.db"),
                Path.Combine(Environment.CurrentDirectory, "data/RecognizerEngineData.YAML"));
            cap = new Capture();

            // FIX: create the cascade once instead of re-allocating (and never
            // disposing) a native CascadeClassifier on every loop iteration.
            cascadeClassifier = new CascadeClassifier(Path.Combine(Environment.CurrentDirectory, "haarcascade_frontalface_default.xml"));

            Task.Factory.StartNew(() =>
            {
                while (PackageHost.IsRunning)
                {
                    // FIX: QueryFrame() can return null (no frame available); the
                    // original dereferenced it before its null check.
                    using (var rawFrame = cap.QueryFrame())
                    {
                        if (rawFrame != null)
                        {
                            using (var imageFrame = rawFrame.ToImage<Bgr, Byte>())
                            using (var grayframe = imageFrame.Convert<Gray, byte>())
                            {
                                // The actual face detection happens here.
                                Rectangle[] faces = cascadeClassifier.DetectMultiScale(grayframe, 1.2, 10, Size.Empty);

                                PackageHost.PushStateObject<Rectangle[]>("faces", faces);
                                foreach (var face in faces)
                                {
                                    // Recognize each face crop; id 0 means "not in the database".
                                    int nameID = recoEngine.RecognizeUser(imageFrame.GetSubRect(face).Convert<Gray, byte>());
                                    if (nameID == 0)
                                    {
                                        PackageHost.WriteWarn("unknown face");
                                        PackageHost.PushStateObject<String>("Face", "Unknown");
                                    }
                                    else
                                    {
                                        string name = dataStore.GetUsername(nameID);
                                        PackageHost.WriteInfo("face recognized : {0}", name);
                                        PackageHost.PushStateObject<String>("Face", name);
                                    }
                                }
                            }
                        }
                    }
                    Thread.Sleep(5000); // TODO: use PackageHost.GetSettingValue<int>("RefreshRate")
                }
            });
        }
Пример #10
0
        /// <summary>
        /// Detects faces and, within each face region, eyes; results are appended
        /// to the supplied lists with eye rectangles in full-image coordinates.
        /// </summary>
        public static void Detect(Image<Bgr, Byte> image, String faceFileName, String eyeFileName, List<System.Drawing.Rectangle> faces, List<System.Drawing.Rectangle> eyes)
        {
            using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
            //Read the eyeFileName objects
            using (CascadeClassifier eyeCascade = new CascadeClassifier(eyeFileName))
            {
                var timer = Stopwatch.StartNew();
                using (Image<Gray, Byte> grayImage = image.Convert<Gray, Byte>()) //Convert it to Grayscale
                {
                    //Normalize brightness and increase contrast before detection
                    grayImage._EqualizeHist();

                    //Multi-scale face pass over the whole image
                    System.Drawing.Rectangle[] faceHits = faceCascade.DetectMultiScale(
                       grayImage,
                       1.1,
                       10,
                       new System.Drawing.Size(20, 20),
                       System.Drawing.Size.Empty);
                    faces.AddRange(faceHits);

                    foreach (System.Drawing.Rectangle faceRect in faceHits)
                    {
                        //Limit the eye search to the current face region
                        grayImage.ROI = faceRect;
                        System.Drawing.Rectangle[] eyeHits = eyeCascade.DetectMultiScale(
                           grayImage,
                           1.1,
                           10,
                           new System.Drawing.Size(20, 20),
                           System.Drawing.Size.Empty);
                        grayImage.ROI = System.Drawing.Rectangle.Empty;

                        foreach (System.Drawing.Rectangle eyeHit in eyeHits)
                        {
                            //Shift ROI-relative coordinates back into image space
                            System.Drawing.Rectangle shifted = eyeHit;
                            shifted.Offset(faceRect.X, faceRect.Y);
                            eyes.Add(shifted);
                        }
                    }
                }
                timer.Stop();
            }
        }
Пример #11
0
 /// <summary>
 /// Detects faces with a Haar cascade and appends them to facesList, reporting
 /// the elapsed detection time in milliseconds.
 /// </summary>
 public static void detectFace(Image<Bgr, Byte> image, String faceFileName, List<Rectangle> facesList, out long detectionTime)
 {
     Stopwatch timer;
     using (CascadeClassifier cascade = new CascadeClassifier(faceFileName))
     {
         timer = Stopwatch.StartNew();
         // Detection runs on a grayscale copy of the input.
         using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
         {
             facesList.AddRange(cascade.DetectMultiScale(gray, 1.1, 10, new Size(24, 24), Size.Empty));
         }
         timer.Stop();
     }
     detectionTime = timer.ElapsedMilliseconds;
 }
Пример #12
0
        /// <summary>
        /// Detects faces in a Mat and, inside each face region, eyes; all rectangles
        /// are reported in full-image coordinates.
        /// </summary>
        /// <param name="image">Input BGR image.</param>
        /// <param name="faceFileName">Path to the face Haar cascade XML.</param>
        /// <param name="eyeFileName">Path to the eye Haar cascade XML.</param>
        /// <param name="faces">Receives detected face rectangles.</param>
        /// <param name="eyes">Receives detected eye rectangles (image coordinates).</param>
        public static void Detect(Mat image, String faceFileName, 
            String eyeFileName, List<Rectangle> faces, List<Rectangle> eyes)
        {
            //Read the HaarCascade objects
            using (CascadeClassifier face = new CascadeClassifier(faceFileName))
            using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
            {
                using (UMat ugray = new UMat())
                {
                    // BUG FIX: the original commented out the BGR->gray conversion and
                    // fed the 3-channel input straight into EqualizeHist (which requires
                    // a single-channel image); it also pre-sized the UMat with
                    // Size(Rows, Cols), i.e. width and height swapped. CvtColor
                    // allocates the destination itself, so no explicit size is needed.
                    CvInvoke.CvtColor(image, ugray, ColorConversion.Bgr2Gray);

                    //normalizes brightness and increases contrast of the image
                    CvInvoke.EqualizeHist(ugray, ugray);

                    //Detect the faces from the gray scale image and store the locations as rectangles
                    Rectangle[] facesDetected = face.DetectMultiScale(
                       ugray,
                       1.1,
                       10,
                       new Size(20, 20));

                    faces.AddRange(facesDetected);

                    foreach (Rectangle f in facesDetected)
                    {
                        //Limit the eye search to the face region
                        using (UMat faceRegion = new UMat(ugray, f))
                        {
                            Rectangle[] eyesDetected = eye.DetectMultiScale(
                               faceRegion,
                               1.1,
                               10,
                               new Size(20, 20));

                            foreach (Rectangle e in eyesDetected)
                            {
                                //Translate from face-relative to image coordinates
                                Rectangle eyeRect = e;
                                eyeRect.Offset(f.X, f.Y);
                                eyes.Add(eyeRect);
                            }
                        }
                    }
                }
            }
        }
Пример #13
0
 /// <summary>
 /// Runs the frontal-face Haar cascade over the image at ImagePath (only when
 /// both the image file and the cascade file exist).
 /// </summary>
 private void EmguFaceDetector()
 {
     if (File.Exists(this.ImagePath))
     {
         if (File.Exists(@"./haarcascade/haarcascade_frontalface_alt.xml"))
         {
             // FIX: the original loaded the image with flag 0 (grayscale) and then
             // requested a Bgr2Gray conversion, which fails on a single-channel
             // input. Load in color so the conversion is valid. Native resources
             // are now also disposed deterministically.
             using (var emguFaceClassifier = new Emgu.CV.CascadeClassifier(@"./haarcascade/haarcascade_frontalface_alt.xml"))
             using (Emgu.CV.Mat src = CvInvoke.Imread(this.ImagePath, Emgu.CV.CvEnum.ImreadModes.Color))
             using (Emgu.CV.Mat gray = new Emgu.CV.Mat())
             {
                 CvInvoke.CvtColor(src, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                 var faces   = emguFaceClassifier.DetectMultiScale(gray, 1.1, 2, new System.Drawing.Size(30, 30));
                 // NOTE(review): the detection count is unused by any caller visible
                 // here; surface it (field or return value) if it is needed.
                 int facecnt = faces.Length;
             }
         }
     }
 }
Пример #14
0
        // Form constructor: sets up the LBPH recognizer and the CPU/GPU Haar
        // cascades, then either loads previously trained recognizer data or trains
        // it from the images in "<startup>\Traning Faces\".
        public Form1()
        {
            InitializeComponent();
            // LBPH parameters: radius=1, neighbors=8, 8x9 grid, threshold=65.
            recognizer = new LBPHFaceRecognizer(1, 8, 8, 9, 65);

            classifier = new CascadeClassifier(haarcascade);
            GPU_classifier = new GpuCascadeClassifier(haarcascade_cuda);

            font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5, 0.5);
            if (File.Exists(@"traningdata.xml"))
            {
                // Reuse the previously trained model instead of re-training.
                recognizer.Load(@"traningdata.xml");
            }
            else
            {

                foreach (var file in Directory.GetFiles(Application.StartupPath + @"\Traning Faces\"))
                {
                    // Skip files that cannot be loaded as grayscale images.
                    try { temp = new Image<Gray, Byte>(file); }
                    catch { continue; }
                    temp._EqualizeHist();

                    // Skip images with no detectable face; the first detection is
                    // used as the training sample.
                    var detectedFaces = classifier.DetectMultiScale(temp, 1.1, 15, new Size(24, 24), Size.Empty);
                    if (detectedFaces.Length == 0)
                    {
                        continue;
                    }

                    temp.ROI = detectedFaces[0];
                    temp = temp.Copy();
                    // Normalize all training samples to 100x100.
                    temp = temp.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    imagesList.Add(temp);
                    imagesLabels.Add(Path.GetFileNameWithoutExtension(file));
                }
                // Integer label indices parallel to imagesList/imagesLabels.
                for (int i = 0; i < imagesList.Count; i++)
                {
                    imagesLabels_indices.Add(i);
                }

                // Training failure is fatal for this app: report and exit.
                try { recognizer.Train(imagesList.ToArray(), imagesLabels_indices.ToArray()); }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                    Environment.Exit(0);
                }
            }
        }
Пример #15
0
        // Detects a single face in the incoming image and highlights it.
        public Image <Bgr, byte> DetectFace(Image <Bgr, byte> image)
        {
            // Convert the image to a Bitmap for the GDI+ drawing below.
            Bitmap bitmap = image.ToBitmap();

            // Detect faces; each is described by a bounding rectangle.
            System.Drawing.Rectangle[] faces = classifier.DetectMultiScale(image, 1.4, 0);

            if (faces.Length > 0) /*at least one face was found*/
            {
                // Use the first rectangle from the list.
                System.Drawing.Rectangle newFace = faces[0];

                // Re-proportion the rectangle to roughly 3:4 and shift it up to hair level.
                newFace.Height = Convert.ToInt32(newFace.Width * 1.33);
                newFace.Y      = newFace.Y - Convert.ToInt32(newFace.Height / 4.7);

                // Draw the rectangle on the image.
                using (Graphics graphics = Graphics.FromImage(bitmap))
                {
                    using (System.Drawing.Pen pen = new System.Drawing.Pen(System.Drawing.Color.MintCream, 3))
                    {
                        graphics.DrawRectangle(pen, newFace);
                    }
                }

                // If the face lies fully inside the frame, show it in a separate
                // picture box (Clone throws when the rectangle leaves the bitmap).
                try
                {
                    PersonPictureBox.Image = bitmap.Clone(newFace, bitmap.PixelFormat);
                }
                catch (Exception ex)
                {
                    System.Windows.MessageBox.Show(ex.Message, "Ошибка", MessageBoxButton.OK, MessageBoxImage.Error);
                }
            }

            // No face detected: show the "no image" placeholder instead.
            else
            {
                PersonPictureBox.Image = System.Drawing.Image.FromFile(sadSmilePath);
            }

            // Return the original image, or the image with the highlighted rectangle.
            return(bitmap.ToImage <Bgr, byte>());
        }
Пример #16
0
        /// <summary>
        /// Detects faces, preferring the CUDA pipeline when available and falling
        /// back to the CPU cascade otherwise; elapsed time is reported in ms.
        /// </summary>
        public static void Detect(Image<Bgr, Byte> image, String faceFileName, List<Rectangle> faces, out long detectionTime)
        {
            Stopwatch timer;

            if (GpuInvoke.HasCuda)
            {
                // GPU path: upload the frame, convert to gray on the device, detect.
                using (GpuCascadeClassifier gpuCascade = new GpuCascadeClassifier(faceFileName))
                {
                    timer = Stopwatch.StartNew();
                    using (GpuImage<Bgr, Byte> deviceImage = new GpuImage<Bgr, byte>(image))
                    using (GpuImage<Gray, Byte> deviceGray = deviceImage.Convert<Gray, Byte>())
                    {
                        faces.AddRange(gpuCascade.DetectMultiScale(deviceGray, 1.1, 10, Size.Empty));
                    }
                    timer.Stop();
                }
            }
            else
            {
                // CPU path: equalize the grayscale histogram, then run the cascade.
                using (CascadeClassifier cpuCascade = new CascadeClassifier(faceFileName))
                {
                    timer = Stopwatch.StartNew();
                    using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
                    {
                        gray._EqualizeHist();
                        faces.AddRange(cpuCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20), Size.Empty));
                    }
                    timer.Stop();
                }
            }
            detectionTime = timer.ElapsedMilliseconds;
        }
Пример #17
0
        //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::


        //::::::::::::Detection of the hand in a gray image::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
        /// <summary>
        /// Detects hand regions in a grayscale frame, reshapes each roi (narrower,
        /// taller), merges intersecting rois around centerHand, draws the result on
        /// the frame and returns [detections, frame].
        /// </summary>
        public List<Object> Detection(Image<Gray, Byte> frame, PointF centerHand)
        {
            List<Object> results = new List<object>(2);
            haar = new CascadeClassifier(@"C:\Users\America\Documents\MySystemV1\classifier\cascade.xml");

            if (frame != null)
            {
                System.Drawing.Rectangle[] hands = haar.DetectMultiScale(frame, 1.07, 2, new System.Drawing.Size(frame.Width / 8, frame.Height / 8), new System.Drawing.Size(frame.Width / 3, frame.Height / 3));

                // Adjust every roi in place before merging/drawing.
                for (int i = 0; i < hands.Length; i++)
                {
                    hands[i].Inflate(-20, 23);
                }

                // Merge rois that intersect each other, guided by the hand center.
                if (hands.Length > 1)
                {
                    hands = MergeRois(hands, centerHand);
                }

                if (hands.Length == 0)
                {
                    results.Add(new Rectangle[] { });
                }
                else
                {
                    // Draw each surviving roi on the frame before returning it.
                    for (int i = 0; i < hands.Length; i++)
                    {
                        frame.Draw(hands[i], new Gray(255), 1);
                    }
                    results.Add(hands);
                }
            }

            results.Add(frame);

            return results;
            // Both values are returned when the frame is non-null, which the caller
            // (data poll) is expected to guarantee.
        }//finaliza detection()
Пример #18
0
        /// <summary>
        /// Detects faces in the image and, for each face, looks for eyes inside it;
        /// returns scored face models for faces in which at least one eye was found.
        /// </summary>
        /// <param name="image">Input BGR image.</param>
        /// <param name="cascadeClassifierFace">Cascade used for the face pass.</param>
        /// <param name="cascadeClassifierEye">Cascade used for the eye pass inside each face.</param>
        /// <returns>List of FaceScored entries (in practice at most one per call — see notes).</returns>
        public List<FaceScored> FindFaces(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image, CascadeClassifier cascadeClassifierFace, CascadeClassifier cascadeClassifierEye)
        {
            List<FaceScored> currentFaces = new List<FaceScored>();
            using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
            {
            gray._EqualizeHist();
            Size minFaceSize = new Size(minSizeFace , minSizeFace );
            Size maxFaceSize =  new Size(maxSizeFace , maxSizeFace );
            Size minEyeSize = new Size(minSizeEye , minSizeEye );
            Size maxEyeSize =  new Size(maxSizeEye , maxSizeEye );
            Rectangle[] facesDetected = cascadeClassifierFace.DetectMultiScale(gray, scaleFace , neighborsFace , minFaceSize,maxFaceSize);

            foreach (Rectangle f in facesDetected)
            {
                // NOTE(review): 'break' aborts the whole loop on the first face
                // narrower than 35px — confirm 'continue' was not intended.
                if (f.Width<35)
                    break;
                // Restrict subsequent operations (eye search, Bitmap, AvgSdv) to the face.
                gray.ROI = f;

                Rectangle[] eyesDetected = cascadeClassifierEye.DetectMultiScale(gray, scaleEye, neighborsEye, minEyeSize, maxEyeSize);
                if (eyesDetected.Count() >0){
                    FaceScored faceModel = new FaceScored();
                    // ROI is still set here, so FaceImage is the face crop, not the full frame.
                    faceModel.FaceImage = gray.Bitmap;
                    faceModel.FaceImageFullColr = image.GetSubRect(f).Bitmap;
                    faceModel.Height = faceModel.FaceImage.Height;
                    faceModel.Width = faceModel.FaceImage.Width;
                    faceModel.EyesCount = eyesDetected.Count();

                    // Standard deviation of the face crop, used as a quality score.
                    Gray avgf = new Gray();
                    MCvScalar avstd = new MCvScalar();
                    gray.AvgSdv(out avgf, out avstd);
                    faceModel.StdDev = avstd.V0;

                    currentFaces.Add(faceModel);
                    if(currentFaces.Count%5==0)
                        Console.WriteLine("FaceDetect Add every 5 faceModel" + faceModel.Width);
                    // NOTE(review): stops after the first face with eyes, so at most
                    // one model is added per call (and the ROI is left set until
                    // 'gray' is disposed) — confirm this is intentional.
                    break;
                }
                gray.ROI = Rectangle.Empty;
                }
            }
            return currentFaces;
        }
Пример #19
0
        public static List <Rectangle> DetectFaces(Emgu.CV.Mat image)
        {
            // Locates faces with the Haar cascade shipped as ~/face.xml and returns
            // their bounding rectangles.
            var cascadePath = HttpContext.Current.Server.MapPath("~/face.xml");
            var result = new List <Rectangle>();

            using (var classifier = new Emgu.CV.CascadeClassifier(cascadePath))
            using (var grayImage = new UMat())
            {
                // The detector needs a contrast-normalized grayscale image.
                CvInvoke.CvtColor(image, grayImage, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                CvInvoke.EqualizeHist(grayImage, grayImage);

                // Scale step 1.1, 10 neighbors, 20x20 minimum search window.
                result.AddRange(classifier.DetectMultiScale(
                    grayImage,
                    1.1,
                    10,
                    new System.Drawing.Size(20, 20)));
            }

            return result;
        }
Пример #20
0
 /////////////////////////////////////////
 // Video-frame handler: detects the owl in the grabbed frame and outlines it.
 void ProcessFrame(object sender, EventArgs e)
 {
     Image<Bgr, Byte> frame = _cameraCapture.QueryFrame(); // the grabbed frame
     // BUGFIX: QueryFrame can return null (e.g. end of stream / camera hiccup);
     // the original dereferenced it unconditionally.
     if (frame == null)
         return;

     // NOTE(review): loading the cascade from disk on every frame is expensive;
     // consider caching it in a field.
     using (CascadeClassifier face = new CascadeClassifier(faceFileName)) // the Haar cascade
     using (Image<Gray, Byte> gray = frame.Convert<Gray, Byte>()) // Haar works on grayscale
     {
         // Run the detector
         Rectangle[] facesDetected2 = face.DetectMultiScale(
                 gray, // source image
                 1.1,  // scale step between scans
                 6,    // grouping of pre-detected hits; lower means more false alarms
                 new Size(5, 5), // minimum owl size
                 Size.Empty);    // maximum owl size (unbounded)
         // Draw everything that was found
         foreach (Rectangle f in facesDetected2)
         {
             frame.Draw(f, new Bgr(Color.Blue), 2);
         }
     }
     VideoImage.Image = frame;
 }
Пример #21
0
        private void timer1_Tick(object sender, EventArgs e)
        {
            // Per-tick face detection + recognition.
            // BUGFIX: the original called cap.QueryFrame() twice per tick (grabbing
            // and immediately discarding a frame) and re-created the
            // CascadeClassifier on every tick, leaking native resources.
            IDBAccess dataStore = new DBAccess("facesDB.db");

            // Create the classifier once and reuse it across ticks.
            if (cascadeClassifier == null)
                cascadeClassifier = new CascadeClassifier(Application.StartupPath + "/haarcascade_frontalface_default.xml");

            using (var imageFrame = cap.QueryFrame().ToImage<Bgr, Byte>())
            {
                if (imageFrame != null)
                {
                    var grayframe = imageFrame.Convert<Gray, byte>();
                    var faces = cascadeClassifier.DetectMultiScale(grayframe, 1.2, 10, Size.Empty); //the actual face detection happens here

                    PackageHost.PushStateObject<Rectangle[]>("faces", faces);
                    string txt = "";
                    foreach (var face in faces)
                    {
                        imageFrame.Draw(face, new Bgr(Color.BurlyWood), 3); //the detected face(s) is highlighted here using a box that is drawn around it/them

                        if (File.Exists(Application.StartupPath + "/RecognizerEngineData.YAML"))
                        {
                            // Recognize the face crop and map the engine's label to a username.
                            txt += dataStore.GetUsername(recoEngine.RecognizeUser(imageFrame.GetSubRect(face).Convert<Gray, byte>())) + " ";
                        }
                        else
                        {
                            txt += "Train the recognizer engine first !";
                        }
                    }
                    if (faces.GetLength(0) > 0)
                    {
                        // Mark the first (primary) face in red.
                        imageFrame.Draw(faces[0], new Bgr(Color.Red), 3);
                    }
                    textBox1.Text = txt;
                }
                imageBox1.Image = imageFrame;
            }
        }
Пример #22
0
        public static void detect1(Mat image, String fileName, List<Rectangle> breast, bool tryUseCuda, bool tryUseOpenCL,out long detectionTime)
        {
            // Runs a Haar-cascade scan over 'image' and appends every hit to
            // 'breast'.  'detectionTime' reports elapsed milliseconds of the scan.

            // Many opencl functions require opencl compatible gpu devices.
            // As of opencv 3.0-alpha, opencv will crash if opencl is enabled and
            // only an opencl-compatible CPU device is present, so check for a
            // compatible GPU device rather than CvInvoke.HaveOpenCL.
            CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

            Stopwatch timer;
            using (CascadeClassifier cascade = new CascadeClassifier(fileName))
            {
                timer = Stopwatch.StartNew();

                using (UMat grayImage = new UMat())
                {
                    // Grayscale + histogram equalization: the cascade expects a
                    // contrast-normalized single-channel image.
                    CvInvoke.CvtColor(image, grayImage, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                    CvInvoke.EqualizeHist(grayImage, grayImage);

                    // Collect every candidate rectangle (minimum window 20x20).
                    Rectangle[] hits = cascade.DetectMultiScale(
                       grayImage,
                       1.1,
                       10,
                       new Size(20, 20));

                    breast.AddRange(hits);
                }

                timer.Stop();
            }

            detectionTime = timer.ElapsedMilliseconds;
        }
Пример #23
0
        public static void Detect(Image<Bgr, Byte> image, String faceFileName, List<Rectangle> faces)
        {
            // Loads the Haar cascade from 'faceFileName' and appends every detected
            // face rectangle to 'faces'.
            using (CascadeClassifier classifier = new CascadeClassifier(faceFileName))
            using (Image<Gray, Byte> grayImage = image.Convert<Gray, Byte>()) // the cascade works on grayscale
            {
                // Normalize brightness / increase contrast before scanning.
                grayImage._EqualizeHist();

                // Minimum search window 50x50, no maximum (Size.Empty).
                Rectangle[] hits = classifier.DetectMultiScale(
                    grayImage,
                    1.1,
                    5,
                    new Size(50, 50),
                    Size.Empty);

                faces.AddRange(hits);
            }
        }
Пример #24
0
        private void Capture_ImageGrabbed(object sender, EventArgs e)
        {
            // Grabs the latest frame from 'capture', runs the face cascade, and
            // outlines every hit directly on the frame.
            try
            {
                // BUGFIX: Mat wraps native memory and was never disposed, leaking
                // unmanaged memory on every grabbed frame.
                using (Emgu.CV.Mat frame = new Emgu.CV.Mat())
                {
                    if (capture.Retrieve(frame))
                    {
                        using (Emgu.CV.Mat grayFrame = new Emgu.CV.Mat())
                        {
                            Emgu.CV.CvInvoke.CvtColor(frame, grayFrame, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                            Rectangle[] faces = emguFaceClassifier.DetectMultiScale(grayFrame, ScaleFactor, Neighbors);
                            foreach (var face in faces)
                            {
                                // BGR scalar (0, 0, 255) draws a red outline.
                                Emgu.CV.CvInvoke.Rectangle(frame, face, new MCvScalar(0, 0, 255));
                            }
                            //ImageSource = ToBitmapSource(currentFrame);
                            //Bitmap bmi = frame.ToBitmap();
                            //ImageSource = ToBitmapImage(bmi);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Deliberately best-effort: a failed frame must not kill the capture
                // loop.  NOTE(review): consider logging instead of swallowing silently.
            }
        }
Пример #25
0
        public static void Detect(Image<Bgr, Byte> image, String faceFileName, String eyeFileName, List<Rectangle> faces, List<Rectangle> eyes, out long detectionTime)
        {
            // Detects faces and eyes in 'image', preferring the CUDA (GPU) path when
            // available and falling back to the CPU cascades otherwise.  Face
            // rectangles are appended to 'faces'; eye rectangles, offset back into
            // whole-image coordinates, to 'eyes'.  'detectionTime' reports the
            // elapsed milliseconds of detection only (cascade loading excluded on
            // the GPU path, included on neither path before StartNew).
            Stopwatch watch;

             if (GpuInvoke.HasCuda)
             {
            using (GpuCascadeClassifier face = new GpuCascadeClassifier(faceFileName))
            using (GpuCascadeClassifier eye = new GpuCascadeClassifier(eyeFileName))
            {
               watch = Stopwatch.StartNew();
               using (GpuImage<Bgr, Byte> gpuImage = new GpuImage<Bgr, byte>(image))
               using (GpuImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
               {
                  Rectangle[] faceRegion = face.DetectMultiScale(gpuGray, 1.1, 10, Size.Empty);
                  faces.AddRange(faceRegion);
                  foreach (Rectangle f in faceRegion)
                  {
                     // Search for eyes only inside each detected face region.
                     using (GpuImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                     {
                        //For some reason a clone is required.
                        //Might be a bug of GpuCascadeClassifier in opencv
                        using (GpuImage<Gray, Byte> clone = faceImg.Clone())
                        {
                           Rectangle[] eyeRegion = eye.DetectMultiScale(clone, 1.1, 10, Size.Empty);

                           foreach (Rectangle e in eyeRegion)
                           {
                              // Translate from face-local back to whole-image coordinates.
                              Rectangle eyeRect = e;
                              eyeRect.Offset(f.X, f.Y);
                              eyes.Add(eyeRect);
                           }
                        }
                     }
                  }
               }
               watch.Stop();
            }
             }
             else
             {
            //Read the HaarCascade objects
            using (CascadeClassifier face = new CascadeClassifier(faceFileName))
            using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
            {
               watch = Stopwatch.StartNew();
               using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>()) //Convert it to Grayscale
               {
                  //normalizes brightness and increases contrast of the image
                  gray._EqualizeHist();

                  //Detect the faces  from the gray scale image and store the locations as rectangle
                  //The first dimensional is the channel
                  //The second dimension is the index of the rectangle in the specific channel
                  Rectangle[] facesDetected = face.DetectMultiScale(
                     gray,
                     1.1,
                     10,
                     new Size(20, 20),
                     Size.Empty);
                  faces.AddRange(facesDetected);

                  foreach (Rectangle f in facesDetected)
                  {
                     //Set the region of interest on the faces
                     gray.ROI = f;
                     Rectangle[] eyesDetected = eye.DetectMultiScale(
                        gray,
                        1.1,
                        10,
                        new Size(20, 20),
                        Size.Empty);
                     gray.ROI = Rectangle.Empty;

                     foreach (Rectangle e in eyesDetected)
                     {
                        // Translate from face-local back to whole-image coordinates.
                        Rectangle eyeRect = e;
                        eyeRect.Offset(f.X, f.Y);
                        eyes.Add(eyeRect);
                     }
                  }
               }
               watch.Stop();
            }
             }
             detectionTime = watch.ElapsedMilliseconds;
        }
Пример #26
0
        //Handler: processes the current image and searches for the clock dial
        private void button1_Click(object sender, EventArgs e)
        {
            Image<Bgr, Byte> frame = (Image<Bgr, Byte>)VideoImage.Image; //the current frame
            using (CascadeClassifier dial = new CascadeClassifier(faceFileName)) //the Haar cascade
            using (Image<Gray, Byte> gray = frame.Convert<Gray, Byte>()) //Haar works on a grayscale image
            {
                //Run the detector
                Rectangle[] facesDetected2 = dial.DetectMultiScale(
                        gray, //source image
                        1.1,  //scale step between scans
                        6,   //grouping of pre-detected hits; the lower it is, the more false alarms
                        new Size(5, 5), //minimum dial size
                        Size.Empty); //maximum dial size (unbounded)
                //Draw everything that was found
                foreach (Rectangle f in facesDetected2)
                {
                    frame.Draw(f, new Bgr(Color.Blue), 2);
                    VideoImage.Image = frame;
                    //frame.ROI = f;
                    // NOTE(review): round-tripping bitmaps through disk files on every
                    // hit looks like debugging leftovers — confirm before relying on it.
                    frame.Save("out.bmp");
                    Bitmap bmp = new Bitmap("out.bmp");
                    BitmapToBlackWhite2(bmp).Save("out_black.bmp");

                    LabelConnectedComponents(gray, 0).Save("label.bmp");

                 //   BinaryImage.Image =
                    //gray.ThresholdAdaptive(new Gray(255), ADAPTIVE_THRESHOLD_TYPE.CV_ADAPTIVE_THRESH_MEAN_C, THRESH.CV_THRESH_OTSU, 5, new Gray(0.03)).Save("another.bmp");
                }
            }
        }
Пример #27
0
        public List<Face> FindFaces(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image, string faceFileName, string eyeFileName, double scale, int neighbors, int minSize)
        {
            // Detects faces in 'image' and keeps only those for which FindEyes
            // reports at least one eye.  Returns at most one Face: the loop breaks
            // after the first face with eyes.  Any exception is logged to the
            // console and the (possibly empty) list collected so far is returned.
            List<Face> faces = new List<Face>();
            List<Rectangle> facesRect = new List<Rectangle>();
            List<Rectangle> eyesRect = new List<Rectangle>();
            try
            {
                using (CascadeClassifier face = new CascadeClassifier(faceFileName))
                {
                    using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
                    {
                        gray._EqualizeHist();
                        Rectangle[] facesDetected = face.DetectMultiScale(gray, scale, neighbors, new Size(minSize, minSize), Size.Empty);

                        foreach (Rectangle f in facesDetected)
                        {
                            // Work on a clone of the face crop; the sub-rect shares
                            // memory with 'gray'.
                            using (Image<Gray, Byte> faceImg = gray.GetSubRect(f))
                            {
                                using (Image<Gray, Byte> clone = faceImg.Clone())
                                {
                                    Face facemodel = new Face();
                                    eyesRect = new List<Rectangle>(FindEyes(eyeFileName, clone));
                                    if (eyesRect != null && eyesRect.Count>0)
                                    {
                                        facemodel.EyesRects = eyesRect;
                                        facemodel.EyesCount = eyesRect.Count;
                                    }
                                    else
                                    {
                                        // No eyes -> reject this face candidate.
                                        continue;
                                    }
                                    facemodel.FaceImage = clone.Bitmap;
                                    facemodel.Height = facemodel.FaceImage.Height;
                                    facemodel.Width = facemodel.FaceImage.Width;
                                    facemodel.FaceRect = f;
                                    facemodel.FramePosX = f.X;
                                    facemodel.FramePosY = f.Y;
                                    facemodel.ImageFrameSize = image.Size;

                                    // Standard deviation of the crop doubles as a quality score.
                                    Gray avgf = new Gray();
                                    MCvScalar avstd = new MCvScalar();
                                    clone.AvgSdv(out avgf, out avstd);
                                    facemodel.StdDev = avstd.V0;
                                    faces.Add(facemodel);
                                    if (facemodel.FaceScore > 39)
                                        Console.WriteLine("FaceDetect OpenCL Add faceModel" + facemodel.FaceScore);

                                    // Stop after the first face with eyes.
                                    break;
                                }
                            }
                            gray.ROI = Rectangle.Empty;
                        }
                    }
                }
            }
            catch (Exception errFaceDet)
            {
                Console.WriteLine("ERROR - faceDetect OpenCL =" + errFaceDet);
            }
            return faces;
        }
Пример #28
0
        private static Rectangle[] DetectFace(Image<Bgr, Byte> image, string faceFileName)
        {
            // Detects faces with the cascade at 'faceFileName', preferring the CUDA
            // path when available and falling back to the CPU cascade otherwise.
            // BUGFIX: the original wrapped the whole body in try/catch with
            // 'throw ex;', which destroyed the stack trace while adding no handling;
            // the catch has been removed so exceptions propagate unchanged.
            if (GpuInvoke.HasCuda)
            {
                using (GpuCascadeClassifier face = new GpuCascadeClassifier(faceFileName))
                using (GpuImage<Bgr, Byte> gpuImage = new GpuImage<Bgr, byte>(image))
                using (GpuImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
                {
                    return face.DetectMultiScale(gpuGray, 1.1, 10, Size.Empty);
                }
            }
            else
            {
                //Read the HaarCascade objects
                using (CascadeClassifier face = new CascadeClassifier(faceFileName))
                using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>()) //Convert it to Grayscale
                {
                    //normalizes brightness and increases contrast of the image
                    gray._EqualizeHist();

                    //Detect the faces from the gray scale image and store the
                    //locations as rectangles (minimum window filterWidth x filterHeight)
                    return face.DetectMultiScale(
                       gray,
                       1.1,
                       10,
                       new Size(filterWidth, filterHeight),
                       Size.Empty);
                }
            }
        }
Пример #29
0
      public void TestCascadeClassifierFaceDetect()
      {
         // Smoke test: runs the eye cascade over lena.jpg on the CPU path, then
         // repeats the detection on a UMat inside TestOpenCL and logs the timing.
         Image<Gray, Byte> image = EmguAssert.LoadImage<Gray, byte>("lena.jpg");
         //using (HaarCascade cascade = new HaarCascade("eye_12.xml"))
         using (CascadeClassifier cascade = new CascadeClassifier(EmguAssert.GetFile("haarcascade_eye.xml")))
         //using (HaarCascade cascade = new HaarCascade("haarcascade_frontalface_alt2.xml"))
         {
            // minNeighbors == 0: no grouping, all raw candidates are returned.
            Rectangle[] objects = cascade.DetectMultiScale(image, 1.05, 0, new Size(10, 10), Size.Empty);
            foreach (Rectangle obj in objects)
               image.Draw(obj, new Gray(0.0), 1);
         }
         TestOpenCL(delegate
                  {
                     //using (HaarCascade cascade = new HaarCascade("eye_12.xml"))
                     using (UMat um = image.ToUMat())
                     using (CascadeClassifier cascade = new CascadeClassifier(EmguAssert.GetFile("haarcascade_eye.xml")))
                     //using (HaarCascade cascade = new HaarCascade("haarcascade_frontalface_alt2.xml"))
                     {
                        Stopwatch watch = Stopwatch.StartNew();
                        Rectangle[] objects = cascade.DetectMultiScale(um, 1.05, 0, new Size(10, 10), Size.Empty);
                        watch.Stop();
                        Trace.WriteLine(String.Format("Objects detected in {0} milliseconds (UseOpenCL: {1})", watch.ElapsedMilliseconds, CvInvoke.UseOpenCL));
                        foreach (Rectangle obj in objects)
                           image.Draw(obj, new Gray(0.0), 1);

                     }
                  });
      }
Пример #30
0
 private static bool VerifyFace(Image<Gray, byte> currFaceImg, CascadeClassifier classifier, 
                                int sens, Size min)
 {
     // A face is "verified" when the classifier finds at least one object whose
     // window lies between 'min' and a maximum scaled to the image's short side
     // in the same width:height proportion as 'min'.
     var shortSide = Math.Min(currFaceImg.Width, currFaceImg.Height);
     var proportionTotal = min.Width + min.Height;
     var upperBound = new Size(shortSide * min.Width / proportionTotal,
                               shortSide * min.Height / proportionTotal);
     var hits = classifier.DetectMultiScale(currFaceImg, 1.1, sens, min, upperBound);
     return hits.Length > 0;
 }
Пример #31
0
        public Rectangle[] detection(Image<Bgr, Byte> inputImage, string pathXMLHaarcascade)
        {
            // Detects objects in 'inputImage' using the Haar cascade loaded from
            // 'pathXMLHaarcascade'; returns null when the input image is null.
            Rectangle[] rectangleFace = null;

            if (inputImage != null)
            {
                // BUGFIX: the grayscale image and the classifier are IDisposable and
                // were previously leaked on every call; dispose them deterministically.
                using (Image<Gray, byte> grayFrame = inputImage.Convert<Gray, Byte>())
                using (CascadeClassifier haarCascadeXML = new CascadeClassifier(pathXMLHaarcascade))
                {
                    grayFrame._EqualizeHist(); // normalize brightness before detection
                    rectangleFace = haarCascadeXML.DetectMultiScale(grayFrame, ScaleFactor, minNeighbors, minSize, maxSize);
                }
            }

            return rectangleFace;
        }
Пример #32
0
        public static void Detect(
        Mat image, String faceFileName, String eyeFileName, 
        List<Rectangle> faces, List<Rectangle> eyes, 
        bool tryUseCuda, bool tryUseOpenCL,
        out long detectionTime)
        {
            Stopwatch watch;

             #if !(IOS || NETFX_CORE)
             if (tryUseCuda && CudaInvoke.HasCuda)
             {
            using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
            using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
            {
               face.ScaleFactor = 1.1;
               face.MinNeighbors = 10;
               face.MinObjectSize = Size.Empty;
               eye.ScaleFactor = 1.1;
               eye.MinNeighbors = 10;
               eye.MinObjectSize = Size.Empty;
               watch = Stopwatch.StartNew();
               using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
               using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
               using (GpuMat region = new GpuMat())
               {
                  face.DetectMultiScale(gpuGray, region);
                  Rectangle[] faceRegion = face.Convert(region);
                  faces.AddRange(faceRegion);
                  foreach (Rectangle f in faceRegion)
                  {
                     using (CudaImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                     {
                        //For some reason a clone is required.
                        //Might be a bug of CudaCascadeClassifier in opencv
                        using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
                        using (GpuMat eyeRegionMat = new GpuMat())
                        {
                           eye.DetectMultiScale(clone, eyeRegionMat);
                           Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
                           foreach (Rectangle e in eyeRegion)
                           {
                              Rectangle eyeRect = e;
                              eyeRect.Offset(f.X, f.Y);
                              eyes.Add(eyeRect);
                           }
                        }
                     }
                  }
               }
               watch.Stop();
            }
             }
             else
             #endif
             {
            //Many opencl functions require opencl compatible gpu devices.
            //As of opencv 3.0-alpha, opencv will crash if opencl is enable and only opencv compatible cpu device is presented
            //So we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only have cpu opencl devices).
            CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

            //Read the HaarCascade objects
            using (CascadeClassifier face = new CascadeClassifier(faceFileName))
            //using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
            {
               watch = Stopwatch.StartNew();
               using (UMat ugray = new UMat())
               {
                  CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                  //normalizes brightness and increases contrast of the image
                  CvInvoke.EqualizeHist(ugray, ugray);

                  //Detect the faces  from the gray scale image and store the locations as rectangle
                  //The first dimensional is the channel
                  //The second dimension is the index of the rectangle in the specific channel
                  Rectangle[] facesDetected = face.DetectMultiScale(
                     ugray,
                     1.1,
                     2);

                  faces.AddRange(facesDetected);

                  //foreach (Rectangle f in facesDetected)
                  //{
                  //   //Get the region of interest on the faces
                  //   using (UMat faceRegion = new UMat(ugray, f))
                  //   {
                  //      Rectangle[] eyesDetected = eye.DetectMultiScale(
                  //         faceRegion,
                  //         1.1,
                  //         10,
                  //         new Size(20, 20));

                  //      foreach (Rectangle e in eyesDetected)
                  //      {
                  //         Rectangle eyeRect = e;
                  //         eyeRect.Offset(f.X, f.Y);
                  //         eyes.Add(eyeRect);
                  //      }
                  //   }
                  //}
               }
               watch.Stop();
            }
             }
             detectionTime = watch.ElapsedMilliseconds;
        }
Пример #33
0
 public void TestHaarPerformance()
 {
    // Measures how long a full-image face scan takes on lena.jpg.
    // BUGFIX: the classifier and the image are IDisposable and were leaked.
    using (CascadeClassifier face = new CascadeClassifier("haarcascade_frontalface_alt2.xml"))
    using (Image<Gray, Byte> img = new Image<Gray, byte>("lena.jpg"))
    {
       Stopwatch watch = Stopwatch.StartNew();
       face.DetectMultiScale(img, 1.1, 3, Size.Empty, Size.Empty);
       watch.Stop();
       Trace.WriteLine(String.Format("Detecting face from {0}x{1} image took: {2} milliseconds.", img.Width, img.Height, watch.ElapsedMilliseconds));
    }
 }
Пример #34
0
      public static void Detect(
         IInputArray image, String faceFileName, String eyeFileName,
         List<Rectangle> faces, List<Rectangle> eyes,
         out long detectionTime)
      {
         Stopwatch watch;

         using (InputArray iaImage = image.GetInputArray())
         {

#if !(__IOS__ || NETFX_CORE)
            if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
            {
               using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
               using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
               {
                  face.ScaleFactor = 1.1;
                  face.MinNeighbors = 10;
                  face.MinObjectSize = Size.Empty;
                  eye.ScaleFactor = 1.1;
                  eye.MinNeighbors = 10;
                  eye.MinObjectSize = Size.Empty;
                  watch = Stopwatch.StartNew();
                  using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
                  using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
                  using (GpuMat region = new GpuMat())
                  {
                     face.DetectMultiScale(gpuGray, region);
                     Rectangle[] faceRegion = face.Convert(region);
                     faces.AddRange(faceRegion);
                     foreach (Rectangle f in faceRegion)
                     {
                        using (CudaImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                        {
                           //For some reason a clone is required.
                           //Might be a bug of CudaCascadeClassifier in opencv
                           using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
                           using (GpuMat eyeRegionMat = new GpuMat())
                           {
                              eye.DetectMultiScale(clone, eyeRegionMat);
                              Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
                              foreach (Rectangle e in eyeRegion)
                              {
                                 Rectangle eyeRect = e;
                                 eyeRect.Offset(f.X, f.Y);
                                 eyes.Add(eyeRect);
                              }
                           }
                        }
                     }
                  }
                  watch.Stop();
               }
            }
            else
#endif
            {
               //Read the HaarCascade objects
               using (CascadeClassifier face = new CascadeClassifier(faceFileName))
               using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
               {
                  watch = Stopwatch.StartNew();

                  using (UMat ugray = new UMat())
                  {
                     CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                     //normalizes brightness and increases contrast of the image
                     CvInvoke.EqualizeHist(ugray, ugray);

                     //Detect the faces  from the gray scale image and store the locations as rectangle
                     //The first dimensional is the channel
                     //The second dimension is the index of the rectangle in the specific channel
                     Rectangle[] facesDetected = face.DetectMultiScale(
                        ugray,
                        1.1,
                        10,
                        new Size(20, 20));

                     faces.AddRange(facesDetected);

                     foreach (Rectangle f in facesDetected)
                     {
                        //Get the region of interest on the faces
                        using (UMat faceRegion = new UMat(ugray, f))
                        {
                           Rectangle[] eyesDetected = eye.DetectMultiScale(
                              faceRegion,
                              1.1,
                              10,
                              new Size(20, 20));

                           foreach (Rectangle e in eyesDetected)
                           {
                              Rectangle eyeRect = e;
                              eyeRect.Offset(f.X, f.Y);
                              eyes.Add(eyeRect);
                           }
                        }
                     }
                  }
                  watch.Stop();
               }
            }
            detectionTime = watch.ElapsedMilliseconds;
         }
      }
Пример #35
0
        /// <summary>
        /// Accepts a multipart file upload, runs Haar-cascade face detection on each
        /// uploaded image, and returns the first detected face cropped out as a JPEG
        /// attachment named "face.jpg".
        /// </summary>
        /// <returns>HTTP 200 whose content is the JPEG bytes of the first face found.</returns>
        /// <exception cref="HttpResponseException">
        /// 415 (UnsupportedMediaType) when the request is not multipart;
        /// 500 (InternalServerError) when no face is detected in any uploaded file.
        /// </exception>
        public async Task <HttpResponseMessage> Index()
        {
            if (!Request.Content.IsMimeMultipartContent())
            {
                throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
            }

            // CascadeClassifier wraps native OpenCV state (IDisposable) — dispose it
            // deterministically instead of leaking a native handle per request.
            using (var cc = new Emgu.CV.CascadeClassifier(System.Web.Hosting.HostingEnvironment.MapPath("/haarcascade_frontalface_alt_tree.xml")))
            {
                var provider = new MultipartMemoryStreamProvider();
                await Request.Content.ReadAsMultipartAsync(provider);

                foreach (var file in provider.Contents)
                {
                    var filename = file.Headers.ContentDisposition.FileName.Trim('\"');
                    var buffer   = await file.ReadAsByteArrayAsync();

                    //Do whatever you want with filename and its binary data.
                    using (var mStream = new MemoryStream(buffer, 0, buffer.Length))
                    using (var bmp = new Bitmap(mStream))
                    using (var img = new Emgu.CV.Image <Emgu.CV.Structure.Bgr, Int32>(bmp))
                    using (var grayframe = img.Convert <Emgu.CV.Structure.Gray, byte>())
                    {
                        var faces = cc.DetectMultiScale(grayframe);//, 1.1, 10, Size.Empty);
                        foreach (var face in faces)
                        {
                            // only returns the first face found
                            using (var faceBmp = new Bitmap(face.Width, face.Height))
                            {
                                // Graphics holds a GDI+ handle — release it before encoding.
                                using (var g = Graphics.FromImage(faceBmp))
                                {
                                    g.DrawImage(bmp, new Rectangle(0, 0, faceBmp.Width, faceBmp.Height), face.Left, face.Top, faceBmp.Width, faceBmp.Height, GraphicsUnit.Pixel);
                                }

                                using (var outStream = new MemoryStream())
                                {
                                    faceBmp.Save(outStream, System.Drawing.Imaging.ImageFormat.Jpeg);
                                    var result = new HttpResponseMessage(HttpStatusCode.OK)
                                    {
                                        Content = new ByteArrayContent(outStream.ToArray()),
                                    };
                                    result.Content.Headers.ContentDisposition =
                                        new System.Net.Http.Headers.ContentDispositionHeaderValue("attachment")
                                    {
                                        FileName = "face.jpg"
                                    };
                                    result.Content.Headers.ContentType   = new System.Net.Http.Headers.MediaTypeHeaderValue("image/jpeg");
                                    result.Content.Headers.ContentLength = outStream.Length;

                                    return(result);
                                }
                            }
                        }
                    }
                }
            }

            // No uploaded image contained a detectable face.
            throw new HttpResponseException(HttpStatusCode.InternalServerError);
        }