The Cascade Classifier
Inheritance: Emgu.Util.UnmanagedObject
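CascadeClassifier wraps OpenCV's Haar/LBP cascade detector. The flow repeated throughout the examples below is: load a trained cascade XML file, convert the input to a single-channel grayscale image, and call DetectMultiScale to obtain one bounding rectangle per hit. A minimal sketch of that flow, assuming the Emgu CV 3.x API with usings for System.Drawing, Emgu.CV and Emgu.CV.Structure; the file names are placeholders:

        static void DetectDemo()
        {
            using (CascadeClassifier cascade = new CascadeClassifier("haarcascade_frontalface_default.xml"))
            using (Image<Bgr, byte> img = new Image<Bgr, byte>("input.jpg"))
            using (Image<Gray, byte> gray = img.Convert<Gray, byte>())
            {
                gray._EqualizeHist(); //optional: normalize brightness and increase contrast

                //scale factor 1.1, 10 neighbours, 20x20 minimum size, no maximum size
                Rectangle[] faces = cascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20), Size.Empty);

                foreach (Rectangle face in faces)
                    img.Draw(face, new Bgr(Color.Red), 2); //mark each hit

                img.Save("output.jpg");
            }
        }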
Example #1
        //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::


        //::::::::::::Detection of the hand in a gray image::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
        public List<Object> Detection(Image<Gray, Byte> frame)
        {   
            List<Object> listReturn = new List<object>(2);
            haar = new CascadeClassifier(@"C:\Users\America\Documents\MySystemV1\classifier\cascade.xml");
            

            if (frame != null)
            {
                System.Drawing.Rectangle[] hands = haar.DetectMultiScale(frame, 1.1, 4, new System.Drawing.Size(frame.Width / 8, frame.Height / 8), new System.Drawing.Size(frame.Width / 3, frame.Height / 3));

                foreach (System.Drawing.Rectangle roi in hands)
                {
                    //Copy the struct first: mutating a foreach iteration variable only changes a temporary copy
                    System.Drawing.Rectangle handRect = roi;
                    handRect.Inflate(-5, 17);            //Grow the ROI vertically (and trim it slightly horizontally), since in some cases the detection misses the full length of the fingers
                    frame.Draw(handRect, new Gray(255), 3);
                }

                if (hands.Length == 0)
                { 
                    Rectangle[] noDetection= new Rectangle[]{}; 
                    //noDetection[0] = Rectangle.Empty;
                    listReturn.Add(noDetection); 
                }
                else
                {
                    listReturn.Add(hands);
                }
                
            }

            listReturn.Add(frame);

            return listReturn;
            //Returns both values when the frame is not null, which should always be true because that is already checked by the data-poll code
        } // end of Detection()
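For reference, a hypothetical caller of Detection() would unpack the two list entries by index and cast them back; a sketch, where 'detector' and 'grayFrame' stand in for objects the surrounding data-poll code would supply:

            List<Object> result = detector.Detection(grayFrame);
            System.Drawing.Rectangle[] hands = (System.Drawing.Rectangle[])result[0];
            Image<Gray, Byte> annotated = (Image<Gray, Byte>)result[1];
            if (hands.Length == 0)
            {
                // nothing detected in this frame
            }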
Example #2
      static void Run()
      {
          CascadeClassifier cas = new CascadeClassifier("haarcascade_frontalface_default.xml");
          
          /* Mat image = new Mat("lena.jpg", LoadImageType.Color); //Read the files as an 8-bit Bgr image  
         long detectionTime;
         List<Rectangle> faces = new List<Rectangle>();
         List<Rectangle> eyes = new List<Rectangle>();

         //The cuda cascade classifier doesn't seem to be able to load "haarcascade_frontalface_default.xml" file in this release
         //disabling CUDA module for now
         bool tryUseCuda = false;
         bool tryUseOpenCL = true;

         DetectFace.Detect(
           image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", 
           faces, eyes,
           tryUseCuda,
           tryUseOpenCL,
           out detectionTime);

         foreach (Rectangle face in faces)
            CvInvoke.Rectangle(image, face, new Bgr(Color.Red).MCvScalar, 2);
         foreach (Rectangle eye in eyes)
            CvInvoke.Rectangle(image, eye, new Bgr(Color.Blue).MCvScalar, 2);

         //display the image 
         ImageViewer.Show(image, String.Format(
            "Completed face and eye detection using {0} in {1} milliseconds", 
            (tryUseCuda && CudaInvoke.HasCuda) ? "GPU"
            : (tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice) ? "OpenCL" 
            : "CPU",
            detectionTime));*/
      }
Example #3
        public static void detectFaceCPU(Image<Bgr, Byte> image, String faceFileName, String eyesFileName, List<Rectangle> facesList, List<Rectangle> eyesList, out long detectionTime)
        {
            Stopwatch watch;
            using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
            using (CascadeClassifier eyesCascade = new CascadeClassifier(eyesFileName))
            {
                watch = Stopwatch.StartNew();
                using (Image<Gray, Byte> grayImage = image.Convert<Gray, Byte>())
                {
                    //grayImage._EqualizeHist();
                    Rectangle[] facesRegion = faceCascade.DetectMultiScale(grayImage, 1.1, 10, new Size(image.Width / 8, image.Height / 8), Size.Empty);
                    facesList.AddRange(facesRegion);

                    foreach (Rectangle f in facesRegion)
                    {
                        grayImage.ROI = f;
                        Rectangle[] eyesDetected = eyesCascade.DetectMultiScale(grayImage, 1.1, 10, new Size(image.Width / 8, image.Height / 8), Size.Empty);
                        grayImage.ROI = Rectangle.Empty;
                        foreach (Rectangle e in eyesDetected)
                        {
                            Rectangle eyeRect = e;
                            eyeRect.Offset(f.X, f.Y);
                            eyesList.Add(eyeRect);
                        }
                    }
                }
                watch.Stop();
            }
            detectionTime = watch.ElapsedMilliseconds;
        }
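A hypothetical call site for detectFaceCPU, showing how the two result lists and the out parameter are consumed; the image path is a placeholder:

        List<Rectangle> faces = new List<Rectangle>();
        List<Rectangle> eyes = new List<Rectangle>();
        long detectionTime;
        using (Image<Bgr, Byte> image = new Image<Bgr, Byte>("test.jpg"))
        {
            detectFaceCPU(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                          faces, eyes, out detectionTime);
            foreach (Rectangle f in faces)
                image.Draw(f, new Bgr(Color.Red), 2);   //faces in red
            foreach (Rectangle e in eyes)
                image.Draw(e, new Bgr(Color.Blue), 2);  //eyes in blue
            Console.WriteLine("Detection took {0} ms", detectionTime);
        }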
Example #4
        public static void Detect(Image<Bgr, Byte> image, String faceFileName, List<Rectangle> recFaces, List<Image<Bgr, Byte>> imgFaces, out long detectionTime)
        {
            Stopwatch watch;

            {
                //Read the HaarCascade objects
                using (CascadeClassifier faceClassifier = new CascadeClassifier(faceFileName))
                {
                    watch = Stopwatch.StartNew();
                    using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>()) //Convert it to Grayscale
                    {
                        //Normalizes brightness and increases contrast of the image
                        gray._EqualizeHist();

                        //Detect the faces from the gray-scale image and store the locations as rectangles
                        Rectangle[] facesDetected = faceClassifier.DetectMultiScale(
                           gray,
                           1.1,
                           10,
                           new Size(20, 20),
                           Size.Empty);
                        recFaces.AddRange(facesDetected);
                        //Now for each rectangle, get the sub face image from the coordinates and store it for display later
                        foreach (Rectangle rec in facesDetected)
                            imgFaces.Add(image.GetSubRect(rec));
                    }
                    watch.Stop();
                }
            }
            detectionTime = watch.ElapsedMilliseconds;
        }
Example #5
        public RecognitionOnPrem()
        {
            InitializeComponent();

            Loaded += (s, e) =>
            {
                if (grabber == null)
                {
                    _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");

                    //count number of trained faces
                    ContTrain = CommonData.TrainingImages.Count;

                    grabber = new Capture();
                    grabber.QueryFrame();
                }
                else
                {
                    grabber.Start();
                }
            };

            Unloaded += (s, e) => {
                grabber.Stop();
            };

            CompositionTarget.Rendering += CompositionTarget_Rendering;
        }
Example #6
        public FormManualTrain(Form1 frm1)
        {
            InitializeComponent();
            browseImage = new OpenFileDialog();

            _form1 = frm1;

            eigenRecog = new Classifier_Train();
            face = new HaarCascade("haarcascade_frontalface_default.xml");
            eyeWithGlass = new CascadeClassifier("haarcascade_eye_tree_eyeglasses.xml");
            mydb = new DBConn();
            minEye = new Size(10, 10);
            maxEye = new Size(225, 225);
            font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);
            if (File.Exists("setting.txt")) //File.ReadAllText never returns null (and throws if the file is missing), so check for the file instead
            {
                folderPath = File.ReadAllText("setting.txt");
            }
            else
            {
                FolderBrowserDialog b = new FolderBrowserDialog();
                b.Description = "Please select your installation path";
                DialogResult r = b.ShowDialog();
                if (r == DialogResult.OK) // Test result.
                {
                    folderPath = b.SelectedPath;
                    Console.WriteLine(folderPath);
                    File.WriteAllText(@"setting.txt", folderPath);
                    MessageBox.Show("Path is at " + folderPath);
                }
            }
        }
Example #7
        public FaceDetectionAlgorithm()
        {
            string path = new Uri(Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().GetName().CodeBase) + "\\Files\\Cascades").LocalPath+"\\";

            _faceHaarCascade = new CascadeClassifier(path + "haarcascade_frontalface_alt.xml");
            _eyeHaarCascade = new CascadeClassifier(path + "haarcascade_eye.xml");
        }
Example #8
        private void Process(object sender, EventArgs arg)
        {
            //Note: constructing the classifiers on every frame is wasteful; they could be created once and reused
            classify = new CascadeClassifier("haarcascade_frontalface_default.xml");
            classify1 = new CascadeClassifier("haarcascade_eye.xml");

            Image<Bgr, Byte> img = cap.QueryFrame().ToImage<Bgr, Byte>();
            if (img != null)
            {
                Image<Gray,byte> grayscaleframe = img.Convert<Gray, byte>();
                var faces = classify.DetectMultiScale(grayscaleframe, 1.1,3,Size.Empty,Size.Empty);
                var eyes = classify1.DetectMultiScale(grayscaleframe, 1.1, 10,Size.Empty,Size.Empty);

                foreach (var face in faces)
                {
                    img.Draw(face, new Bgr(Color.Blue));
                }

                foreach (var eye in eyes)
                {
                    img.Draw(eye, new Bgr(Color.Red), 2);
                }
            }

            imgCap.Image = img;
        }
Example #9
        //Routine that processes the image and searches for the clock dial
        private void button1_Click(object sender, EventArgs e)
        {
            Image<Bgr, Byte> frame = (Image<Bgr, Byte>)VideoImage.Image; //The captured frame
            using (CascadeClassifier dial = new CascadeClassifier(faceFileName)) //The cascade
            using (Image<Gray, Byte> gray = frame.Convert<Gray, Byte>()) //Haar works on a grayscale image
            {
                //Run the detection
                Rectangle[] facesDetected2 = dial.DetectMultiScale(
                        gray, //Source image
                        1.1,  //Image scale factor
                        6,   //Grouping of pre-detected candidates; the fewer required, the more false alarms
                        new Size(5, 5), //Minimum dial size
                        Size.Empty); //Maximum dial size
                //Draw everything that was found
                foreach (Rectangle f in facesDetected2)
                {
                    frame.Draw(f, new Bgr(Color.Blue), 2);
                    VideoImage.Image = frame;
                    //frame.ROI = f;
                    frame.Save("out.bmp");
                    Bitmap bmp = new Bitmap("out.bmp");
                    BitmapToBlackWhite2(bmp).Save("out_black.bmp");

                    LabelConnectedComponents(gray, 0).Save("label.bmp");

                 //   BinaryImage.Image =
                    //gray.ThresholdAdaptive(new Gray(255), ADAPTIVE_THRESHOLD_TYPE.CV_ADAPTIVE_THRESH_MEAN_C, THRESH.CV_THRESH_OTSU, 5, new Gray(0.03)).Save("another.bmp");
                }
            }
        }
Example #10
        public AIRecognition()
        {
            InitializeComponent();

            _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");

            Loaded += (s, e) =>
            {
                _vmodel.Pictures.Clear();
                _vmodel.PersonRecognized = 0;
                this.DataContext = _vmodel;

                if (grabber == null)
                {
                    CommonData.LoadSavedData();
                    //check how many faces we already have
                    _countFaces = CommonData.PicturesVM.Pictures.Count;

                    grabber = new Capture();
                    grabber.QueryFrame();
                    grabber.Start();
                }
                else
                {
                    grabber.Start();
                }

            };
            Unloaded += (s, e) =>
            {
                grabber.Stop();
            };

            CompositionTarget.Rendering += CompositionTarget_Rendering;
        }
Example #11
 public BlinkForm()
 {
     InitializeComponent();
     faceClassifier = new CascadeClassifier(Application.StartupPath + "\\Classifier\\haarcascade_frontalface_default.xml");
     eyePairClassifier = new CascadeClassifier(Application.StartupPath + "\\Classifier\\haarcascade_mcs_eyepair_big.xml");
     eyeClassifier = new CascadeClassifier(Application.StartupPath + "\\Classifier\\haarcascade_eye_tree_eyeglasses.xml");
     Detect();
 }
Example #12
 public TrainData()
 {
     InitializeComponent();
      fr1 = new EigenFaceRecognizer(80, double.PositiveInfinity); //The recognition object
      fr2 = new FisherFaceRecognizer(-1, 3100); //The recognition object
      fr3 = new LBPHFaceRecognizer(1, 8, 8, 8, 100); //50
      cascade = new CascadeClassifier("haarcascade_frontalface_default.xml"); //this file contains the trained detection cascade
 }
Example #13
 public Classifier()
 {
     _internalClassifier = new CascadeClassifier(CascadPath);
     ScaleFactor = 1.1;
     MinNeighbours = 6;
     MinSize = new Size(10, 10);
     MaxSize = Size.Empty;
 }
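The constructor above only stores detector settings; presumably a companion method feeds them to DetectMultiScale. A hypothetical sketch of such a method (not part of the original snippet), using the fields the constructor initializes:

  //Hypothetical companion method: run detection with the configured parameters
  public Rectangle[] Detect(Image<Gray, byte> image)
  {
      return _internalClassifier.DetectMultiScale(image, ScaleFactor, MinNeighbours, MinSize, MaxSize);
  }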
Example #14
 public Rectangle[] FindEyes(string eyeFileName, Image<Gray, Byte> imageFace)
 {
     using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
     using (Mat eyeRegionMat = new Mat())
     {
         Rectangle[] eyeRegion = eye.DetectMultiScale(imageFace, eyescale, eyeneighbors, new Size(eyeminsize, eyeminsize), new Size(eyemaxsize, eyemaxsize));
         return eyeRegion;
     }
 }
Example #15
        public SDMHand(PictureBox picturePreview) : this()
        {
            _picturePreview = picturePreview;
            _cascadeClassifier = new CascadeClassifier(@".\HaarCascade\face.xml");
            _rectangles = new List<Rectangle>(SDMConfig.BufferSize);
            _nui = new NUI(CommandBox);

            Show();
        }
Example #16
   // Use this for initialization
   void Start()
   {  
      Texture2D lenaTexture = Resources.Load<Texture2D>("lena");    

      //updateTextureWithString("load lena ok");
      Image<Bgr, Byte> img = TextureConvert.Texture2dToImage<Bgr, byte>(lenaTexture);
      //updateTextureWithString("convert to image ok");

      //String fileName = "haarcascade_frontalface_default";
      //String fileName = "lbpcascade_frontalface";
      String fileName = "haarcascade_frontalface_alt2";
      String filePath = Path.Combine(Application.persistentDataPath, fileName + ".xml");
      //if (!File.Exists(filePath))
      {
         //updateTextureWithString("start move cascade xml");
         TextAsset cascadeModel = Resources.Load<TextAsset>(fileName);
         
#if UNITY_METRO
         UnityEngine.Windows.File.WriteAllBytes(filePath, cascadeModel.bytes);
#else
         File.WriteAllBytes(filePath, cascadeModel.bytes);
#endif
         //updateTextureWithString("File size: " + new FileInfo(filePath).Length);
      }

      
      using (CascadeClassifier classifier = new CascadeClassifier(filePath))
      using (Image<Gray, Byte> gray = img.Convert<Gray, byte>())
      {
         //updateTextureWithString("classifier create ok");

         Rectangle[] faces = null;
         try
         {
            faces = classifier.DetectMultiScale(gray);

            //updateTextureWithString("face detected");
            foreach (Rectangle face in faces)
            {
               CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
            }
         }
         catch (Exception e)
         {
            
            //updateTextureWithString(e.Message);
            return;
         }
         
         //updateTextureWithString(String.Format("{0} face found on image of {1} x {2}", faces.Length, img.Width, img.Height));
      }

      Texture2D texture = TextureConvert.ImageToTexture2D(img, FlipType.Vertical);

      this.GetComponent<GUITexture>().texture = texture;
      this.GetComponent<GUITexture>().pixelInset = new Rect(-img.Width / 2, -img.Height / 2, img.Width, img.Height);
   }
Example #17
        private void EmguFaceDetector(string path)
        {
            if (capture == null)
            {
                capture = new Emgu.CV.VideoCapture(0);
            }
            capture.ImageGrabbed += Capture_ImageGrabbed;
            capture.Start();

            emguFaceClassifier = new Emgu.CV.CascadeClassifier(@"./haarcascade/haarcascade_frontalface_alt.xml");
        }
Example #18
 public void pump(ref AdvertDetails _advertDetails)
 {
     CascadeClassifier cc = new CascadeClassifier(VWclassifier);
     bool carExists = _advertDetails.CarFound;
     Image<Gray, byte> image;
     if (carExists)
         image = _advertDetails.Image.GetSubRect(_advertDetails.Rect).Convert<Gray, byte>();
     else
         image = _advertDetails.Image.Convert<Gray, byte>();
     Rectangle[] logosFound = cc.DetectMultiScale(image, 1.05, 1, new Size(20,20), new Size(40,40));
 }
Example #19
        public MainWindow()
        {
            InitializeComponent();

            capture = new Capture();
            haarCascade = new CascadeClassifier("../../haarcascade_frontalface_alt.xml");
            timer = new DispatcherTimer();
            timer.Tick += new EventHandler(timer_Tick);
            timer.Interval = new TimeSpan(0, 0, 0, 0, 33);
            timer.Start();
        }
Example #20
 public JanelaTreinarDetector()
 {
     InitializeComponent();
     CvInvoke.UseOpenCL = false;
     //Parent = _Parent;
     //Face = Parent.Face;
     Face = new CascadeClassifier("haarcascade_frontalface_default.xml");//Our face detection method 
   
     ENC_Parameters.Param[0] = ENC;
     Image_Encoder_JPG = GetEncoder(ImageFormat.Jpeg);
     initialise_capture();
 }
Example #21
   // Use this for initialization
   void Start()
   {  
      Texture2D lenaTexture = Resources.Load<Texture2D>("lena");    

      UMat img = new UMat();
      TextureConvert.Texture2dToOutputArray(lenaTexture, img);
      CvInvoke.Flip(img, img, FlipType.Vertical);

      //String fileName = "haarcascade_frontalface_default";
      //String fileName = "lbpcascade_frontalface";
      String fileName = "haarcascade_frontalface_alt2";
      String filePath = Path.Combine(Application.persistentDataPath, fileName + ".xml");
      //if (!File.Exists(filePath))
      {
         TextAsset cascadeModel = Resources.Load<TextAsset>(fileName);
         
#if UNITY_METRO
         UnityEngine.Windows.File.WriteAllBytes(filePath, cascadeModel.bytes);
#else
         File.WriteAllBytes(filePath, cascadeModel.bytes);
#endif
      }

      using (CascadeClassifier classifier = new CascadeClassifier(filePath))
      using (UMat gray = new UMat())
      {
         CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);

         Rectangle[] faces = null;
         try
         {
            faces = classifier.DetectMultiScale(gray);

            foreach (Rectangle face in faces)
            {
               CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
            }
         }
         catch (Exception e)
         {
            Debug.Log(e.Message);
            
            return;
         }
      }

      Texture2D texture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);

      this.GetComponent<GUITexture>().texture = texture;
      Size s = img.Size;
      this.GetComponent<GUITexture>().pixelInset = new Rect(-s.Width / 2, -s.Height / 2, s.Width, s.Height);
   }
Example #22
 public FaceDetectorDevice()
 {
     //Read the HaarCascade objects
     CvInvoke.UseOpenCL = false;
     faceClassifier = new CascadeClassifier(@"Resources\haarcascades\frontalface_alt.xml");
     eyeClassifier = new CascadeClassifier(@"Resources\haarcascades\eye.xml");
     mouthClassifier = new CascadeClassifier(@"Resources\haarcascades\mouth.xml");
     noseClassifier = new CascadeClassifier(@"Resources\haarcascades\nose.xml");
     detectionNotifyTimer = new Timer();
     detectionNotifyTimer.Elapsed += DetectionNotifyTimerOnElapsed;
     DetectionMode = DetectionModes.Disabled;
     DetectionPeriod = TimeSpan.FromMilliseconds(500);
 }
Example #23
        public override void OnStart()
        {
            PackageHost.WriteInfo("Package starting - IsRunning: {0} - IsConnected: {1}", PackageHost.IsRunning, PackageHost.IsConnected);


            string startupPath = System.Reflection.Assembly.GetExecutingAssembly().CodeBase;
            IDBAccess dataStore = new DBAccess("facesDB.db");
            recoEngine = new RecognizerEngine(
                Path.Combine(Environment.CurrentDirectory, "data/facesDB.db"),
                Path.Combine(Environment.CurrentDirectory, "data/RecognizerEngineData.YAML"));    //"/data/facesDB.db", startupPath + "/data/RecognizerEngineData.YAML");
            cap = new Capture();

            //cap.QueryFrame().Save(Path.Combine(Environment.CurrentDirectory, "test.bmp"));

            Task.Factory.StartNew(() =>
            {
                while (PackageHost.IsRunning)
                {
                    Rectangle[] faces;
                    //string bla = System.Reflection.Assembly.GetExecutingAssembly().    CodeBase + "/haarcascade_frontalface_default.xml";
		            cascadeClassifier = new CascadeClassifier( Path.Combine(Environment.CurrentDirectory, "haarcascade_frontalface_default.xml"));
		            using (var imageFrame = cap.QueryFrame().ToImage<Bgr, Byte>())
		            {
			            if (imageFrame != null)
			            {
				            var grayframe = imageFrame.Convert<Gray, byte>();
				            faces = cascadeClassifier.DetectMultiScale(grayframe, 1.2, 10, Size.Empty); //the actual face detection happens here

				            PackageHost.PushStateObject<Rectangle[]>("faces", faces);
				            foreach (var face in faces)
				            {

                                int nameID = recoEngine.RecognizeUser(imageFrame.GetSubRect(face).Convert<Gray, byte>());
                                if (nameID == 0)
                                {
                                    PackageHost.WriteWarn("unknown face");
                                    PackageHost.PushStateObject<String>("Face", "Unknown");
                                }
                                else
                                {
                                    string name = dataStore.GetUsername(nameID);
                                    PackageHost.WriteInfo("face recognized : {0}", name);
                                    PackageHost.PushStateObject<String>("Face", name);
                                }
				            }
			            }
		            }
                    Thread.Sleep(5000);//PackageHost.GetSettingValue<int>("RefreshRate")
                }
            });
        }
Example #24
        /// <summary>
        /// Implements the car detector filter and adds the details to the advertDetails object
        /// </summary>
        /// <param name="_advertDetails">The advertDetails object where information about the advert is stored</param>
        public virtual void pump(ref AdvertDetails _advertDetails)
        {
            Rectangle rect = new Rectangle();
            String view = "Unknown";

            Image<Gray, Byte> image = _advertDetails.Image.Convert<Gray, byte>();

            Rectangle[] rectangleList = new Rectangle[0];

            CascadeClassifier classifier = new CascadeClassifier(sideClassifier);
            Rectangle[] temp = classifier.DetectMultiScale(image, scaleFac, numNeighbours, side_minSize, maxSize);
            if (temp.Length > rectangleList.Length)
            {
                rectangleList = temp;
                view = "Side";
            }

            if (view != "Side")
            {
                classifier = new CascadeClassifier(frontClassifier); //bug fix: the front cascade was previously created and then overwritten before it was ever used
                temp = classifier.DetectMultiScale(image, scaleFac, numNeighbours, fb_minSize, maxSize);
                if(temp.Length > rectangleList.Length)
                {
                    rectangleList = temp;
                    view = "Front";
                }

                classifier = new CascadeClassifier(backClassifier);
                temp = classifier.DetectMultiScale(image, scaleFac, numNeighbours, fb_minSize, maxSize);
                if (temp.Length > rectangleList.Length)
                {
                    rectangleList = temp;
                    view = "Back";
                }
            }

            if (rectangleList.Length > 0)
            {
                rect = getLargest(rectangleList);
                _advertDetails.Rect = rect;
                _advertDetails.CarFound = true;
                _advertDetails.View = view;
                _advertDetails.CarRating = 1;
            }
            else
            {
                _advertDetails.CarFound = false;
                _advertDetails.Error = "No car found.";
            }
        }
Example #25
        public static void Detect(Image<Bgr, Byte> image, String faceFileName, String eyeFileName, List<System.Drawing.Rectangle> faces, List<System.Drawing.Rectangle> eyes)
        {
            //Read the cascade objects
            using (CascadeClassifier face = new CascadeClassifier(faceFileName))
            using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
            {

                var watch = Stopwatch.StartNew();
                using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>()) //Convert it to Grayscale
                {
                    //normalizes brightness and increases contrast of the image
                    gray._EqualizeHist();

                    //Detect the faces from the gray-scale image and store the locations as rectangles
                    System.Drawing.Rectangle[] facesDetected = face.DetectMultiScale(
                       gray,
                       1.1,
                       10,
                       new System.Drawing.Size(20, 20),
                       System.Drawing.Size.Empty);
                    faces.AddRange(facesDetected);

                    foreach (System.Drawing.Rectangle f in facesDetected)
                    {

                        //Set the region of interest on the faces
                        gray.ROI = f;
                        System.Drawing.Rectangle[] eyesDetected = eye.DetectMultiScale(
                           gray,
                           1.1,
                           10,
                           new System.Drawing.Size(20, 20),
                           System.Drawing.Size.Empty);
                        gray.ROI = System.Drawing.Rectangle.Empty;

                        foreach (System.Drawing.Rectangle e in eyesDetected)
                        {
                            System.Drawing.Rectangle eyeRect = e;
                            eyeRect.Offset(f.X, f.Y);
                            eyes.Add(eyeRect);
                        }
                    }
                }
                watch.Stop();
            }
        }
Example #26
 float disNo_Mo;  //nose-mouth
 public ImproveRecognize()
 {
     mineye = new Size(10,10);
     maxeye = new Size(50, 50);
     minnose = new Size(10, 10);
     maxnose = new Size(50, 50);
     minmouth = new Size(10, 10);
     maxmouth = new Size(50, 100);
     
     eyewithglass = new CascadeClassifier("haarcascade_eye_tree_eyeglasses.xml");
     nose = new CascadeClassifier("haarcascade_mcs_nose.xml");
     mouth = new CascadeClassifier("haarcascade_mcs_mouth.xml");
     coord = new Point[4];
     
 }
Example #27
 public static void detectFace(Image<Bgr, Byte> image, String faceFileName, List<Rectangle> facesList, out long detectionTime)
 {
     Stopwatch watch;
     using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
     {
         watch = Stopwatch.StartNew();
         using (Image<Gray, Byte> grayImage = image.Convert<Gray, Byte>())
         {
             Rectangle[] facesRegion = faceCascade.DetectMultiScale(grayImage, 1.1, 10, new Size(24, 24), Size.Empty);
             facesList.AddRange(facesRegion);
         }
         watch.Stop();
     }
     detectionTime = watch.ElapsedMilliseconds;
 }
Example #28
        public static void Detect(Mat image, String faceFileName, 
            String eyeFileName, List<Rectangle> faces, List<Rectangle> eyes)
        {
            //Read the HaarCascade objects
            using (CascadeClassifier face = new CascadeClassifier(faceFileName))
            using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
            {
                Size size = new Size(image.Cols, image.Rows); //Size is (width, height), i.e. (cols, rows)
                using (UMat ugray = new UMat(size, DepthType.Cv8U, 1))
                {
                    CvInvoke.CvtColor(image, ugray, ColorConversion.Bgr2Gray); //EqualizeHist needs a single-channel image

                    //normalizes brightness and increases contrast of the image
                    CvInvoke.EqualizeHist(ugray, ugray);

                    //Detect the faces from the gray-scale image and store the locations as rectangles
                    Rectangle[] facesDetected = face.DetectMultiScale(
                       ugray,
                       1.1,
                       10,
                       new Size(20, 20));

                    faces.AddRange(facesDetected);

                    foreach (Rectangle f in facesDetected)
                    {
                        //Get the region of interest on the faces
                        using (UMat faceRegion = new UMat(ugray, f))
                        {
                            Rectangle[] eyesDetected = eye.DetectMultiScale(
                               faceRegion,
                               1.1,
                               10,
                               new Size(20, 20));

                            foreach (Rectangle e in eyesDetected)
                            {
                                Rectangle eyeRect = e;
                                eyeRect.Offset(f.X, f.Y);
                                eyes.Add(eyeRect);
                            }
                        }
                    }
                }
            }
        }
Example #29
 //What this function does:
 // opens the camera and takes a capture;
 // when an image is grabbed (captured), it is handed to ProcessFrame, which:
 //   puts what the camera captured into a Mat object,
 //   converts the frame to an image object,
 //   and, if the frame contains an image,
 //   detects faces and draws blue rectangles on them,
 //   adds all extracted faces to ExtFaces (a list) and counts them,
 //   then displays the camera feed in the image box.
 // (a sketch of such a ProcessFrame handler is given after this example)

 public VideoCapturing()
 {
     InitializeComponent();
      cascade = new CascadeClassifier("haarcascade_frontalface_default.xml"); //this file contains the trained detection cascade
     CvInvoke.UseOpenCL = false;
     try
     {
         capture = new Capture();
         ExtFaces = new List<Bitmap>();
         capture.ImageGrabbed += ProcessFrame;
     }
     catch (NullReferenceException excpt)
     {
         MessageBox.Show(excpt.Message);
     }
 }
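The comment block above describes a ProcessFrame handler that the constructor wires up but that is not shown here. A minimal sketch of what such a handler might look like, following those comments; the fields cascade, capture and ExtFaces come from the snippet, while the display control imgBox is an assumption:

 private void ProcessFrame(object sender, EventArgs e)
 {
     Mat frame = new Mat();
     capture.Retrieve(frame);                            //put what the camera captured into a Mat
     Image<Bgr, byte> img = frame.ToImage<Bgr, byte>();  //convert the frame to an image object
     if (img != null)
     {
         Image<Gray, byte> gray = img.Convert<Gray, byte>();
         Rectangle[] faces = cascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20), Size.Empty);
         foreach (Rectangle face in faces)
         {
             img.Draw(face, new Bgr(Color.Blue), 2);     //blue rectangle on each detected face
             ExtFaces.Add(img.Copy(face).ToBitmap());    //extract and keep the face region
         }
         imgBox.Image = img.ToBitmap();                  //display the annotated frame ('imgBox' is a hypothetical PictureBox)
     }
 }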
Example #30
 private void EmguFaceDetector()
 {
     Emgu.CV.CascadeClassifier emguFaceClassifier = null;
     if (File.Exists(this.ImagePath))
     {
         if (File.Exists(@"./haarcascade/haarcascade_frontalface_alt.xml"))
         {
             emguFaceClassifier = new Emgu.CV.CascadeClassifier(@"./haarcascade/haarcascade_frontalface_alt.xml");
             //Imread with flag 0 already loads the image as grayscale, so the Bgr2Gray conversion (which would throw on a single-channel Mat) is unnecessary
             Emgu.CV.Mat gray = CvInvoke.Imread(this.ImagePath, 0);
             var faces   = emguFaceClassifier.DetectMultiScale(gray, 1.1, 2, new System.Drawing.Size(30, 30));
             int facecnt = faces.Length;
         }
     }
 }
Example #31
        public Form1()
        {
            InitializeComponent();
            recognizer = new LBPHFaceRecognizer(1, 8, 8, 9, 65);

            classifier = new CascadeClassifier(haarcascade);
            GPU_classifier = new GpuCascadeClassifier(haarcascade_cuda);

            font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5, 0.5);
            if (File.Exists(@"traningdata.xml"))
            {
                recognizer.Load(@"traningdata.xml");
            }
            else
            {

                foreach (var file in Directory.GetFiles(Application.StartupPath + @"\Traning Faces\"))
                {
                    try { temp = new Image<Gray, Byte>(file); }
                    catch { continue; }
                    temp._EqualizeHist();

                    var detectedFaces = classifier.DetectMultiScale(temp, 1.1, 15, new Size(24, 24), Size.Empty);
                    if (detectedFaces.Length == 0)
                    {
                        continue;
                    }

                    temp.ROI = detectedFaces[0];
                    temp = temp.Copy();
                    temp = temp.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    imagesList.Add(temp);
                    imagesLabels.Add(Path.GetFileNameWithoutExtension(file));
                }
                for (int i = 0; i < imagesList.Count; i++)
                {
                    imagesLabels_indices.Add(i);
                }

                try { recognizer.Train(imagesList.ToArray(), imagesLabels_indices.ToArray()); }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                    Environment.Exit(0);
                }
            }
        }
Example #32
        public FormTrain(Form1 frm1)
        {
            InitializeComponent();
            _form1 = frm1;

            eigenRecog = new Classifier_Train();
            face = new HaarCascade("haarcascade_frontalface_default.xml");
            eyeWithGlass = new CascadeClassifier("haarcascade_eye_tree_eyeglasses.xml");
            mydb = new DBConn();
            minEye = new Size(10, 10);
            maxEye = new Size(225, 225);
            font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);
            
            captureT = new Capture();
            Application.Idle += new EventHandler(runningCamera);
            
        }
Example #33
        public static List <Rectangle> DetectFaces(Emgu.CV.Mat image)
        {
            List <Rectangle> faces = new List <Rectangle>();
            var facesCascade       = HttpContext.Current.Server.MapPath("~/face.xml");

            using (Emgu.CV.CascadeClassifier face = new Emgu.CV.CascadeClassifier(facesCascade))
            {
                using (UMat ugray = new UMat())
                {
                    CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                    CvInvoke.EqualizeHist(ugray, ugray);
                    Rectangle[] facesDetected = face.DetectMultiScale(
                        ugray,
                        1.1,
                        10,
                        new System.Drawing.Size(20, 20));
                    faces.AddRange(facesDetected);
                }
            }
            return(faces);
        }
Example #34
        public Face_Detection_Form()
        {
            InitializeComponent();

            _faceDetected = new Emgu.CV.CascadeClassifier("Cascade_Class_Face.xml");
            try
            {
                string   labelsInfo = File.ReadAllText(Application.StartupPath + "\\faces\\faces.txt");
                string[] labels     = labelsInfo.Split(',');
                Numlabels = Convert.ToInt16(labels[0]);
                Count     = Numlabels;
                string Faceload;
                for (int i = 1; i < Numlabels + 1; i++)
                {
                    Faceload = "Face" + i + ".bmp";
                    trainningImages.Add(new Image <Gray, byte>(Application.StartupPath + "\\faces\\" + Faceload)); //bug fix: load the bitmap named in Faceload, not faces.txt
                    _labelsObject.Add(labels[i]);
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show("Can not read data from Database!");
            }
        }
Example #35
        public async Task <HttpResponseMessage> Index()
        {
            if (!Request.Content.IsMimeMultipartContent())
            {
                throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
            }

            Emgu.CV.CascadeClassifier cc = new Emgu.CV.CascadeClassifier(System.Web.Hosting.HostingEnvironment.MapPath("/haarcascade_frontalface_alt_tree.xml"));
            var provider = new MultipartMemoryStreamProvider();
            await Request.Content.ReadAsMultipartAsync(provider);

            foreach (var file in provider.Contents)
            {
                var filename = file.Headers.ContentDisposition.FileName.Trim('\"');
                var buffer   = await file.ReadAsByteArrayAsync();

                using (MemoryStream mStream = new MemoryStream(buffer, 0, buffer.Length))
                {
                    mStream.Position = 0;
                    //Do whatever you want with filename and its binary data.

                    using (Bitmap bmp = new Bitmap(mStream))
                    {
                        using (Emgu.CV.Image <Emgu.CV.Structure.Bgr, Int32> img = new Emgu.CV.Image <Emgu.CV.Structure.Bgr, Int32>(bmp))
                        {
                            if (img != null)
                            {
                                var grayframe = img.Convert <Emgu.CV.Structure.Gray, byte>();
                                var faces     = cc.DetectMultiScale(grayframe);//, 1.1, 10, Size.Empty);
                                int faceCount = 0;
                                foreach (var face in faces)
                                {
                                    // only returns the first face found
                                    faceCount++;
                                    using (Bitmap faceBmp = new Bitmap(face.Right - face.Left, face.Bottom - face.Top))
                                    {
                                        Graphics g = Graphics.FromImage(faceBmp);
                                        g.DrawImage(bmp, new Rectangle(0, 0, faceBmp.Width, faceBmp.Height), face.Left, face.Top, faceBmp.Width, faceBmp.Height, GraphicsUnit.Pixel);
                                        MemoryStream outStream = new MemoryStream();
                                        faceBmp.Save(outStream, System.Drawing.Imaging.ImageFormat.Jpeg);
                                        var result = new HttpResponseMessage(HttpStatusCode.OK)
                                        {
                                            Content = new ByteArrayContent(outStream.ToArray()),
                                        };
                                        result.Content.Headers.ContentDisposition =
                                            new System.Net.Http.Headers.ContentDispositionHeaderValue("attachment")
                                        {
                                            FileName = "face.jpg"
                                        };
                                        result.Content.Headers.ContentType   = new System.Net.Http.Headers.MediaTypeHeaderValue("image/jpeg");
                                        result.Content.Headers.ContentLength = outStream.Length;

                                        return(result);
                                    }
                                }
                            }
                        }
                    }
                }
            }

            throw new HttpResponseException(HttpStatusCode.InternalServerError);
        }
Example #36
 public FrameGrabber()
 {
     Detector = new CascadeClassifier("haarcascade_profileface.xml");
 }