Example #1
0
 /// <summary>
 /// Per-frame capture handler. When <c>faceRecog</c> is checked, runs SURF matching of the
 /// model image currently shown in <c>secondImageBox</c> against the live camera frame and
 /// pops a viewer window on a match; otherwise runs Haar-cascade face/eye detection on the
 /// frame and shows the annotated result in <c>captureImageBox</c>.
 /// </summary>
 /// <param name="sender">Event source (unused).</param>
 /// <param name="arg">Event data (unused).</param>
 private void ProcessFrame(object sender, EventArgs arg)
 {
     if (faceRecog.Checked)
     {
         long recpoints;
         // Wrap the model bitmap in a disposable Emgu image; the original leaked this
         // wrapper every frame, so it now participates in the using chain.
         using (Image <Bgr, Byte> img = new Image <Bgr, byte>(secondImageBox.Image.Bitmap))
         using (Image <Gray, Byte> modelImage = img.Convert <Gray, Byte>())
         using (Image <Gray, Byte> observedImage = _capture.RetrieveBgrFrame().Convert <Gray, Byte>())
         {
             Image <Bgr, byte> result = SurfRecognizer.Draw(modelImage, observedImage, out recpoints);
             captureImageBox.Image = observedImage;
             // Require a minimum number of matched SURF keypoints before declaring a match.
             if (recpoints > 10)
             {
                 MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
                 result.Draw("Person Recognited, Welcome", ref f, new Point(40, 40), new Bgr(0, 255, 0));
                 ImageViewer.Show(result, String.Format(" {0} Points Recognited", recpoints));
             }
         }
     }
     else
     {
         // Detection mode: resize the frame to a fixed display size (8-bit Bgr).
         Image <Bgr, Byte> frame = _capture.RetrieveBgrFrame();
         Image <Bgr, Byte> image = frame.Resize(400, 300, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
         long             detectionTime;
         List <Rectangle> faces = new List <Rectangle>();
         List <Rectangle> eyes  = new List <Rectangle>();
         DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
         foreach (Rectangle face in faces)
         {
             image.Draw(face, new Bgr(Color.Red), 2);
             // Temporarily restrict the image to the face region so the preview shows
             // only the detected face; the ROI is cleared again below.
             // (The former 'detectedface' local was just an alias of 'image' and has
             // been removed.)
             image.ROI = face;
             if (!eqHisChecked.Checked)
             {
                 // Scale factor 2 enlarges the face crop for the preview box.
                 secondImageBox.Image = image.Convert <Gray, Byte>().Resize(2, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
             }
             CvInvoke.cvResetImageROI(image);
         }
         foreach (Rectangle eye in eyes)
         {
             image.Draw(eye, new Bgr(Color.Blue), 2);
         }
         captureImageBox.Image = image;
     }
 }
Example #2
0
        /// <summary>
        /// Per-frame handler: acquires an image (live camera frame or a user-picked file),
        /// detects faces/eyes with Haar cascades, and — when <c>faceRecog</c> is checked —
        /// runs the algorithm selected in <c>comboBoxAlgorithm</c> (SURF, EigenFaces,
        /// FisherFaces, or LBPH) against each detected face, drawing labels and updating
        /// the result labels. Finally shows the annotated image in <c>captureImageBox</c>.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="arg">Event data (unused).</param>
        private void ProcessFrame2(object sender, EventArgs arg)
        {
            if (comboBoxCapture.Text == "Camera")
            {
                image = _capture.RetrieveBgrFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }


            // NOTE(review): this opens a modal file dialog on EVERY frame while
            // "Single Image" is selected, and if the user cancels, 'image' keeps its
            // previous value (or is unset on the first frame) — confirm intended.
            if (comboBoxCapture.Text == "Single Image")
            {
                OpenFileDialog Openfile = new OpenFileDialog();
                if (Openfile.ShowDialog() == DialogResult.OK)
                {
                    image = new Image <Bgr, byte>(Openfile.FileName);
                }
            }

            List <Rectangle> faces = new List <Rectangle>();
            List <Rectangle> eyes  = new List <Rectangle>();
            long             detectionTime;

            DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
            foreach (Rectangle face in faces)
            {
                //Image ROI selected as each face in image
                if (workCorruptedImages.Checked == true)
                {
                    image.ROI = face;
                }
                if (faceRecog.Checked == true)
                {
                    //now program apply selected algorithm if recognition has started

                    //For SURF Algorithm
                    if (comboBoxAlgorithm.Text == "SURF Feature Extractor")
                    {
                        // Match every stored training face (JPEGs under TrainedFaces)
                        // against the current image; each file name is appended to the log box.
                        string   dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
                        string[] files         = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);

                        foreach (var file in files)
                        {
                            richTextBox1.Text += file.ToString();
                            long recpoints;
                            Image <Bgr, Byte> sampleImage = new Image <Bgr, Byte>(file);
                            secondImageBox.Image = sampleImage;
                            using (Image <Gray, Byte> modelImage = sampleImage.Convert <Gray, Byte>())
                                using (Image <Gray, Byte> observedImage = image.Convert <Gray, Byte>())
                                {
                                    Image <Bgr, byte> result = SurfRecognizer.Draw(modelImage, observedImage, out recpoints);
                                    //captureImageBox.Image = observedImage;
                                    // Accept the match only above a fixed keypoint threshold.
                                    if (recpoints > 10)
                                    {
                                        MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
                                        result.Draw("Person Recognited, Welcome", ref f, new Point(40, 40), new Bgr(0, 255, 0));
                                        ImageViewer.Show(result, String.Format(" {0} Points Recognited", recpoints));
                                    }
                                }
                        }
                    }
                    //For EigenFaces
                    else if (comboBoxAlgorithm.Text == "EigenFaces")
                    {
                        // ROI is cleared BEFORE predicting, so Eigen/Fisher predict on the
                        // whole (resized) image, not the face crop set above.
                        CvInvoke.cvResetImageROI(image);
                        //image._EqualizeHist();
                        // NOTE(review): _EqualizeHist mutates 'image' in place and runs once
                        // per detected face — repeated equalization on multi-face frames.
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = eigenFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        // Label -1 means "no match" for the Emgu FaceRecognizer.
                        if (result.Label != -1)
                        {
                            image.Draw(eigenlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                        }
                    }
                    //For FisherFaces
                    else if (comboBoxAlgorithm.Text == "FisherFaces")
                    {
                        CvInvoke.cvResetImageROI(image);
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = fisherFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            image.Draw(fisherlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                        }
                    }

                    //For LBPH
                    // NOTE(review): unlike the Eigen/Fisher branches, LBPH predicts with the
                    // face ROI still active and resets it only after a successful prediction
                    // — confirm whether this asymmetry is intentional.
                    else if (comboBoxAlgorithm.Text == "LBPHFaces")
                    {
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = lbphFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            CvInvoke.cvResetImageROI(image);
                            image.Draw(lbphlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                            label7.Text = lbphlabels[result.Label].ToString();
                        }
                    }
                }

                // Clear any remaining ROI so the face rectangle is drawn in full-image
                // coordinates.
                CvInvoke.cvResetImageROI(image);
                image.Draw(face, new Bgr(Color.Red), 2);
            }
            captureImageBox.Image = image;
        }