Recognize() public method

Try to recognize the image and return its label.
public Recognize ( Image<Gray, Byte> image ) : RecognitionResult
image Image<Gray, Byte> The image to be recognized
Returns RecognitionResult
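Below is a minimal usage sketch for reference. It is hedged: RecognizeLabel, trainingImages, trainingLabels, and candidateFace are placeholder names, 3000 is the eigen-distance threshold most of the examples below use, and the return type of Recognize varies by Emgu CV version (older builds return the label string directly; later builds return a RecognitionResult whose Label property carries the same value).

        //Minimal sketch, assuming Emgu CV 2.x and equally sized grayscale images.
        public string RecognizeLabel(Image<Gray, Byte>[] trainingImages,
                                     string[] trainingLabels,
                                     Image<Gray, Byte> candidateFace)
        {
            //Termination criteria: at most one iteration per training image
            MCvTermCriteria termCrit = new MCvTermCriteria(trainingImages.Length, 0.001);

            //Eigen-distance threshold of 3000; pass 0 to always return the nearest label
            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
               trainingImages,
               trainingLabels,
               3000,
               ref termCrit);

            //Returns the matched label, or an empty string when the distance exceeds the threshold
            return recognizer.Recognize(candidateFace);
        }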
Example #1
        private void btnDetect_Click(object sender, EventArgs e)
        {
            current = new Image<Bgr, byte>(filePath).Resize(300, 250, INTER.CV_INTER_CUBIC);
            Image<Gray, byte> grayScale = current.Convert<Gray, byte>();

            MCvAvgComp[][] detected = grayScale.DetectHaarCascade(face, scale, minNeighbors, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
            foreach (MCvAvgComp d in detected[0])
            {
                current.Draw(d.rect, new Bgr(Color.LawnGreen), 2);
                if (trainingImgs.Count > 0)
                {
                    Image<Gray, byte> dFace = current.Copy(d.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    MCvTermCriteria criteria = new MCvTermCriteria(trainingImgs.Count, epsilon);  //max iterations, epsilon value
                    EigenObjectRecognizer recognize = new EigenObjectRecognizer(trainingImgs.ToArray(), trainingNames.ToArray(), 0, ref criteria);
                    MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 1, 1);
                    string name = recognize.Recognize(dFace);
                    current.Draw(name, ref font, new Point(d.rect.X - 2, d.rect.Y - 20), new Bgr(Color.Red));
                }
            }
            picWebCam.Image = current.ToBitmap();
        }
Example #2
        public void FrameGrabber(object sender, EventArgs e)
        {
            _lastInfo = new List<HeadInformation>();

            CountOfFacesLabel.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria for face recognition; the number of trained images is used as maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Red));

                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                CountOfFacesLabel.Text = facesDetected[0].Length.ToString();

                //Set the region of interest on the faces

                gray.ROI = f.rect;
                MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                   eye,
                   1.9,
                   5,
                   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                   new Size(20, 20));
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp ey in eyesDetected[0])
                {
                    Rectangle eyeRect = ey.rect;
                    eyeRect.Inflate(-7, -7);
                    eyeRect.Offset(f.rect.X, f.rect.Y);
                    currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                }

                //gray.ROI = f.rect;
                //MCvAvgComp[][] mouthDetected = gray.DetectHaarCascade(
                //   mouth,
                //   1.1,
                //   37,
                //   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                //   new Size(20, 20));
                //gray.ROI = Rectangle.Empty;

                //foreach (MCvAvgComp ey in mouthDetected[0])
                //{
                //    Rectangle mouthRect = ey.rect;
                //    mouthRect.Offset(f.rect.X, f.rect.Y);
                //    currentFrame.Draw(mouthRect, new Bgr(Color.Black), 2);
                //}

                gray.ROI = f.rect;
                MCvAvgComp[][] smileDetected = gray.DetectHaarCascade(
                   smile,
                   2,
                   20,
                   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                   new Size(20, 20));
                gray.ROI = Rectangle.Empty;

                HeadInformation hi = new HeadInformation();
                hi.IsSmile = false;

                foreach (MCvAvgComp ey in smileDetected[0])
                {
                    Rectangle smileRect = ey.rect;
                    smileRect.Offset(f.rect.X, f.rect.Y);
                    currentFrame.Draw(smileRect, new Bgr(Color.Black), 2);
                    currentFrame.Draw("smile", ref font, new Point(smileRect.X, smileRect.Y), new Bgr(Color.Red));
                    hi.IsSmile = true;
                }

                hi.Head = f.rect;
                if (eyesDetected[0] != null && eyesDetected[0].Length > 0)
                {
                    if (eyesDetected[0].Length == 1)
                    {
                        hi.Eye1 = eyesDetected[0][0].rect;
                    }
                    if (eyesDetected[0].Length == 2)
                    {
                        hi.Eye1 = eyesDetected[0][0].rect;
                        hi.Eye2 = eyesDetected[0][1].rect;
                    }
                }

                hi.Name = name;

                _lastInfo.Add(hi);
            }

            t = 0;

            //Concatenate the names of the recognized persons
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame.Resize(800, 600, INTER.CV_INTER_CUBIC);
            ListOfUserLabel.Text = names;
            names = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
Example #3
        public User RecognizeFace(Image<Gray, byte> face)
        {
            using (var context = new FaceRecognitionContext())
            {
                var faces = new List<Image<Gray, byte>>();
                var ids = new List<string>();

                foreach (var user in context.Users)
                {
                    var recognizedFace = new Image<Gray, byte>(user.Face.GetBitmap());
                    var id = user.Id.ToString(CultureInfo.InvariantCulture);

                    faces.Add(recognizedFace);
                    ids.Add(id);
                }

                if (ids.Any())
                {
                    var termCrit = new MCvTermCriteria(ids.Count, 0.001);
                    var recognizer = new EigenObjectRecognizer(faces.ToArray(), ids.ToArray(), 2500, ref termCrit);

                    var label = recognizer.Recognize(face);

                    if (!String.IsNullOrEmpty(label))
                    {
                        var id = int.Parse(label);
                        return context.Users.SingleOrDefault(x => x.Id == id);
                    }
                }

                return null;
            }
        }
Example #4
        private void CompositionTarget_Rendering(object sender, EventArgs e)
        {
            _status.Fill = _rd;

            #region Recognition
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            gray = currentFrame.Convert<Gray, Byte>();

            var size = new System.Drawing.Size(20, 20);
            var window = new System.Drawing.Size(grabber.Width, grabber.Height);

            _rects = _faceClassifier.DetectMultiScale(gray, 1.2, 10, size, window);

            foreach (var f in _rects)
            {
                result = currentFrame.Copy(f).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                _status.Fill = new SolidColorBrush(Colors.Green);
                currentFrame.Draw(f, new Bgr(System.Drawing.Color.Red), 2);

                //if we have already trained
                if (CommonData.TrainingImages.Count > 0)
                {
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
                    //Eigen face recognizer
                    recognizer = new EigenObjectRecognizer(
                       CommonData.TrainingImages.ToArray(),
                       CommonData.Names.ToArray(),
                       3000,
                       ref termCrit);

                    string name = recognizer.Recognize(result);
                    currentFrame.Draw(name, ref font, new System.Drawing.Point(f.X - 2, f.Y - 2),
                        new Bgr(System.Drawing.Color.LightGreen));
                }

                //finally draw the source
                _imgCamera.Source = ImageHelper.ToBitmapSource(currentFrame);
            }
            #endregion
        }
Example #5
        void FrameGrabber(object sender, EventArgs e)
        {
            currentFrame = grabber.QueryFrame();
            if (currentFrame != null)
            {
                if (counter >= 1)
                {
                    label5.Text     = "Selected character found in frame";
                    labcounter.Text = ": " + counter + " times.";
                }

                //label3.Text = "0";
                //label4.Text = "";
                //NamePersons.Add("");
                try
                {
                    //Get the current frame from the capture device
                    currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                }
                catch (Exception)
                {
                    Application.Idle += new EventHandler(checkoutput);
                    MessageBox.Show("The video has ended.", "Frame Query Exit", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                //Convert it to Grayscale
                gray = currentFrame.Convert<Gray, Byte>();

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.1,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));
                pro = 0;

                //Action for each element detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t      = t + 1;
                    result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    //draw a rectangle around the detected face
                    currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                    imageBox2.Image = result;
                    if (FaceImgaes.ToArray().Length != 0)
                    {
                        //Termination criteria for face recognition; the number of trained images is used as maxIteration
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                        //Eigen face recognizer

                        Emgu.CV.EigenObjectRecognizer recognizer = new Emgu.CV.EigenObjectRecognizer(
                            FaceImgaes.ToArray(),
                            labels.ToArray(),
                            2000,
                            ref termCrit);

                        name = recognizer.Recognize(result);
                        if (pro == 0)
                        {
                            if (name != "")
                            {
                                counter++;
                                currentFrame.Draw(f.rect, new Bgr(Color.Green), 2);
                                pro++;
                            }



                            //Draw the label for each face detected and recognized
                            currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                        }
                    }
                    else
                    {
                        imageBox2.Image = currentFrame;
                    }



                    //Set the number of faces detected on the scene
                    label4.Text = facesDetected[0].Length.ToString();

                    /*
                     *
                     * //Set the region of interest on the faces
                     *
                     * gray.ROI = f.rect;
                     * MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                     * eye,
                     * 1.1,
                     * 10,
                     * Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                     * new Size(20, 20));
                     * gray.ROI = Rectangle.Empty;
                     *
                     * foreach (MCvAvgComp ey in eyesDetected[0])
                     * {
                     *  Rectangle eyeRect = ey.rect;
                     *  eyeRect.Offset(f.rect.X, f.rect.Y);
                     *  currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                     * }
                     */
                }
                t = 0;



                //Show the faces processed and recognized
                imageBoxFrameGrabber.Image = currentFrame;
                names = "";
                //Clear the list(vector) of names
                NamePersons.Clear();
            }
            else
            {
                btn_decrec.Enabled = true;
                label16.Text       = "";
            }
        }
Example #6
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(473, 355, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.Yellow), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria for face recognition; the number of trained images is used as maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);
                    string index = recognizer.RecognizeIndex(result);
                    if (index != string.Empty && name != String.Empty)
                    {
                        imageBox1.Image = trainingImages[Convert.ToInt32(index)];
                    }
                    else {
                        imageBox1.Image = null;
                    }

                    LabelNameNId = name.Split('|');

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(LabelNameNId[0].ToString(), ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Yellow));

                    btnLogInNLogOut.Enabled = true;

                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();

                /*
                //Set the region of interest on the faces

                gray.ROI = f.rect;
                MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                   eye,
                   1.1,
                   10,
                   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                   new Size(20, 20));
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp ey in eyesDetected[0])
                {
                    Rectangle eyeRect = ey.rect;
                    eyeRect.Offset(f.rect.X, f.rect.Y);
                    currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                }
                 */

            }
            t = 0;

            //Concatenate the names of the recognized persons
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                if (nnn < facesDetected[0].Length - 1)
                {
                    names = names + NamePersons[nnn] + ", ";
                }
                else
                {
                    names = names + NamePersons[nnn];
                }
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            pictureBox2.Visible = (imageBox1.Image == null);
            pictureBox1.Visible = (names != String.Empty);

            if (names == String.Empty)
            {
                imageBox1.Image = null;
                btnLogInNLogOut.Enabled = false;
            }
            else
            {
                btnLogInNLogOut.Enabled = true;
            }
            label4.Text = names;
            names = "";

            //Clear the list(vector) of names
            NamePersons.Clear();
        }
Example #7
        public void TestEigenObjects()
        {
            String[] fileNames = new string[] { "stuff.jpg", "squares.gif", "lena.jpg" };

            int width = 100, height = 100;
            MCvTermCriteria termCrit = new MCvTermCriteria(3, 0.001);

            #region using batch method
            Image<Gray, Byte>[] imgs = Array.ConvertAll<String, Image<Gray, Byte>>(fileNames,
                delegate(String file)
                {
                    return new Image<Gray, Byte>(file).Resize(width, height, CvEnum.INTER.CV_INTER_LINEAR);
                });

            EigenObjectRecognizer imgRecognizer1 = new EigenObjectRecognizer(imgs, ref termCrit);
            for (int i = 0; i < imgs.Length; i++)
            {
                Assert.AreEqual(i.ToString(), imgRecognizer1.Recognize(imgs[i]));
            }

            XmlDocument xDoc = Toolbox.XmlSerialize<EigenObjectRecognizer>(imgRecognizer1);
            EigenObjectRecognizer imgRecognizer2 = Toolbox.XmlDeserialize<EigenObjectRecognizer>(xDoc);

            for (int i = 0; i < imgs.Length; i++)
            {
                Assert.AreEqual(i.ToString(), imgRecognizer2.Recognize(imgs[i]));
            }

            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter =
                new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();

            Byte[] bytes;
            using (MemoryStream ms = new MemoryStream())
            {
                formatter.Serialize(ms, imgRecognizer1);
                bytes = ms.GetBuffer();
            }
            using (MemoryStream ms2 = new MemoryStream(bytes))
            {
                EigenObjectRecognizer imgRecognizer3 = (EigenObjectRecognizer)formatter.Deserialize(ms2);
                for (int i = 0; i < imgs.Length; i++)
                {
                    Assert.AreEqual(i.ToString(), imgRecognizer3.Recognize(imgs[i]));
                }
            }
            #endregion
        }
Example #8
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //Grab the current frame from the capture device
            DateTime startTime = DateTime.Now;
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria for face recognition; the number of trained images is used as maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            //Concatenate the names of the recognized persons
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Display the processed frame in imageBoxFrameGrabber
            imageBoxFrameGrabber.Image = currentFrame;
            DateTime endTime = DateTime.Now;
            textBox2.Text = (endTime - startTime).ToString();
            label4.Text = names;
            names = "";
            NamePersons.Clear();
        }
Example #9
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            label4.Text = "Tidak Ada";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria for face recognition; the number of trained images is used as maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));

                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            //Concatenate the names of the recognized persons
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                //names = names + NamePersons[nnn] + ", ";
                names = names + NamePersons[nnn];
            }

            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            if (label4.Text != "")
            {
                Label_Nama.Text = label4.Text;
                //Label_NIP.Text = "test";

                DBConnect koneksi_db = new DBConnect();
                MySqlConnection db = new MySqlConnection(koneksi_db.koneksi());
                db.Open();
                MySqlCommand dbcmd = db.CreateCommand();
                //Parameterized query to avoid SQL injection
                dbcmd.CommandText = "select nim from data_peg where nama like @nama";
                dbcmd.Parameters.AddWithValue("@nama", "%" + label4.Text + "%");
                MySqlDataReader reader = dbcmd.ExecuteReader();
                while (reader.Read())
                {
                    Label_NIP.Text = reader.GetString(0);
                }
                db.Close();
                //button_SimpanAbsen.Enabled = true;
                //label11.Visible = false;
            }
            else
            {
                //button_SimpanAbsen.Enabled = false;
                //label11.Visible = true;
            }

            names = "";

            //ketemu = true;

            //Clear the list(vector) of names
            NamePersons.Clear();
        }
Example #10
        public void FrameGrabber2(object sender, EventArgs e)
        {
            NamePersons.Add("");

            face = new HaarCascade("haarcascade_frontalface_default.xml");
            //Utility UTl = new Utility();

            //Get the current frame from the capture device
            //Image<Bgr, Byte> currentFrame = UTl.ImageToBgrByte(Image);
            try
            {
                currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            catch (Exception)
            {
                //Reopen the video file when the frames run out
                grabber = new Capture("video002.mp4");
            }
            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));

            //Action for the first face detected
            try
            {
                MCvAvgComp f = facesDetected[0][0];

                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.White), 2);
            }
            catch (Exception ex)
            {
                //MessageBox.Show("Camera Error: Empty frames arrived" + ex.Message.ToString(), "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);

            }

            //Guard against a missing face crop (e.g. an empty frame above)
            if (trainingImages.ToArray().Length != 0 && result != null)
            {
                //Termination criteria for face recognition; the number of trained images is used as maxIteration
                MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                //Eigen face recognizer
                EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                   trainingImages.ToArray(),
                   labels.ToArray(),
                   3000,
                   ref termCrit);

                name = recognizer.Recognize(result);

                //Draw the label for each face detected and recognized
                //currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
            }

            //NamePersons[t - 1] = name;
            NamePersons.Add("");

            t = 0;

            //Names concatenation of persons recognized
            //for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            //{
            //    names = names + NamePersons[nnn] + ", ";
            //}
            //Show the faces processed and recognized
            emguImgFace.Image = currentFrame;
            lblCandidateID.Text = name;
            name = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
Example #11
        private Image<Bgr, byte> findFaces(Image<Bgr, byte> img)
        {
            name = "";
            //Convert it to Grayscale
            Image<Gray, byte> gray = img.Convert<Gray, Byte>();

            //Equalization step
            gray._EqualizeHist();
            try
            {
                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                        face,
                        scaleFactor,minNeighbors,
                        //Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.FIND_BIGGEST_OBJECT,
                        HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                        new Size(minsize, minsize));

                Image<Gray, byte> result;

                foreach (Control ct in flowLayoutPanel1.Controls)
                {
                    (ct as PictureBox).Image = null;
                    ct.Dispose();
                }
                flowLayoutPanel1.Controls.Clear();

                int ContTrain = 0;
                //Action for each element detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    //result = gray.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    result = gray.Convert<Gray, byte>().Resize(500, 500, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);


                    //draw a rectangle around the detected face
                    img.Draw(f.rect, new Bgr(Color.Red), 2);

                    if (trainedFaces.Count != 0 && !skipname.Checked)
                    {
                        ContTrain = trainedFaces.Count;
                        //Termination criteria for face recognition; the number of trained images is used as maxIteration
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain * 2, 0.001);
                        try
                        {
                            //Eigen face recognizer
                            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                                trainedFaces.OrderBy(x => x.path).Select(x => x.img).ToArray(),
                                trainedFaces.OrderBy(x => x.path).Select(x => x.name).ToArray(),
                                4000,
                                ref termCrit);

                            name = recognizer.Recognize(result);

                            //Draw the label for each face detected and recognized
                            img.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                        }
                        catch (Exception ex) { errorLabel.Text = ex.Message + "\n" + ex.StackTrace; }

                    }
                    addToFlow(img, f, name);
                }
            }
            catch (Exception ex) { errorLabel.Text = ex.Message + "\n" + ex.StackTrace; }
            GC.Collect();
            return img;
        }
Example #12
        void FrameGrabber(object sender, EventArgs e)
        {
            NamePersons.Add("");

            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new System.Drawing.Size(20, 20));

            Console.WriteLine(facesDetected[0].Length);

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(System.Drawing.Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria for face recognition; the number of trained images is used as maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new System.Drawing.Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(System.Drawing.Color.LightGreen));

                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                label3.Text = facesDetected[0].Length.ToString();

                if (result != null)
                {
                    dispatcherTimer.Stop();
                    break;
                }

            }
            t = 0;

            //Concatenate the names of the recognized persons
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }

            imageBoxFrameGrabber.Source = ConvertImage(currentFrame.ToBitmap());
            label4.Text = names;
            names = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
Example #13
        private Image<Bgr, byte> findFaces(Image<Bgr, byte> img)
        {
            //Convert it to Grayscale
            Image<Gray, byte> gray = img.Convert<Gray, Byte>();

            //Equalization step
            gray._EqualizeHist();
            try
            {
                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                        face,
                        scaleFactor, minNeighbors,
                    //Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.FIND_BIGGEST_OBJECT,
                        HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                        new Size(minsize, minsize));

                Image<Bgr, byte> result;


                int ContTrain = 0;
                //Action for each element detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    result = img.Copy(f.rect);
                        //gray.Copy(f.rect).Convert<Gray, byte>().Resize(f.rect.Width, f.rect.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    //result = gray.Convert<Gray, byte>().Resize(f.rect.Width, f.rect.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                    //draw a rectangle around the detected face
                    img.Draw(f.rect, new Bgr(Color.Red), 2);
                    if (addFacesToolStripMenuItem.Checked)
                        addTrainedFace(result, f.rect);

                    if (trainedFaces.Count != 0 && matchFaceToolStripMenuItem.Checked)
                    {
                        ContTrain = trainedFaces.Count;
                        //Termination criteria for face recognition; the number of trained images is used as maxIteration
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain * 2, 0.001);
                        try
                        {
                            //Eigen face recognizer
                            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                                trainedFaces.OrderBy(x => x.path).Select(x => x.img.Convert<Gray, Byte>().Resize(200, 200, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC)).ToArray(),
                                trainedFaces.OrderBy(x => x.path).Select(x => x.name).ToArray(),
                                4000,
                                ref termCrit);

                            this.Text = recognizer.Recognize(result.Convert<Gray, Byte>().Resize(200, 200, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));

                            //Draw the label for each face detected and recognized
                            img.Draw(this.Text, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                        }
                        catch (Exception ex) { this.Text = ex.Message + "\n" + ex.StackTrace; }

                    }
                }
            }
            catch (Exception ex) { this.Text = ex.Message + "\n" + ex.StackTrace; }
            GC.Collect();
            return img;
        }
Example #14
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");
            if (doc != null)
            {
                //Console.WriteLine(getImageString(doc));

                //added by PFS
                //www.codeproject.com/Articles/257502/Creating-Your-First-EMGU-Image-Processing-Project
                //img = Base64ToImage(getImageString(doc));   //1-
            }
            else
            {
                img = Image.FromFile(String.Format("{0}/loading.jpg", curDir));
            }
            //Get the current frame from the capture device
            //currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            currentFrame = new Image<Bgr, byte>(new Bitmap(img)).Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            //End addition by PFS

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria for face recognition; the number of trained images is used as maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();

                /*
                //Set the region of interest on the faces

                gray.ROI = f.rect;
                MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                   eye,
                   1.1,
                   10,
                   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                   new Size(20, 20));
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp ey in eyesDetected[0])
                {
                    Rectangle eyeRect = ey.rect;
                    eyeRect.Offset(f.rect.X, f.rect.Y);
                    currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                }
                 */
            }
            t = 0;

            //Concatenate the names of the recognized persons
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
Example #15
        private string recognizerall(MCvAvgComp f)
        {
            string[] ten = new string[5];
            ten[0] = "";

            if (trainingImages.ToArray().Length != 0)
            {

                //Termination criteria for face recognition; the number of trained images is used as max iteration, and eps controls the accuracy
                MCvTermCriteria termCrit = new MCvTermCriteria(tong, 0.6);
                MCvTermCriteria termCritn = new MCvTermCriteria(tong, 0.7);
                MCvTermCriteria termCritm = new MCvTermCriteria(tong, 0.7);
                MCvTermCriteria termCriteL = new MCvTermCriteria(tong, 0.7);
                MCvTermCriteria termCriteR = new MCvTermCriteria(tong, 0.7);
                //Eigen face recognizer

                EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                   trainingImages.ToArray(),
                   labels.ToArray(),
                   2000,
                   ref termCrit);

                ten[0] = recognizer.Recognize(resultface);
                /*

                 ///////////////////////////////////////////////////check the nose/
                 if (resultnose != null)
                 {
                     EigenObjectRecognizer recognizernose = new EigenObjectRecognizer(
                        trainingImagenose.ToArray(),
                        labels.ToArray(),
                        1000,
                        ref termCritn);

                     ten[1] = recognizernose.Recognize(resultnose);
                     currentFrame.Draw("nose: "+ten[1], ref font, new Point(f.rect.X - 2, f.rect.Y - 15), new Bgr(Color.DarkBlue));

                 }
                 //////////////////////////////////////////////////////////

                 if (resultmouth != null)
                 {
                        EigenObjectRecognizer recognizermouth = new EigenObjectRecognizer(
                        trainingImagemouth.ToArray(),
                        labels.ToArray(),
                        1000,
                        ref termCritm);

                     ten[2] = recognizermouth.Recognize(resultmouth);
                     currentFrame.Draw("mouth: "+ten[2], ref font, new Point(f.rect.X - 2, f.rect.Y - 30), new Bgr(Color.LightGreen));
                 }

                 if (resulteyeL != null)
                 {
                     EigenObjectRecognizer recognizereyeL = new EigenObjectRecognizer(
                     trainingImageneyeL.ToArray(),
                     labels.ToArray(),
                     1000,
                     ref termCriteL);

                     ten[3] = recognizereyeL.Recognize(resulteyeL);
                     currentFrame.Draw("eyes: "+ten[3], ref font, new Point(f.rect.X - 45, f.rect.Y - 45), new Bgr(Color.LightGreen));
                 }
                 if (resulteyeR != null)
                 {
                     EigenObjectRecognizer recognizereyeR = new EigenObjectRecognizer(
                     trainingImageneyeR.ToArray(),
                     labels.ToArray(),
                     600,
                     ref termCriteR);

                    ten[4] = recognizereyeR.Recognize(resulteyeR);
                    currentFrame.Draw(ten[4], ref font, new Point(f.rect.X +65, f.rect.Y - 45), new Bgr(Color.LightGreen));
                 }

             }

             int tam = 0;
             string name="";
             for (int i = 1; i < 5; i++)
             {
                 if (ten[0] == ten[i]) tam++;
                 if (tam > 2&&ten[0]!=null) { name = ten[0]; break; } else name = "";
             }
                 */
            }
            return ten[0];
        }
Example #16
        internal string getFaceTag(Bitmap sourceBmp)
        {
            //Get the current frame from the capture device
            currentFrame = new Image<Bgr, byte>(sourceBmp).Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);

            if (currentFrame != null)
            {
                gray_frame = currentFrame.Convert<Gray, Byte>();

                //Face Detector
                MCvAvgComp[][] facesDetected = gray_frame.DetectHaarCascade(
                    Face,
                    1.2,
                    1,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new System.Drawing.Size(20, 20));

                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t = t + 1;
                    result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    //draw the face detected in the 0th (gray) channel with blue color
                    //currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                    if (trainingImages.ToArray().Length != 0)
                    {
                        //Termination criteria for face recognition; the number of trained images is used as maxIteration
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                        //Eigen face recognizer
                        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                           trainingImages.ToArray(),
                           labels.ToArray(),
                           3000,
                           ref termCrit);

                        name = recognizer.Recognize(result);
                        if (!string.IsNullOrEmpty(name))
                        {
                            return name;
                        }
                    }
                }
            }
            return "Sanmeet" ;
        }
Example #17
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria for face recognition; the number of trained images is used as maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);
                    if (!emailSent)
                    {
                        emailSent = true;
                        System.Diagnostics.Debug.WriteLine("sending Email");
                        email_send(currentFrame);
                    }

                    //************* THIS WORKS!! *************************//
                    if (name == "" && intruderFrameCount < 20 && sentSMS == false)
                    {
                        intruderFrameCount++;
                    }
                    else if (intruderFrameCount == 20 && sentSMS == false)
                    {
                        sentSMS = true;
                        System.Diagnostics.Debug.WriteLine("sending Email");
                        email_send(currentFrame);
                        //var request = (HttpWebRequest)WebRequest.Create("http://gb4.site40.net/HeardFood/sendSms.php?body=intruder&to=4105043967");
                        //var response = (HttpWebResponse)request.GetResponse();
                    }
                    //intruderFrameCount = name == "" ? intruderFrameCount + 1 : 0;

                    //Draw the label for each face detected and recognized
                    if (f.rect.X > 180 || f.rect.X < 80)
                    {
                        currentFrame.Draw(name + " is distracted", ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Red));
                    }
                    else
                    {
                        currentFrame.Draw(name + " is alert", ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Cyan));
                    }
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();

                /*
                //Set the region of interest on the faces

                gray.ROI = f.rect;
                MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                   eye,
                   1.1,
                   10,
                   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                   new Size(20, 20));
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp ey in eyesDetected[0])
                {
                    Rectangle eyeRect = ey.rect;
                    eyeRect.Offset(f.rect.X, f.rect.Y);
                    currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                }
                 */
            }
            t = 0;

            //Concatenate the names of the recognized persons
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
Example #18
        public void TestEigenObjectRecognizer()
        {
            Image<Gray, Byte>[] images = new Image<Gray, byte>[20];
            for (int i = 0; i < images.Length; i++)
            {
                images[i] = new Image<Gray, byte>(200, 200);
                images[i].SetRandUniform(new MCvScalar(0), new MCvScalar(255));
            }
            MCvTermCriteria termCrit = new MCvTermCriteria(10, 1.0e-6);

            EigenObjectRecognizer rec = new EigenObjectRecognizer(images, ref termCrit);
            foreach (Image<Gray, Byte> img in images)
            {
                rec.Recognize(img);
                //Trace.WriteLine(rec.Recognize(img));
            }
        }
Example #19
        private void button_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog openFileDialog = new OpenFileDialog();
            openFileDialog.ShowDialog();
            var filePath = openFileDialog.FileName;
            Image<Bgr, Byte> image = new Image<Bgr, byte>(filePath); //Read the file as an 8-bit Bgr image
            List<System.Drawing.Rectangle> faces = new List<System.Drawing.Rectangle>();
            List<System.Drawing.Rectangle> eyes = new List<System.Drawing.Rectangle>();

            Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes);

            foreach (System.Drawing.Rectangle face in faces)
                image.Draw(face, new Bgr(System.Drawing.Color.Red), 2);
            foreach (System.Drawing.Rectangle eye in eyes)
                image.Draw(eye, new Bgr(System.Drawing.Color.Blue), 2);

            ImageViewer.Show(image);
            File.WriteAllBytes("test.jpg", image.ToJpegData());

            Image<Gray, Byte> smileImage = new Image<Gray, byte>("happy.jpg"); //Read the file as an 8-bit grayscale image
            Image<Gray, Byte> sadImage = new Image<Gray, byte>("sad.jpg"); //Read the file as an 8-bit grayscale image

            List<Image<Gray, Byte>> trainingList = new List<Image<Gray, byte>>();
            trainingList.Add(smileImage);
            trainingList.Add(sadImage);

            List<string> labelList = new List<string>();
            labelList.Add("happy");
            labelList.Add("sad");
            // labelList.Add(2);

            MCvTermCriteria termCrit = new MCvTermCriteria(10, 0.001);

            //Eigen face recognizer
            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                trainingList.ToArray(),
                labelList.ToArray(),
                5000,
                ref termCrit);

            Image<Gray, Byte> inputImage = new Image<Gray, byte>(filePath); //Read the file as an 8-bit grayscale image
            var resizedImage = inputImage.Resize(smileImage.Width, smileImage.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            var name = recognizer.Recognize(resizedImage).Label;

            List<int> temp = new List<int>();
            temp.Add(1);
            temp.Add(2);

            EigenFaceRecognizer recognizer2 = new EigenFaceRecognizer(80, double.PositiveInfinity);
            recognizer2.Train(trainingList.ToArray(), temp.ToArray());
            var dd = recognizer2.Predict(resizedImage);

            ImageViewer.Show(resizedImage);
        }
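This example trains both recognizer generations on the same two images: the legacy EigenObjectRecognizer takes string labels and a ref MCvTermCriteria, while the newer EigenFaceRecognizer is trained on integer labels and queried with Predict. A minimal sketch of mapping the prediction back to a readable name, to slot in right after the Predict call above; the PredictionResult fields Label and Distance are assumed from the Emgu CV 3.x FaceRecognizer API:

            //Sketch only: dd.Label is one of the integers passed to Train (1 or 2),
            //so it indexes labelList after an offset; Distance is the match score
            //(assumes Emgu CV 3.x FaceRecognizer.PredictionResult).
            if (dd.Label > 0)
            {
                string predictedName = labelList[dd.Label - 1];
                Console.WriteLine("{0} (distance {1})", predictedName, dd.Distance);
            }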
Example #20
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw the face detected in the 0th (gray) channel with blue color
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //TermCriteria for face recognition: the number of trained images is used as maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));

                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();

            }
            t = 0;

            //Names concatenation of persons recognized
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;

            names = "";
            //Clear the list (vector) of names
            NamePersons.Clear();
        }
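Because the recognizer here is constructed with an eigen-distance threshold of 3000, Recognize returns an empty string when no training image is close enough, and the Draw call then silently paints nothing. A minimal sketch of making the unknown case explicit, slotting in where the name is drawn above:

                    //Sketch only: label faces that the thresholded recognizer
                    //rejected as "Unknown" instead of drawing an empty string.
                    string shown = String.IsNullOrEmpty(name) ? "Unknown" : name;
                    currentFrame.Draw(shown, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));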
Example #21
        void FrameGrabber(object sender, EventArgs e)
        {

            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");
            if (mc.isconnect() && useNao && !useWebcam)
            {
                //Busy-wait until the Nao's upper camera has delivered a fresh frame
                while (!mc.updatedUpper)
                { }
                img = Image.FromStream(new MemoryStream(mc.getByte(0)));
                currentFrame = new Image<Bgr, byte>(new Bitmap(img)).Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            else if (!useNao && useWebcam)
            {
                //Get the current frame from the capture device
                currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            else
            {
                img = Image.FromFile(String.Format("{0}/loading.jpg", curDir));
                currentFrame = new Image<Bgr, byte>(new Bitmap(img)).Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }

            //End addition by PFS
            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));
            try
            {
                //Action for each element detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t = t + 1;
                    result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    //draw the detected face on the frame in red
                    currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                    if (trainingImages.ToArray().Length != 0)
                    {
                        //TermCriteria for face recognition: the number of trained images is used as maxIteration
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.00005);

                        string[] faceName = new string[faceIteration];
                        //Addition by PS: recognize several times with a growing epsilon
                        //and keep the label that wins the majority vote. The recognizer
                        //is rebuilt each pass, since the legacy class consumes the term
                        //criteria at construction time; mutating epsilon afterwards
                        //would otherwise have no effect.
                        for (int i = 0; i < faceIteration; i++)
                        {
                            termCrit.epsilon = i * 0.00005;

                            //Eigen face recognizer
                            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                               trainingImages.ToArray(),
                               labels.ToArray(),
                               3000,
                               ref termCrit);
                            faceName[i] = recognizer.Recognize(result);
                            Console.WriteLine(faceName[i]);
                        }

                        name = faceName.GroupBy(v => v)
                                .OrderByDescending(g => g.Count())
                                .First()
                                .Key;
                        //End of Addition by PS

                        if (name != "" && useNao) //&& !calledName.Any(x=> x==name) )
                        {
                            mc.sendVoice(name);
                            calledName.Add(name);
                            calledNumber++;
                        }
                        else if (useNao)
                        {
                            called = true;
                            //mc.sendVoice("");
                        }

                        //Draw the label for each face detected and recognized
                        currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                    }

                    NamePersons[t - 1] = name;
                    NamePersons.Add("");

                    //Set the number of faces detected on the scene
                    label3.Text = facesDetected[0].Length.ToString();

                    /*
                    //Set the region of interest on the faces

                    gray.ROI = f.rect;
                    MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                       eye,
                       1.1,
                       10,
                       Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                       new Size(20, 20));
                    gray.ROI = Rectangle.Empty;

                    foreach (MCvAvgComp ey in eyesDetected[0])
                    {
                        Rectangle eyeRect = ey.rect;
                        eyeRect.Offset(f.rect.X, f.rect.Y);
                        currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                    }
                     */
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.ToString());
            }
            t = 0;

            //Names concatenation of persons recognized
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names = "";
            //Clear the list (vector) of names
            NamePersons.Clear();
        }
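The per-frame voting in the last example can be factored out into a small helper; a sketch, where MajorityVote is a hypothetical name not present in the original code, using the same LINQ expression as above (requires System.Linq and System.Collections.Generic):

        //Sketch only: returns the most frequent label among the votes,
        //exactly as the inline GroupBy/OrderByDescending chain above does.
        static string MajorityVote(IEnumerable<string> votes)
        {
            return votes.GroupBy(v => v)
                        .OrderByDescending(g => g.Count())
                        .First()
                        .Key;
        }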