//Matching the captured image against the trained faces
//--------------------------------------------
        /// <summary>
        /// Frame handler: grabs a frame from the capture device, detects faces with the
        /// Haar cascade, recognizes each face against the trained eigenface set, and
        /// updates the preview image and name labels.
        /// </summary>
        public void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";

            NamePersons.Add("");

            // Capture a frame from the device (the full scene, not just the face).
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Detection and recognition both operate on the grayscale copy.
            gray = currentFrame.Convert<Gray, Byte>();

            // Each entry of facesDetected[0] is one face candidate found by the cascade.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;

                // BUG FIX: 'name' is a class field; without this reset, a face that is
                // not recognized (or an empty training set) would silently reuse the
                // name recognized for a previous face/frame.
                name = "";

                // Crop the face, convert to grayscale and normalize to the trained size.
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2); // detection rectangle is red

                if (trainingImages.ToArray().Length != 0)
                {
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref termCrit);

                    name = recognizer.Recognize(result); // empty string when no match is within the threshold

                    // Caption the face just above its rectangle.
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // Number of faces detected in this scene.
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            // Concatenate the recognized names for display.
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }

            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            label6.Text = names;

            names = "";

            NamePersons.Clear();
        }
예제 #2
0
        /// <summary>
        /// Per-frame handler: detect faces, recognize them against the trained set,
        /// and refresh the preview image and labels.
        /// </summary>
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            NamePersons.Add("");

            // Grab the next frame and prepare a grayscale copy for detection.
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            gray         = currentFrame.Convert<Gray, Byte>();

            // Haar-cascade face detection on the grayscale frame.
            MCvAvgComp[][] detected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            foreach (MCvAvgComp candidate in detected[0])
            {
                t += 1;

                // Normalize the face patch to the size used by the training images.
                result = currentFrame.Copy(candidate.rect)
                         .Convert<Gray, byte>()
                         .Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                // Outline the detected face.
                currentFrame.Draw(candidate.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    // Recognize against the trained eigenfaces.
                    MCvTermCriteria criteria = new MCvTermCriteria(ContTrain, 0.001);

                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref criteria);

                    name = recognizer.Recognize(result);

                    // Caption the face with the recognized name.
                    currentFrame.Draw(name, ref font, new Point(candidate.rect.X - 2, candidate.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // Show how many faces are currently in the scene.
                label3.Text = detected[0].Length.ToString();
            }
            t = 0;

            // Build the comma-separated list of recognized people.
            for (int i = 0; i < detected[0].Length; i++)
            {
                names = names + NamePersons[i] + ", ";
            }

            // Publish the processed frame and the names, then reset for the next frame.
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            NamePersons.Clear();
        }
예제 #3
0
 //Procedure that compares faces from the database against the camera feed.
 private void FrameProcedure(object sender, EventArgs e)
 {
     //Guard the whole frame-processing step, just in case.
     try
     {
         //Add a placeholder entry for a face that (so far) has no name.
         Users.Add("");
         //Grab a frame and scale it to the preview box dimensions.
         Frame = camera.QueryFrame().Resize(cameraBox.Width, cameraBox.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
         //Convert the frame to grayscale for detection.
         grayFace = Frame.Convert <Gray, Byte>();
         MCvAvgComp[][] facesDetectedNow = grayFace.DetectHaarCascade(faceDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
         //Process every face the cascade found in this frame.
         foreach (MCvAvgComp f in facesDetectedNow[0])
         {
             //Crop the face, convert to grayscale and normalize its size.
             result = Frame.Copy(f.rect).Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
             //Outline the face in green with a line width of 3.
             Frame.Draw(f.rect, new Bgr(Color.Green), 3);
             //Only attempt recognition when the training set is non-empty.
             if (trainingImames.ToArray().Length != 0)
             {
                 MCvTermCriteria termCriterias = new MCvTermCriteria(Count, 0.001);
                 //Build the eigenface recognizer from the stored training images.
                 EigenObjectRecognizer recognizer = new EigenObjectRecognizer(trainingImames.ToArray(), labels.ToArray(), 1500, ref termCriterias);
                 //Recognize the face; 'name' stays empty when there is no match.
                 name = recognizer.Recognize(result);
                 //Caption the face using the coordinates of the green outline drawn above.
                 Frame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Red));
             }
             //Keep the per-face list in step even when nothing was recognized.
             Users.Add("");
         }
         //Report the recognition result on the status labels (text is Russian UI copy).
         if (name == "")
         {
             metroLabel3.Text = "Лица нет в базе лиц";
             metroLabel5.Text = "или";
             metroLabel6.Text = "Лицо не найдено";
         }
         else
         {
             metroLabel3.Text = name;
             metroLabel5.Text = "";
             metroLabel6.Text = "";
         }
         //Try to publish the frame; if the camera is gone this is where it surfaces.
         try
         {
             cameraBox.Image = Frame;
             name            = "";
             Users.Clear();
         }
         catch (Exception)
         {
             //Ask the user whether the camera is connected; close the app if not.
             var camfail = MessageBox.Show("Похоже, что камера не была обнаружена. Вы уверены, что камера подключена и стабильно работает?", "Камера не обнаружена", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
             if (camfail == DialogResult.Yes)
             {
                 //NOTE(review): this retries the exact statements that just threw, with
                 //no changed state — if the camera is still absent it will throw again
                 //(here, unhandled). Presumably intended as a one-shot retry; verify.
                 cameraBox.Image = Frame;
                 name            = "";
                 Users.Clear();
             }
             if (camfail == DialogResult.No)
             {
                 MessageBox.Show("Попробуйте перезапустить компьютер. Если ошибка не исчезла, сообщите на почту: [email protected]", "Спасибо", MessageBoxButtons.OK, MessageBoxIcon.None);
                 Close();
                 //NOTE(review): Environment.Exit terminates the process immediately, so
                 //the following Application.Exit() is unreachable in practice.
                 Environment.Exit(0);
                 Application.Exit();
             }
         }
     }
     catch (Exception)
     {
         //Any failure in the pipeline above is reported as a camera problem.
         //NOTE(review): this blames the camera for *any* exception (including
         //recognition errors) and the Yes branch does nothing — confirm intent.
         var camfail = MessageBox.Show("Похоже, что камера не была обнаружена. Вы уверены, что камера подключена и стабильно работает?", "Камера не обнаружена", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
         if (camfail == DialogResult.Yes)
         {
         }
         if (camfail == DialogResult.No)
         {
             MessageBox.Show("Попробуйте перезапустить компьютер. Если ошибка не исчезла, сообщите на почту: [email protected]", "Спасибо", MessageBoxButtons.OK, MessageBoxIcon.None);
             Close();
             Environment.Exit(0);
             Application.Exit();
         }
     }
 }
        /// <summary>
        /// Frame handler: detects faces, runs seven eigen recognizers (name, surname,
        /// number, e-mail, department, class, gender — one per label array), scans for
        /// eyes/nose/secondary face inside each face region, and refreshes the labels.
        /// </summary>
        void FrameGrabber(object sender, EventArgs e)
        {
            lblYuzSayisi2.Text = "0";

            // Seed one placeholder slot per attribute list for this frame.
            kisiAdlari.Add("");
            kisiSoyAdlari.Add("");
            kisiNumaralari.Add("");
            kisiMailleri.Add("");
            kisiBolumleri.Add("");
            kisiSiniflari.Add("");
            kisiCinsiyetleri.Add("");

            // Grab and scale the camera frame; detection runs on the grayscale copy.
            kameraResmi = fotoCek.QueryFrame().Resize(300, 280, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            gray = kameraResmi.Convert<Gray, Byte>();

            MCvAvgComp[][] tanimliYuz = gray.DetectHaarCascade(
                yuz,
                1.1,
                5,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                Size.Empty);

            foreach (MCvAvgComp f in tanimliYuz[0])
            {
                t     = t + 1;
                sonuc = kameraResmi.Copy(f.rect).Convert<Gray, byte>().Resize(150, 150, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                kameraResmi.Draw(f.rect, new Bgr(Color.Red), 2);

                if (kisiTanimla.ToArray().Length != 0)
                {
                    // NOTE: one recognizer per attribute is rebuilt for every face on
                    // every frame; expensive, but kept to preserve existing behavior.
                    MCvTermCriteria tc = new MCvTermCriteria(kisiSayisi, 0.001);

                    EigenObjectRecognizer rec = new EigenObjectRecognizer(
                        kisiTanimla.ToArray(),
                        adlari.ToArray(),
                        3000,
                        ref tc);

                    ad = rec.Recognize(sonuc);

                    // Caption the face with the recognized first name.
                    kameraResmi.Draw(ad, ref kameraKisiAd, new Point(f.rect.X - 5, f.rect.Y - 5), new Bgr(Color.Lime));

                    if ((!lblGuvenlikDuvariDurum.Text.Equals("Kapalı") && ad != ""))
                    {
                        //baglan.Write("1");
                        //baglan.Write("5");
                        //baglan.Write("8");
                    }

                    MCvTermCriteria       tc1  = new MCvTermCriteria(kisiSayisi, 0.001);
                    EigenObjectRecognizer rec1 = new EigenObjectRecognizer(
                        kisiTanimla.ToArray(),
                        soyAdlari.ToArray(),
                        3000,
                        ref tc1);
                    soyAd = rec1.Recognize(sonuc);

                    MCvTermCriteria       tc2  = new MCvTermCriteria(kisiSayisi, 0.001);
                    EigenObjectRecognizer rec2 = new EigenObjectRecognizer(
                        kisiTanimla.ToArray(),
                        numaralari.ToArray(),
                        3000,
                        ref tc2);
                    numara = rec2.Recognize(sonuc);

                    MCvTermCriteria       tc3  = new MCvTermCriteria(kisiSayisi, 0.001);
                    EigenObjectRecognizer rec3 = new EigenObjectRecognizer(
                        kisiTanimla.ToArray(),
                        mailleri.ToArray(),
                        3000,
                        ref tc3);
                    mail = rec3.Recognize(sonuc);

                    MCvTermCriteria       tc4  = new MCvTermCriteria(kisiSayisi, 0.001);
                    EigenObjectRecognizer rec4 = new EigenObjectRecognizer(
                        kisiTanimla.ToArray(),
                        bolumleri.ToArray(),
                        3000,
                        ref tc4);
                    bolum = rec4.Recognize(sonuc);

                    MCvTermCriteria       tc5  = new MCvTermCriteria(kisiSayisi, 0.001);
                    EigenObjectRecognizer rec5 = new EigenObjectRecognizer(
                        kisiTanimla.ToArray(),
                        siniflari.ToArray(),
                        3000,
                        ref tc5);
                    sinif = rec5.Recognize(sonuc);

                    MCvTermCriteria       tc6  = new MCvTermCriteria(kisiSayisi, 0.001);
                    EigenObjectRecognizer rec6 = new EigenObjectRecognizer(
                        kisiTanimla.ToArray(),
                        cinsiyetleri.ToArray(),
                        3000,
                        ref tc6);
                    cinsiyet = rec6.Recognize(sonuc);
                }

                // Store this face's attributes and grow each list for the next face.
                kisiAdlari[t - 1] = ad;
                kisiAdlari.Add("");
                kisiSoyAdlari[t - 1] = soyAd;
                kisiSoyAdlari.Add("");
                kisiNumaralari[t - 1] = numara;
                kisiNumaralari.Add("");
                kisiMailleri[t - 1] = mail;
                kisiMailleri.Add("");
                kisiBolumleri[t - 1] = bolum;
                kisiBolumleri.Add("");
                kisiSiniflari[t - 1] = sinif;
                kisiSiniflari.Add("");
                kisiCinsiyetleri[t - 1] = cinsiyet;
                kisiCinsiyetleri.Add("");

                lblYuzSayisi2.Text = tanimliYuz[0].Length.ToString();

                // Firewall hook: fires when nobody known is in the area while the
                // firewall is enabled ("Açık"). The hardware write is commented out.
                if ((adlar == "Alanda Kimse Yok" ||
                     adlar == "" ||
                     adlar == "örn: BuuM" ||
                     numaralar == "Alanda Kimse Yok" ||
                     mailler == "Alanda Kimse Yok" ||
                     soyAdlar == "Alanda Kimse Yok" ||
                     soyAdlar == "örn: BuuM" ||
                     soyAdlar == "" ||
                     bolumler == "Alanda Kimse Yok" ||
                     siniflar == "Alanda Kimse Yok" ||
                     cinsiyetler == "Alanda Kimse Yok") &&
                    lblGuvenlikDuvariDurum.Text.Equals("Açık"))
                {
                    /*
                     * {
                     * baglan.Write("0");
                     * }
                     */
                }

                // Eye scan. BUG FIX: gray.ROI used to be assigned only inside the
                // firewall condition above, so the eye cascade usually scanned the
                // whole frame instead of the face region; set it unconditionally here,
                // matching the nose and secondary-face scans below.
                gray.ROI = f.rect;
                MCvAvgComp[][] gozTarama = gray.DetectHaarCascade(
                    goz,
                    1.1,
                    2,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    Size.Empty);
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp g in gozTarama[0])
                {
                    // ROI hits are relative to the face region; shift back to frame space.
                    Rectangle gozKayit = g.rect;
                    gozKayit.Offset(f.rect.X, f.rect.Y);
                    kameraResmi.Draw(gozKayit, new Bgr(Color.Blue), 2);
                }

                // Nose scan within the face region.
                gray.ROI = f.rect;
                MCvAvgComp[][] burunTarama = gray.DetectHaarCascade(
                    burun,
                    1.1,
                    2,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    Size.Empty);
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp b in burunTarama[0])
                {
                    Rectangle burunKayit = b.rect;
                    burunKayit.Offset(f.rect.X, f.rect.Y);
                    kameraResmi.Draw(burunKayit, new Bgr(Color.Orange), 2);
                }

                // Secondary face cascade within the face region.
                gray.ROI = f.rect;
                MCvAvgComp[][] yuzTarama2 = gray.DetectHaarCascade(
                    yuz2,
                    1.1,
                    3,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    Size.Empty);
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp y in yuzTarama2[0])
                {
                    Rectangle yuzKayit2 = y.rect;
                    yuzKayit2.Offset(f.rect.X, f.rect.Y);
                    kameraResmi.Draw(yuzKayit2, new Bgr(Color.SkyBlue), 1);
                }
            }

            t = 0;

            // The original seven copy-pasted loops each just overwrote one variable;
            // one loop is equivalent (the last detected face's attributes win).
            for (int i = 0; i < tanimliYuz[0].Length; i++)
            {
                adlar       = kisiAdlari[i];
                soyAdlar    = kisiSoyAdlari[i];
                numaralar   = kisiNumaralari[i];
                mailler     = kisiMailleri[i];
                bolumler    = kisiBolumleri[i];
                siniflar    = kisiSiniflari[i];
                cinsiyetler = kisiCinsiyetleri[i];
            }

            imgBoxYuzuTara.Image        = kameraResmi;
            lblTanimliKisiAd.Text       = adlar;
            lblTanimliKisiSoyad.Text    = soyAdlar;
            lblTanimliKisiNumara.Text   = numaralar;
            lblTanimliKisiMail.Text     = mailler;
            lblTanimliKisiBolum.Text    = bolumler;
            lblTanimliKisiSinif.Text    = siniflar;
            lblTanimliKisiCinsiyet.Text = cinsiyetler;

            // Reset the on-screen state for the next frame.
            adlar       = "Alanda Kimse Yok";
            numaralar   = "Alanda Kimse Yok";
            mailler     = "Alanda Kimse Yok";
            soyAdlar    = "Alanda Kimse Yok";
            bolumler    = "Alanda Kimse Yok";
            siniflar    = "Alanda Kimse Yok";
            cinsiyetler = "Alanda Kimse Yok";

            kisiAdlari.Clear();
            kisiSoyAdlari.Clear();
            kisiNumaralari.Clear();
            kisiMailleri.Clear();
            kisiBolumleri.Clear();
            kisiSiniflari.Clear();
            kisiCinsiyetleri.Clear();
        }
예제 #5
0
        /// <summary>
        /// Frame handler: detects faces, recognizes them against the trained set, and
        /// for each recognized person notifies the attendance web endpoint and plays
        /// an audio cue.
        /// </summary>
        void FrameGrabber(object sender, EventArgs e)
        {
            // Defaults for this frame.
            label3.Text = "0";
            NamePersons.Add("");

            // Grab the current frame from the capture device.
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Convert to grayscale for detection.
            gray = currentFrame.Convert<Gray, Byte>();

            // Face detector.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            // Process each detected face.
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                // Outline the face in green-yellow.
                currentFrame.Draw(f.rect, new Bgr(Color.GreenYellow), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    // TermCriteria for face recognition.
                    MCvTermCriteria       termCrit   = new MCvTermCriteria(ContTrain, 0.001);
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref termCrit);
                    name = recognizer.Recognize(result);

                    // BUG FIX: the caption used to be drawn facesDetected[0].Length
                    // times in a loop — identical text at the identical point every
                    // iteration. Drawing once is visually equivalent and cheaper.
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 7), new Bgr(Color.Yellow));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // Show the number of faces detected in the scene.
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                // BUG FIX: 'names' was never populated, so label4 below always showed
                // an empty string; concatenate like the rest of the UI expects.
                names = names + NamePersons[nnn] + ", ";

                try
                {
                    // Only report once loading has finished (load == 1), throttled so a
                    // burst of at most 100 consecutive frames triggers notifications.
                    if (load == 1)
                    {
                        if (berapa < 100)
                        {
                            berapa = berapa + 1;
                            string nama_user = NamePersons[nnn];
                            string texts     = System.IO.File.ReadAllText(Application.StartupPath + "/" + "setting.txt");
                            webBrowser1.Navigate(texts + "baca.php?nama=" + nama_user);
                            if (nama_user == "wajah belum terdaftar")
                            {
                                // Unregistered face: play the "not registered" cue.
                                axWindowsMediaPlayer1.URL = (Application.StartupPath + "/belum.mp3");
                            }
                            else
                            {
                                axWindowsMediaPlayer1.URL = (Application.StartupPath + "/suara.mp3");
                            }
                        }
                        else
                        {
                            berapa = 0;
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Best-effort notification: log and keep processing the frame.
                    Debug.WriteLine(ex.Message);
                }
            }
            // Show the processed frame and the recognized names.
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            NamePersons.Clear();
        }
예제 #6
0
        /// <summary>
        /// Per-frame handler: detect faces, recognize them with a user-tunable
        /// threshold, optionally detect eyes, and refresh the preview and labels.
        /// </summary>
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            NamePersons.Add("");

            // Grab a frame, build the grayscale working copy, and mirror the feed.
            currentFrame           = grabber.QueryFrame().Resize(800, 500, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            gray                   = currentFrame.Convert<Gray, Byte>();
            grabber.FlipHorizontal = true;

            // Haar-cascade face detection.
            MCvAvgComp[][] hits = gray.DetectHaarCascade(
                face,
                1.2,
                3,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(100, 100));

            foreach (MCvAvgComp hit in hits[0])
            {
                t += 1;

                // Normalize the face patch to the trained-image size.
                result = currentFrame.Copy(hit.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                // Frame the detected face in green.
                currentFrame.Draw(hit.rect, new Bgr(Color.Green), 3);

                if (trainingImages.ToArray().Length != 0)
                {
                    // Recognition threshold is user-tunable via the track bar.
                    double          limit    = trackBar1.Value;
                    MCvTermCriteria criteria = new MCvTermCriteria(ContTrain, 0.001);
                    EigenObjectRecognizer recognizer =
                        new EigenObjectRecognizer(trainingImages.ToArray(), labels.ToArray(), limit, ref criteria);
                    name = recognizer.Recognize(result);

                    // Caption: yellow name when recognized, red "Unknown" otherwise.
                    string caption = name == "" ? "Unknown" : name;
                    Bgr    tint    = name == "" ? new Bgr(Color.Red) : new Bgr(Color.Yellow);
                    currentFrame.Draw(caption, ref font, new Point(hit.rect.X, hit.rect.Y - 10), tint);
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // Number of faces currently detected.
                label3.Text = hits[0].Length.ToString();
            }

            // Optional eye detection, toggled from the menu.
            if (wykryjOczyToolStripMenuItem.Checked)
            {
                MCvAvgComp[][] eyeHits = gray.DetectHaarCascade(
                    eye,
                    1.2,
                    5,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(50, 50)
                    );

                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp eyeHit in eyeHits[0])
                {
                    Rectangle box = eyeHit.rect;
                    currentFrame.Draw(box, new Bgr(Color.Red), 2);
                }
            }
            t = 0;

            // Comma-separated list of everyone recognized in this frame.
            for (int i = 0; i < hits[0].Length; i++)
            {
                names = names + NamePersons[i] + ", ";
            }

            // Publish the frame and names, then reset for the next tick.
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            NamePersons.Clear();
        }
예제 #7
0
        // Hàm này thực hiện nhận dạng
        void FrameGrabber(object sender, EventArgs e)
        {
            try
            {
                // khởi tạo 1 đối tượng là currentFrame có dạng là hình ảnh
                imageBoxFrameGrabber.Image = currentFrame;
                // khởi tạo currentFrame sẽ có khung hình rộng 350, cao 335
                currentFrame = grabber.QueryFrame().Resize(350, 335, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // khai báo đối tượng gray này để quy định cái ảnh mà mình nhận được nằm trong khung
                gray = currentFrame.Convert <Gray, Byte>();
                // facesDetected là khuôn hình ảnh nằm trong khung và sẽ có dạng theo quy định của gray
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(haar, 1.1, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));

                foreach (MCvAvgComp f in facesDetected[0])
                {
                    result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);
                    if (trainingImages.ToArray().Length != 0)
                    {
                        MCvTermCriteria       termCrit   = new MCvTermCriteria(ContTrain, 0.001);
                        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(trainingImages.ToArray(), EmployeeID.ToArray(), 3000, ref termCrit);
                        name = recognizer.Recognize(result);
                        Check(name);
                        if (test == true)
                        {
                            double StopValue = Convert.ToDouble(System.Configuration.ConfigurationSettings.AppSettings["StopValue"]);
                            // cái này là hiển thị ở trên khung hình màu đỏ gồm có số ID, cái thứ 2 là font chữ, cái thứ 3 là vị trí hiển thị, cái cuối là màu sắc
                            currentFrame.Draw("Ma NV:" + name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.White));

                            dt1.Clear();
                            SqlCommand cmd = new SqlCommand();
                            cmd.Connection  = con;
                            cmd.CommandType = CommandType.Text;
                            // truy vấn lấy thông tin của Employee trong csdl
                            cmd.CommandText = @"SELECT  lastdate,fldEmployeeID as N'" + applicationConfiguration.getLanguagePackage().getMemberCodeAlias()
                                              + @"',fldFirstName as N'" + applicationConfiguration.getLanguagePackage().getLastNameAlias()
                                              + @"',fldLastName as N'" + applicationConfiguration.getLanguagePackage().getFirstNameAlias()
                                              + @"',
                                            (Case fldSex when 'True' then N'" + applicationConfiguration.getLanguagePackage().getMaleAlias()
                                              + @"' else N'" + applicationConfiguration.getLanguagePackage().getFemaleAlias()
                                              + @"' end) as N'" + applicationConfiguration.getLanguagePackage().getSexAlias()
                                              + @"',
                                             fldBirth as N'" + applicationConfiguration.getLanguagePackage().getBirthAlias()
                                              + @"', fldGroupName as N'" + applicationConfiguration.getLanguagePackage().getClassAlias()
                                              + @"'
                                                                     FROM   tblGroup INNER JOIN tblEmployee 
                                                                     ON tblGroup.fldGroupID = tblEmployee.fldGroupID
                                                                      where fldEmployeeID= @EmployeeID";
                            cmd.Parameters.Add("@EmployeeID", SqlDbType.Int, 16).Value = Convert.ToInt16(name);
                            da.SelectCommand = cmd;

                            // Hiển thị thông tin của Employee vừa lấy được ở trên ra bảng
                            da.Fill(dt1);

                            /*grvdata.Columns[4].DefaultCellStyle.Format = "dd/MM/yyyy";
                             * txtmanv.DataBindings.Clear();
                             * txtmanv.DataBindings.Add("Text", dt1, applicationConfiguration.getLanguagePackage().getMemberCodeAlias());
                             * txtHo.DataBindings.Clear();
                             * txtHo.DataBindings.Add("Text", dt1, applicationConfiguration.getLanguagePackage().getLastNameAlias());
                             * txtTen.DataBindings.Clear();
                             * txtTen.DataBindings.Add("Text", dt1, applicationConfiguration.getLanguagePackage().getFirstNameAlias());
                             * txtPhong.DataBindings.Clear();
                             * txtPhong.DataBindings.Add("Text", dt1, applicationConfiguration.getLanguagePackage().getClassAlias());
                             * txtGioitinh.DataBindings.Clear();
                             * txtGioitinh.DataBindings.Add("Text", dt1, applicationConfiguration.getLanguagePackage().getSexAlias());
                             * dpBirth.DataBindings.Clear();
                             * dpBirth.DataBindings.Add("Text", dt1, applicationConfiguration.getLanguagePackage().getBirthAlias());*/
                            //btnCallTheRoll_Click(sender, e);
                            DateTime date    = Convert.ToDateTime(dt1.Rows[0]["lastdate"]);
                            TimeSpan diff    = DateTime.Now - date;
                            double   seconds = diff.TotalSeconds;
                            if (seconds > StopValue)
                            {
                                SqlCommand cmd2 = new SqlCommand();
                                cmd2.Connection  = con;
                                cmd2.CommandType = CommandType.Text;
                                // truy vấn lấy thông tin của Employee trong csdl
                                cmd2.CommandText = "update tblEmployee set lastdate=GETDATE() where fldEmployeeID=" + dt1.Rows[0][1].ToString();
                                cmd2.ExecuteNonQuery();
                                MessageBox.Show("Thông báo", "Quá thời gian quy định", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                            }
                        }
                        else
                        {
                            /*name = "";
                             * txtmanv.Text = null;
                             * txtHo.Text = null;
                             * txtTen.Text = null;
                             * txtPhong.Text = null;
                             * txtGioitinh.Text = null;
                             * dpBirth.Value = Convert.ToDateTime("dd/MM/yyyy");*/
                        }
                    }
                }
            }
            catch (Exception)
            {
            }
        }
예제 #8
0
        /// <summary>
        /// Runs Haar-cascade face detection on the captured still image
        /// (<c>Original</c>), attempts to recognize each detected face against
        /// the database-backed training set, and — for faces the recognizer
        /// throws on — extracts the face patch into <c>extractedFaces</c> for
        /// browsing via the Next/Previous buttons.
        /// </summary>
        public void CapturedImageFaceDetect()
        {
            // Work on a copy reference of the last captured frame.
            Captured = Original;
            //Captured = Captured.Resize(imageBox2.Height, imageBox2.Width, INTER.CV_INTER_LINEAR);

            CaptureClicked = false;

            if (Captured != null)
            {
                Image <Gray, byte> capturedGrayFrame = Captured.Convert <Gray, byte>();
                // NOTE(review): Size(width, height) is being passed Height first —
                // looks swapped; only harmless when imageBox2 is square. Confirm.
                var capturedFaces = capturedGrayFrame.DetectHaarCascade(Haar, 1.2, 4, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(imageBox2.Height / 5, imageBox2.Width / 5))[0];

                if (capturedFaces.Length > 0)
                {
                    Bitmap   bitmapInput = capturedGrayFrame.ToBitmap();
                    Bitmap   extractedFace;
                    Graphics faceCanvas;
                    // One slot per detected face; see NOTE(review) below about
                    // slots that are never filled.
                    extractedFaces = new Bitmap[capturedFaces.Length];

                    // Enable browsing buttons only when there is more than one face.
                    if (capturedFaces.Length > 1)
                    {
                        btnNext.Enabled     = true;
                        btnPrevious.Enabled = true;
                    }

                    foreach (var face in capturedFaces)
                    {
                        // Crop the face, convert to gray and normalize to 100x100
                        // (the size the recognizer's training images use).
                        result = Captured.Copy(face.rect).Convert <Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
                        Captured.Draw(face.rect, new Bgr(Color.Yellow), 2);

                        MCvTermCriteria termCrit = new MCvTermCriteria(1000, 0.001);

                        //label2.Text = FacesDatabase.facesNamesFromDb.ToString();


                        // Recognition failure is signalled by an exception, and the
                        // catch branch is where unrecognized faces get extracted —
                        // the try/catch here is control flow, not just error handling.
                        try
                        {
                            EigenObjectRecognizer recongnizer = new EigenObjectRecognizer(FacesDatabase.facesFromDb.ToArray(), FacesDatabase.facesNamesFromDb.ToArray(), 2500, ref termCrit);
                            name = recongnizer.Recognize(result).Label;
                            Captured.Draw(name, ref font, new Point(face.rect.X - 2, face.rect.Y - 2), new Bgr(Color.LightGreen));
                        }
                        catch (Exception e) {
                            //MessageBox.Show("Does not exist in the database, error: " + e.ToString());

                            //size of an empty box
                            extractedFace = new Bitmap(face.rect.Width, face.rect.Height);

                            //seting empty face image as canvas for painting
                            faceCanvas = Graphics.FromImage(extractedFace);

                            faceCanvas.DrawImage(bitmapInput, 0, 0, face.rect, GraphicsUnit.Pixel);

                            extractedFace = new Bitmap(extractedFace, new Size(100, 100));

                            extractedFaces[faceNumber] = extractedFace;
                            faceNumber++;
                        }

                        //size of an empty box

                        /* extractedFace = new Bitmap(face.rect.Width, face.rect.Height);
                         *
                         * //seting empty face image as canvas for painting
                         * faceCanvas = Graphics.FromImage(extractedFace);
                         *
                         * faceCanvas.DrawImage(bitmapInput, 0, 0, face.rect, GraphicsUnit.Pixel);
                         *
                         * extractedFace = new Bitmap(extractedFace, new Size(100, 100));
                         *
                         * extractedFaces[faceNumber] = extractedFace;
                         * faceNumber++;  */
                    }
                    // NOTE(review): only the catch branch fills extractedFaces, so
                    // when the first face was recognized, extractedFaces[0] is null
                    // and pbExtractedFace shows nothing — confirm intended.
                    faceNumber            = 0;
                    pbExtractedFace.Image = extractedFaces[faceNumber];
                    if (extractedFaces.Length == 1)
                    {
                        btnNext.Enabled     = false;
                        btnPrevious.Enabled = false;
                    }

                    imageBox2.Image = Captured;
                }
            }
        }
예제 #9
0
        /// <summary>
        /// Per-frame callback: grabs a camera frame, detects faces with the
        /// Haar cascade, recognizes each one against the trained image set,
        /// and updates the UI (preview box, face count, recognized names).
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            NamePersons.Add("");

            // Grab the current frame from the capture device and normalize its size.
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Grayscale copy for detection/recognition.
            gray = currentFrame.Convert <Gray, Byte>();

            // Haar-cascade face detection.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            // Build the recognizer once per frame: the training set cannot change
            // while iterating the faces of a single frame, so constructing it per
            // detected face (as before) repeated the same eigen-decomposition work.
            EigenObjectRecognizer recognizer = null;
            if (trainingImages.ToArray().Length != 0)
            {
                // TermCriteria for recognition; maxIteration = number of trained images.
                MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
                recognizer = new EigenObjectRecognizer(
                    trainingImages.ToArray(),
                    labels.ToArray(),
                    3000,
                    ref termCrit);
            }

            // Process each detected face.
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // Outline the detected face on the color frame.
                currentFrame.Draw(f.rect, new Bgr(Color.Yellow), 2);

                if (recognizer != null)
                {
                    name = recognizer.Recognize(result);

                    // Label each detected-and-recognized face.
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // Number of faces detected in the scene.
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            // Concatenate the names of the recognized persons.
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            // Show the processed and recognized faces.
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            // Clear the list (vector) of names for the next frame.
            NamePersons.Clear();
        }
예제 #10
0
        /// <summary>
        /// Face detection and recognition per camera frame: detects faces with
        /// the Haar cascade, recognizes each against the trained set, records
        /// every match rectangle in <c>foundPeople</c>, and updates the UI.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        private void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            NamePersons.Add("");

            // Grab and normalize the current camera frame.
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Grayscale copy for detection.
            gray = currentFrame.Convert <Gray, Byte>();

            // Haar-cascade face detection.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            foundPeople.Clear();

            // Build the recognizer once per frame: the training set cannot change
            // between faces of the same frame, so constructing it per face (as
            // before) repeated identical work for every detection.
            EigenObjectRecognizer recognizer = null;
            if (trainingImages.ToArray().Length != 0)
            {
                // TermCriteria for recognition; maxIteration = number of trained images.
                MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
                recognizer = new EigenObjectRecognizer(
                    trainingImages.ToArray(),
                    labels.ToArray(),
                    5000, // per the original note, 2500-3000 would be stricter / more accurate
                    ref termCrit);
            }

            // Recognize every detected face.
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // Outline the face.
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (recognizer != null)
                {
                    name = recognizer.Recognize(result);
                    // Remember where each recognized person was found this frame.
                    foundPeople[name] = f.rect;

                    // Label drawing is intentionally disabled in this variant.
                    //currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // Number of faces detected in the scene.
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            // Concatenate recognized names for display.
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            // Show the processed and recognized faces.
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            namess      = names;
            names       = "";
            // Clear the list(vector) of names
            NamePersons.Clear();
        }
예제 #11
0
        /// <summary>
        /// Per-frame callback for attendance taking: detects and recognizes
        /// faces, writes the recognized matric number to the UI, records
        /// attendance at most once per day per student, and announces the
        /// recognized names once per frame.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert <Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                                                  new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // Outline the detected face.
                currentFrame.Draw(f.rect, new Bgr(Color.Gray), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //TermCriteria for face recognition with numbers of trained images like maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref termCrit);

                    name = recognizer.Recognize(result);
                    textBoxMatricNumber.Text = name;

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Green));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // NOTE(review): textBoxMatricNumber.Text was just assigned from
                // `name` above, so this comparison can only fail when no training
                // images exist — confirm the intended check (e.g. a manually
                // typed matric number vs. the recognized one).
                if (name == textBoxMatricNumber.Text)
                {
                    AttendanceTable attendanceTable = new AttendanceTable();
                    string          date            = DateTime.Today.ToShortDateString();
                    string          matricNumber    = name;

                    // Record attendance only once per day per matric number.
                    if (attendanceTable.AlreadyTaken(date, matricNumber) == false)
                    {
                        attendanceTable.TakeAttendance(date, matricNumber);
                        labelAttendance.Text = "Last Attendance Taken: " +
                                               new StudentTable().GetStudentName(matricNumber);
                    }
                    else
                    {
                        // NOTE(review): a modal dialog inside the frame loop will
                        // reappear on every frame — confirm this is intended.
                        MessageBox.Show("Attendance Already Taken");
                    }
                }
                else
                {
                    MessageBox.Show("User not found");
                }

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            //Names concatenation of persons recognized
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            // Announce the result once per frame with the complete name list.
            // (This call previously sat inside the loop above, so it spoke the
            // growing concatenation once for every detected face.)
            if (facesDetected[0].Length > 0)
            {
                talk.SelectVoiceByHints(VoiceGender.Female);
                talk.Speak("Attendance Taken Successfully For " + names);
            }
            //Show the faces procesed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
예제 #12
0
        /// <summary>
        /// Idle/timer callback: captures a frame, finds faces with the Haar
        /// cascade, recognizes each one against the trained set, and refreshes
        /// the preview box and the name/count labels.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            NamePersons.Add("");

            // Capture the current frame and normalize it to 320x240
            // using cubic interpolation.
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Grayscale copy for the detector.
            gray = currentFrame.Convert <Gray, Byte>();

            // Haar-cascade face detection (relative distances of eyes, nose, ...).
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            MCvAvgComp[] faces = facesDetected[0];
            for (int i = 0; i < faces.Length; i++)
            {
                MCvAvgComp detected = faces[i];
                t++;

                // Crop, gray-convert and normalize the face patch to 100x100.
                result = currentFrame.Copy(detected.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                // Outline the detected face on the color frame.
                currentFrame.Draw(detected.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    // Recognition criteria: iterations bounded by the number
                    // of trained images.
                    MCvTermCriteria criteria = new MCvTermCriteria(ContTrain, 0.001);

                    EigenObjectRecognizer eigenRecognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref criteria);

                    name = eigenRecognizer.Recognize(result);

                    // Tag the frame with the recognized label just above the box.
                    currentFrame.Draw(name, ref font, new Point(detected.rect.X - 2, detected.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // Face count shown on the UI.
                label3.Text = faces.Length.ToString();
            }
            t = 0;

            // Join the recognized names into a single display string.
            for (int idx = 0; idx < faces.Length; idx++)
            {
                names = names + NamePersons[idx] + ", ";
            }

            // Publish the processed frame and reset per-frame state.
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            name        = "";
            NamePersons.Clear();
        }
        /// <summary>
        /// Per-frame callback for single-person recognition: detects faces,
        /// recognizes them with an Eigen recognizer (distance threshold 0, so
        /// the nearest trained label is always accepted), and warns when more
        /// than one person is in view.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            NamePersons.Add("");

            //Get the current frame form capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert <Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.1,
                5,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // Outline the detected face.
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //TermCriteria for face recognition with numbers of trained images like maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    // Threshold 0 disables the "unknown face" distance cutoff:
                    // the closest trained face is always returned as the match.
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        0,
                        ref termCrit);

                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            // Warn at most once per frame when more than one person is visible.
            // (This check previously sat inside the face loop, so N faces
            // produced N modal dialogs for every single frame.)
            if (facesDetected[0].Length > 1)
            {
                MessageBox.Show("ERROR : Only One Person at a time !!");
            }

            //Names concatenation of persons recognized
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn];
                nam   = names;
            }
            //Show the faces procesed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
        /// <summary>
        /// Per-frame callback with a configurable preprocessing pipeline
        /// (brightness/contrast, equalization, retinex, blur, binarization,
        /// edge filters) followed by Haar face detection, Eigen recognition,
        /// and a timestamped log entry for the recognized names.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        void FrameGrabber(object sender, EventArgs e)
        {
            try
            {
                label3.Text = "0";
                NamePersons.Add("");

                //Get the current frame form capture device
                currentFrame = grabber.QueryFrame().Resize(imageBoxFrameGrabber.Width, imageBoxFrameGrabber.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                // Brightness offset and contrast scaling from the filter-config form.
                currentFrame += fc.brightness;
                currentFrame  = currentFrame.Mul(fc.contrast / 4);

                if (fc.checkqual.Checked)
                {
                    // In-place histogram equalization.
                    currentFrame._EqualizeHist();
                }

                if (fc.checkRetinexSSR.Checked || fc.checkRetinexHSV.Checked || fc.checkRetinexColors.Checked)
                {
                    if (fc.checkRetinexSSR.Checked)
                    {
                        currentFrame = SingleScaleRetinex(currentFrame, fc.gaussianKernelSize, fc.sigmaGaussian, (int)fc.numericUpDown1.Value, 1);
                    }
                    if (fc.checkRetinexHSV.Checked)
                    {
                        currentFrame = SingleScaleRetinex(currentFrame, fc.gaussianKernelSize, fc.sigmaGaussian, (int)fc.numericUpDown1.Value, 2);
                    }
                    if (fc.checkRetinexColors.Checked)
                    {
                        currentFrame = SingleScaleRetinex(currentFrame, fc.gaussianKernelSize, fc.sigmaGaussian, (int)fc.numericUpDown1.Value, 3);
                    }
                }

                if (fc.checkGaussian.Checked)
                {
                    // In-place Gaussian smoothing (underscore-prefixed = in-place).
                    currentFrame._SmoothGaussian(5);
                }

                if (fc.checkMedian.Checked)
                {
                    // FIX: SmoothMedian returns a new image rather than modifying
                    // in place (unlike _SmoothGaussian above); the previous code
                    // discarded the result, making this filter a no-op.
                    currentFrame = currentFrame.SmoothMedian(5);
                }

                if (fc.checkBinariz.Checked)
                {
                    gray = currentFrame.Convert <Gray, Byte>();
                    gray._ThresholdBinary(new Gray(fc.MinthreasoldBinarisation), new Gray(fc.MaxthreasoldBinarisation));
                    CvInvoke.cvCvtColor(gray, currentFrame, COLOR_CONVERSION.CV_GRAY2BGR);
                }

                if (fc.checkCanny.Checked)
                {
                    gray = currentFrame.Convert <Gray, Byte>();
                    CvInvoke.cvCvtColor(gray.Canny(new Gray(fc.MinthreasoldCanny), new Gray(fc.MaxthreasoldCanny)), currentFrame, COLOR_CONVERSION.CV_GRAY2BGR);
                }

                if (fc.checkSobel.Checked)
                {
                    // NOTE(review): this branch applies Canny, not Sobel — it is a
                    // duplicate of the branch above (copy-paste?). Confirm intent
                    // before changing, e.g. to gray.Sobel(1, 0, 3).
                    gray = currentFrame.Convert <Gray, Byte>();
                    CvInvoke.cvCvtColor(gray.Canny(new Gray(fc.MinthreasoldCanny), new Gray(fc.MaxthreasoldCanny)), currentFrame, COLOR_CONVERSION.CV_GRAY2BGR);
                }

                //Convert it to Grayscale
                gray = currentFrame.Convert <Gray, Byte>();

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                //Action for each element detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t      = t + 1;
                    result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    // Outline the detected face.
                    currentFrame.Draw(f.rect, new Bgr(Color.DarkGray), 2);

                    if (trainingImages.ToArray().Length != 0)
                    {
                        //TermCriteria for face recognition with numbers of trained images like maxIteration
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                        //Eigen face recognizer
                        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                            trainingImages.ToArray(),
                            labels.ToArray(),
                            3000,
                            ref termCrit);

                        name = recognizer.Recognize(result);

                        //Draw the label for each face detected and recognized
                        currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                    }

                    NamePersons[t - 1] = name;
                    NamePersons.Add("");

                    //Set the number of faces detected on the scene
                    label3.Text = facesDetected[0].Length.ToString();
                }
                t = 0;

                //Names concatenation of persons recognized
                for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
                {
                    names = names + NamePersons[nnn] + ", ";
                }
                //Show the faces procesed and recognized
                imageBoxFrameGrabber.Image = currentFrame;

                label4.Text = names;

                // Fall back to a placeholder so every frame logs something.
                if (names == "")
                {
                    names = "Noknow";
                }

                // Always true after the fallback above; kept for structural
                // compatibility with the original logging flow.
                if (names != "")
                {
                    string s = DateTime.Now.ToString("dd MMMM yyyy | HH:mm:ss");

                    textLogs.AppendText(s + " Имя:" + names + "\n");
                }
                names = "";
                //Clear the list(vector) of names
                NamePersons.Clear();
            }
            catch (Exception ex) {
                // Log and swallow: one bad frame must not kill the capture loop.
                Console.WriteLine(ex.Message);
            }
        }
예제 #15
0
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");


            // Grab the current frame from the capture device and downscale it.
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Convert to grayscale for the Haar cascade.
            gray = currentFrame.Convert <Gray, Byte>();

            // Face detector.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            // Process every detected face.
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                // Normalized 100x100 grayscale crop used for recognition.
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // Draw the bounding box around the face.
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);


                if (trainingImages.ToArray().Length != 0)
                {
                    //TermCriteria for face recognition with numbers of trained images like maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref termCrit);

                    name = recognizer.Recognize(result);

                    // Label the recognized face on the color frame.
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");


                // Show the number of faces detected in the scene.
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            // Concatenate the recognized names.
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn];
            }

            // Number of people detected in this frame.
            string Pessoas = facesDetected[0].Length.ToString();

            if (Pessoas != "0")
            {
                Application.Idle -= FrameGrabber;
                grabber.Dispose();
                this.Hide();
                Painel painel = new Painel();

                // The recognized label is stored as "Name|Level"; split it apart.
                char[]   Separador  = { '|' };
                string   NomeENivel = names;
                string[] split      = NomeENivel.Split(Separador, StringSplitOptions.None);

                painel.label4.Text = split[0];
                // BUG FIX: when a face was detected but not recognized, "names"
                // contains no '|' separator and split has a single element, so the
                // old unconditional split[1] threw IndexOutOfRangeException.
                if (split.Length > 1)
                {
                    painel.label3.Text = split[1];
                }

                painel.Closed += (s, args) => this.Close();
                painel.Show();
            }
            //Show the faces procesed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
예제 #16
0
        void FrameGrabber(object sender, EventArgs e)
        {
            try
            {
                label3.Text = "0";
                //label4.Text = "";
                NamePersons.Add("");

                //Get the current frame form capture device
                //currentFrame = grabber.QueryFrame().Rotate(90, new Bgr(255, 255, 255)).Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //Convert it to Grayscale
                gray = currentFrame.Convert <Gray, Byte>();

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face,
                                                                      1.2,
                                                                      10,
                                                                      Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                                                      new Size(20, 20));

                //Action for each element detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t = t + 1;

                    // Normalized 100x100 grayscale crop used for recognition.
                    result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    //draw the face detected in the 0th (gray) channel with blue color
                    currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);


                    if (trainingImages.ToArray().Length != 0)
                    {
                        //TermCriteria for face recognition with numbers of trained images like maxIteration
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                        //Eigen face recognizer
                        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                            trainingImages.ToArray(),
                            labels.ToArray(),
                            5000,
                            ref termCrit);

                        name = recognizer.Recognize(result);

                        //Draw the label for each face detected and recognized
                        currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));

                        // Track how often each label shows up across frames.
                        _timesAppeared.Add(name);
                    }

                    NamePersons[t - 1] = name;
                    NamePersons.Add("");

                    //Set the number of faces detected on the scene
                    label3.Text = facesDetected[0].Length.ToString();

                    if (name != string.Empty)
                    {
                        // Labels are stored as "<userId>.jpeg". BUG FIX: the old
                        // unconditional Substring(0, IndexOf(".jpeg")) threw when
                        // the extension was absent; guard the index and the parse.
                        int extIndex = name.IndexOf(".jpeg");
                        if (extIndex >= 0 && int.TryParse(name.Substring(0, extIndex), out int userId))
                        {
                            _exerciseService.Start(userId);
                        }
                    }
                }
                t = 0;

                //Names concatenation of persons recognized
                for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
                {
                    names = names + NamePersons[nnn] + ", ";
                }
                //Show the faces procesed and recognized
                imageBoxFrameGrabber.Image = currentFrame;
                label4.Text = names;
                names       = "";
                //Clear the list(vector) of names
                NamePersons.Clear();
            } catch (Exception ex)
            {
                // BUG FIX: the old handler only built an unused grouping of
                // _timesAppeared and discarded the exception, silently swallowing
                // every failure. At minimum, record the error.
                Console.WriteLine(ex.Message);
            }
        }
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");


            // Grab the current frame from the capture device and downscale it.
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Detection and recognition operate on a grayscale copy.
            gray = currentFrame.Convert <Gray, Byte>();

            // Haar-cascade face detection.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face, 1.2, 10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            MCvAvgComp[] faces = facesDetected[0];

            // Handle each detected face in turn.
            foreach (MCvAvgComp detected in faces)
            {
                t = t + 1;

                // Crop, grayscale and normalize the face region to 100x100.
                result = currentFrame
                         .Copy(detected.rect)
                         .Convert <Gray, byte>()
                         .Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                // Outline the detected face on the color frame.
                currentFrame.Draw(detected.rect, new Bgr(Color.DarkRed), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    // Termination criteria scaled by the number of trained images.
                    MCvTermCriteria criteria = new MCvTermCriteria(ContTrain, 0.001);

                    // Eigenface recognizer built from the in-memory training set.
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(), labels.ToArray(), 3000, ref criteria);

                    name = recognizer.Recognize(result);

                    // Write the recognized label just above the face rectangle.
                    currentFrame.Draw(name, ref font, new Point(detected.rect.X - 2, detected.rect.Y - 2), new Bgr(Color.DarkRed));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");
                label3.Text = faces.Length.ToString();
            }
            t = 0;

            // Concatenate all recognized names ("A, B, ").
            for (int i = 0; i < faces.Length; i++)
            {
                names = names + NamePersons[i] + ", ";
            }

            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            NamePersons.Clear();
        }
예제 #18
0
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";

            NamePersons.Add("");


            // Capture and shrink the current camera frame.
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Grayscale copy for the detector.
            gray = currentFrame.Convert <Gray, Byte>();

            // Haar-cascade face detection.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            foreach (MCvAvgComp faceComp in facesDetected[0])
            {
                t = t + 1;

                Rectangle faceRect = faceComp.rect;

                // Normalized 100x100 grayscale crop used for recognition.
                result = currentFrame.Copy(faceRect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                // Mark the face on the color frame.
                currentFrame.Draw(faceRect, new Bgr(Color.Red), 2);

                bool hasTrainingData = trainingImages.ToArray().Length != 0;
                if (hasTrainingData)
                {
                    // Term criteria driven by the count of trained images.
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    // Eigenface recognizer over the stored training images.
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref termCrit);

                    name = recognizer.Recognize(result);

                    // Label the face with the recognized name.
                    currentFrame.Draw(name, ref font, new Point(faceRect.X - 2, faceRect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                // Show how many faces are in the scene.
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            // Concatenate all recognized names ("A, B, ").
            for (int idx = 0; idx < facesDetected[0].Length; idx++)
            {
                names = names + NamePersons[idx] + ", ";
            }

            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;

            names = "";

            NamePersons.Clear();
        }
예제 #19
0
        /// <summary>
        /// Processes a captured frame: detects faces, recognizes them against the
        /// trained set, and (in monitor mode) shows the matched person's details.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        private void FrameProcedure(object sender, EventArgs e)
        {
            try
            {
                Users.Add("");
                Frame    = camera.QueryFrame().Resize(480, 360, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                grayFace = Frame.Convert <Gray, byte>();


                /*
                 * Face and face feactures detection data, if you don't want to detect of face feacture comment the line.
                 * this portion is releted to Haarcascade
                 */

                MCvAvgComp[][] facesDetectedNow = grayFace.DetectHaarCascade(faceDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(10, 10));
                //MCvAvgComp[][] eyesDetectedNow = grayFace.DetectHaarCascade(eyesDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(5, 5));
                //MCvAvgComp[][] noseDetectedNow = grayFace.DetectHaarCascade(noseDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(5, 5));
                //MCvAvgComp[][] mouthDetectedNow = grayFace.DetectHaarCascade(mouthDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(5, 5));
                foreach (MCvAvgComp f in facesDetectedNow[0])
                {
                    // Normalized 100x100 grayscale crop used for recognition.
                    result = Frame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    Frame.Draw(f.rect, new Bgr(Color.DarkViolet), 2);
                    if (trainingImages.ToArray().Length != 0)
                    {
                        MCvTermCriteria       termCriteries = new MCvTermCriteria(Count, 0.001);
                        EigenObjectRecognizer recognizer    = new EigenObjectRecognizer(trainingImages.ToArray(), labels.ToArray(), 1500, ref termCriteries);
                        name = recognizer.Recognize(result);
                        // Draw the recognized label above the face rectangle.
                        Frame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Green));
                        if (name != "")
                        {
                            if (monitorMode)
                            {
                                // Show the stored reference photo and person details
                                // looked up from the database by label.
                                imageBoxFaceDetected.Image = new Image <Gray, byte>(Application.StartupPath + $"/Faces/{name}.bmp");
                                Person person = DBConnecttion.getInstance().personInfo(name);
                                labelName.Text     = person.Name;
                                labelUserName.Text = name;
                                labelAge.Text      = person.Age;
                            }
                        }
                    }

                    Users.Add("");
                }
                liveCameraView.Image = Frame;
                names = "";
                Users.Clear();
            }
            catch (Exception ex)
            {
                // BUG FIX: the previous handler swallowed every exception with an
                // effectively empty catch (the MessageBox was commented out and ex
                // unused), hiding camera and database failures. Record the error.
                Console.WriteLine(ex.Message);
            }
        }
예제 #20
0
            // Per-frame camera routine: detect, recognize, update UI, raise event.
            private static void FrameGrabber(object sender, EventArgs e)
            {
                // Ignore frames until capture has been started.
                if (!Started)
                {
                    return;
                }

                // Pull the next frame from the camera and scale it down.
                currentFrame = grabber.QueryFrame().Resize(420, 340, INTER.CV_INTER_CUBIC);

                // Detection operates on a grayscale copy.
                gray = currentFrame.Convert <Gray, byte>();

                // Haar-cascade face detection on the grayscale frame.
                MCvAvgComp[][] detections = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                currentDectectionCount = detections[0].Length;

                foreach (MCvAvgComp candidate in detections[0])
                {
                    // Normalized grayscale crop of the candidate face.
                    result = currentFrame.Copy(candidate.rect).Convert <Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);

                    // Highlight the detected area in red.
                    currentFrame.Draw(candidate.rect, new Bgr(Color.Red), 2);

                    if (trainingImages.ToArray().Length != 0)
                    {
                        // Eigen term criteria driven by the training-set size.
                        MCvTermCriteria criteria = new MCvTermCriteria(trainingImages.Count, 0.001);

                        EigenObjectRecognizer eigen = new EigenObjectRecognizer(
                            trainingImages.ToArray(),
                            labels.ToArray(),
                            3000,
                            ref criteria);

                        // Recognize the current face with the eigen algorithm.
                        currentDectection = eigen.Recognize(result);

                        // Optionally overlay the recognized label for diagnostics.
                        if (ShowLabel)
                        {
                            currentFrame.Draw(currentDectection, ref font, new Point(candidate.rect.X - 2, candidate.rect.Y - 2), new Bgr(Color.LightGreen));
                        }
                    }
                }

                // Push the annotated frame to the UI.
                imageBoxFrameGrabber.Image = currentFrame;

                // Raise the event only for a single, non-empty recognition.
                if (currentDectectionCount == 1 && !string.IsNullOrEmpty(currentDectection))
                {
                    OnFaceRecognized?.Invoke(currentDectection);
                }
            }
 /// <summary>
 /// Per-frame recognition routine: detects faces, recognizes them against the
 /// trained set, logs first-time detections to the database and lists the names.
 /// </summary>
 public void FrameGrabberRecognition(object sender, EventArgs e)
 {
     try
     {
         NamePersons.Add("");
         //Get the current frame form capture device
         currentFrameRecognition = grabberRecognition.QueryFrame().Resize(501, 407, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
         //Convert it to Grayscale
         gray = currentFrameRecognition.Convert <Gray, Byte>();
         //Face Detector
         MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
             faceRecognition,
             1.2,
             2,
             Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
             new System.Drawing.Size(20, 20));
         //Action for each element detected
         foreach (MCvAvgComp f in facesDetected[0])
         {
             t      = t + 1;
             result = currentFrameRecognition.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
             //draw the face detected in the 0th (gray) channel with blue color
             currentFrameRecognition.Draw(f.rect, new Bgr(Color.Red), 2);
             // BUG FIX: this guard had been swallowed into the preceding comment
             // ("//initialize result,t and gray if (...)"), leaving a bare block
             // that built the recognizer unconditionally — even with an empty
             // training set, which fails and made the catch fire every frame.
             if (trainingImages.ToArray().Length != 0)
             {
                 //term criteria against each image to find a match with it, perform different iterations
                 MCvTermCriteria termCrit = new MCvTermCriteria(contTrain, 0.001);
                 //Eigen recognizer over the stored training images
                 EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                     trainingImages.ToArray(),
                     labelsRecognition.ToArray(),
                     3000,
                     ref termCrit);
                 //next step is to name find for recognize face
                 name = recognizer.Recognize(result);
                 //now show recognized person name so
                 currentFrameRecognition.Draw(name, ref font, new System.Drawing.Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));//initalize font for the name captured
             }
             // Persist each person only once per session.
             if (!FacesAlreadyDetected.Contains(name))
             {
                 SaveToDatabase(name, DateTime.Now);
                 FacesAlreadyDetected.Add(name);
             }
             NamePersons[t - 1] = name;
             NamePersons.Add("");
         }
         t = 0;
         //Names concatenation of persons recognized
         for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
         {
             names = names + NamePersons[nnn] + ", ";
         }
         //load haarclassifier and previous saved images to find matches
         imgBox_Detected.Image    = currentFrameRecognition;
         TBx_DetectedStudent.Text = names;
         names = "";
         NamePersons.Clear();
     }
     catch (Exception ex)
     {
         // Record the underlying error before informing the user.
         Console.WriteLine(ex.Message);
         MessageBox.Show("Press Ok to Continue");
     }
 }
예제 #22
0
        /// <summary>
        /// Loads the traing data given a (string) folder location
        /// </summary>
        /// <param name="Folder_location">Directory expected to contain the XML data file.</param>
        /// <returns>true if at least one training image was loaded and the recognizer
        /// was built; otherwise false.</returns>
        private bool LoadTrainingData(string Folder_location)
        {
            string xmlPath = Folder_location + "\\" + XmlVeriDosyasi;

            if (!File.Exists(xmlPath))
            {
                return(false);
            }

            try
            {
                //message_bar.Text = "";
                Names_List.Clear();
                trainingImages.Clear();

                // BUG FIX: the old code opened a FileStream without a using-block
                // (leaked the handle if an exception was thrown before Close) and
                // ignored the return value of Stream.Read, which may legally read
                // fewer bytes than requested. File.ReadAllBytes does both correctly.
                byte[] xmlBytes = File.ReadAllBytes(xmlPath);

                using (MemoryStream xmlStream = new MemoryStream(xmlBytes))
                using (XmlReader xmlreader = XmlTextReader.Create(xmlStream))
                {
                    while (xmlreader.Read())
                    {
                        if (xmlreader.IsStartElement())
                        {
                            switch (xmlreader.Name)
                            {
                            case "NAME":
                                // Each NAME element contributes one label.
                                if (xmlreader.Read())
                                {
                                    Names_List.Add(xmlreader.Value.Trim());
                                    NumLabels += 1;
                                }
                                break;

                            case "FILE":
                                if (xmlreader.Read())
                                {
                                    // NOTE(review): image paths are resolved against
                                    // the Dizin field, not Folder_location, so loading
                                    // breaks if the training folder is moved (original
                                    // comment: "PROBLEM HERE IF TRAININGG MOVED").
                                    trainingImages.Add(new Image <Gray, byte>(Dizin + "\\" + xmlreader.Value.Trim()));
                                }
                                break;
                            }
                        }
                    }
                }
                ContTrain = NumLabels;

                if (trainingImages.ToArray().Length != 0)
                {
                    //Eigen face recognizer
                    recognizer = new EigenObjectRecognizer(trainingImages.ToArray(),
                                                           Names_List.ToArray(), 5000, ref termCrit); //5000 default
                    return(true);
                }

                return(false);
            }
            catch (Exception ex)
            {
                Error = ex.ToString();
                return(false);
            }
        }
예제 #23
0
        /// <summary>
        /// Per-frame routine: detects faces, recognizes known ones, and — while
        /// capture mode is active — records samples of unknown faces for training.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        private void FrameProcedure(object sender, EventArgs e)
        {
            Users.Add("");
            Frame    = camera.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            grayFace = Frame.Convert <Gray, Byte>();

            // Haar-cascade face detection on the grayscale frame.
            MCvAvgComp[][] facesDetectedNow = grayFace.DetectHaarCascade(
                faceDetected,
                1.1,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));


            foreach (MCvAvgComp f in facesDetectedNow[0])
            {
                // Normalized 100x100 grayscale crop used for recognition.
                result = Frame.Copy(f.rect).Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                Frame.Draw(f.rect, new Bgr(Color.Green), 2);

                if (trainningImages.ToArray().Length != 0)
                {
                    MCvTermCriteria       termCriteria = new MCvTermCriteria(count, 0.001);
                    EigenObjectRecognizer recognizer   = new EigenObjectRecognizer(trainningImages.ToArray(), labels.ToArray(),
                                                                                   2500, ref termCriteria);
                    name = recognizer.Recognize(result);
                    if (name.Length != 0)
                    {
                        // Known face: draw the recognized label.
                        Frame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Green));
                    }
                    else
                    {
                        // Unknown face: label it and, in capture mode, save a sample.
                        Frame.Draw("Unknown", ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Green));
                        CaptureUnknownFace();
                    }
                }
                else
                {
                    // No training data yet: every face is unknown; optionally capture.
                    Frame.Draw("Unknown", ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Green));
                    CaptureUnknownFace();
                }

                //Users[t - 1] = name;
                Users.Add("");
            }

            cameraBox.Image = Frame;
            names           = "";
            Users.Clear();
        }

        // Captures up to 20 samples of the current unknown face per session while
        // capture_mode is on: appends the normalized crop (held in "result") to the
        // training set and rewrites the Faces.txt index plus every face bitmap.
        // This code was previously duplicated verbatim in two branches of
        // FrameProcedure; the unused DetectHaarCascade(Size(40,40)) call whose
        // result was never read has been dropped.
        // NOTE(review): rewriting all bitmaps on every capture is O(n²) across a
        // session; acceptable for small training sets, worth revisiting otherwise.
        private void CaptureUnknownFace()
        {
            if (capture_mode == true && capture_count < 20)
            {
                count    = count + 1;
                grayFace = camera.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                trainedFace = result.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                trainningImages.Add(trainedFace);
                labels.Add(tbxName.Text);
                File.WriteAllText(Application.StartupPath + "/Faces/Faces.txt", trainningImages.ToArray().Length.ToString() + ",");
                capture_count++;
                //lblFace.Text = "# of faces : " + capture_count;
                for (int i = 1; i < trainningImages.ToArray().Length + 1; i++)
                {
                    trainningImages.ToArray()[i - 1].Save(Application.StartupPath + "/Faces/face" + i + ".bmp");
                    File.AppendAllText(Application.StartupPath + "/Faces/Faces.txt", labels.ToArray()[i - 1] + ",");
                }
            }
            else
            {
                capture_count = 0;
                capture_mode  = false;
                //btnCapture.Text = "Start";
                //lblFace.Text = "# of faces : 0";
            }
        }
        private void FrameGrabber(object sender, EventArgs e)
        {
            // Per-frame handler: grabs a frame, mirrors it, detects faces with a
            // Haar cascade, recognizes each one against the trained set and
            // updates the UI labels. While no face is detected the counter
            // label shows "0".
            lblNumeroDetect.Text = "0";
            // Seed the list so NamePersons[t - 1] below is always a valid index.
            NamePersons.Add("");

            try
            {
                currentFrame = grabber.QueryFrame().Resize(400, 300, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame._Flip(FLIP.HORIZONTAL);
                // Haar detection operates on a grayscale image.
                gray = currentFrame.Convert <Gray, Byte>();

                // Detect every face in the current frame.
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));

                // Process each detected face.
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t      = t + 1;
                    result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
                    // Outline the detected face.
                    currentFrame.Draw(f.rect, new Bgr(Color.Aquamarine), 1);

                    if (trainingImages.ToArray().Length != 0)
                    {
                        // Termination criteria scaled by the number of trained images.
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                        // Eigen face recognizer (no distance threshold: always
                        // returns the label of the closest trained face).
                        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(trainingImages.ToArray(), labels.ToArray(), ref termCrit);

                        name = recognizer.Recognize(result);

                        // Label the recognized face on the frame.
                        currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Black));
                    }

                    NamePersons[t - 1] = name;
                    NamePersons.Add("");

                    // Show the number of faces currently in the scene.
                    lblNumeroDetect.Text = facesDetected[0].Length.ToString();
                    label1.Text          = name;
                }
                t = 0;

                // Concatenate the recognized names.
                // BUG FIX: the original assigned "names + NamePersons[nnn]" to
                // lblNadie on every iteration, so only the last name survived.
                // Build the full list locally and assign it once (this also
                // clears the value when no face is present).
                string recognizedNames = "";
                for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
                {
                    recognizedNames = recognizedNames + NamePersons[nnn] + ", ";
                }
                lblNadie = recognizedNames;

                // Show the processed and recognized frame.
                imageBoxFrameGrabber.Image = currentFrame;
                name = "";

                // Clear the list of names for the next frame.
                NamePersons.Clear();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
// ----- Example #25 -----
        private void FrameProcedure2(object sender, EventArgs e)
        {
            // Per-frame handler: detects faces, recognizes known ones and logs
            // an attendance row to the database after 20 consecutive
            // recognitions; unknown faces are optionally captured as new
            // training samples (up to 20 per capture session).
            Users.Add("");
            Frame    = camera.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            grayFace = Frame.Convert <Gray, Byte>();

            MCvAvgComp[][] facesDetectedNow = grayFace.DetectHaarCascade(
                faceDetected,
                1.1,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            foreach (MCvAvgComp f in facesDetectedNow[0])
            {
                result = Frame.Copy(f.rect).Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                Frame.Draw(f.rect, new Bgr(Color.Green), 2);

                if (trainningImages.ToArray().Length != 0)
                {
                    MCvTermCriteria       termCriteria = new MCvTermCriteria(count, 0.001);
                    // Eigen recognizer with a 2500 distance threshold: returns ""
                    // when no trained face is close enough.
                    EigenObjectRecognizer recognizer   = new EigenObjectRecognizer(trainningImages.ToArray(), labels.ToArray(),
                                                                                   2500, ref termCriteria);
                    name = recognizer.Recognize(result);
                    if (name.Length != 0)
                    {
                        Frame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Green));

                        EmpID++;
                        // Only record after 20 consecutive recognitions, to
                        // avoid logging on a single noisy frame.
                        if (EmpID == 20)
                        {
                            RecordAttendance();
                            EmpID = 0;
                        }
                    }
                    else
                    {
                        HandleUnknownFace(f.rect);
                    }
                }
                else
                {
                    // No trained faces at all: everything is unknown.
                    HandleUnknownFace(f.rect);
                }

                Users.Add("");
            }

            cameraBox.Image = Frame;
            names           = "";
            Users.Clear();
        }

        // Adds the current recognition to the attendance table and persists the
        // grid rows to the database.
        private void RecordAttendance()
        {
            table.Rows.Add(int.Parse(name), DateTime.Now.ToString("HH:mm:ss"));
            dataGridView1.DataSource = table;

            // NOTE(review): connection string is hard-coded; consider moving it
            // to configuration.
            string sqlDataSource = "Data Source=LAPTOP-3KQ0AE11\\MSSQLSERVER01;Initial Catalog=GUYs;Integrated Security=True;";
            using (SqlConnection con = new SqlConnection(sqlDataSource))
            {
                // Open once for all rows instead of per row.
                con.Open();
                for (int i = 0; i < dataGridView1.Rows.Count - 2; i++)
                {
                    // SECURITY FIX: the original concatenated grid-cell values
                    // straight into the SQL text (injection risk); use
                    // parameters instead. ExecuteNonQuery replaces the original
                    // ExecuteReader + table.Load, which loaded nothing because
                    // an INSERT produces no result set.
                    using (SqlCommand cmd = new SqlCommand(
                               "INSERT INTO dbo.UserCheck VALUES (@id, CAST(GETDATE() AS DATE), @time, '');", con))
                    {
                        cmd.Parameters.AddWithValue("@id", dataGridView1.Rows[i].Cells[0].Value ?? (object)DBNull.Value);
                        cmd.Parameters.AddWithValue("@time", dataGridView1.Rows[i].Cells[1].Value ?? (object)DBNull.Value);
                        cmd.ExecuteNonQuery();
                    }
                }
            }
        }

        // Draws an "Unknown" label at the face position and, while capture mode
        // is active, stores up to 20 cropped faces as new training samples,
        // persisting them under /Faces. (This logic was duplicated verbatim in
        // two branches of the original method.)
        private void HandleUnknownFace(Rectangle faceRect)
        {
            Frame.Draw("Unknown", ref font, new Point(faceRect.X - 2, faceRect.Y - 2), new Bgr(Color.Green));

            if (capture_mode == true && capture_count < 20)
            {
                count    = count + 1;
                grayFace = camera.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                trainedFace = result.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                trainningImages.Add(trainedFace);
                labels.Add(tbxName.Text);
                // First write the face count, then append every label.
                File.WriteAllText(Application.StartupPath + "/Faces/Faces.txt", trainningImages.ToArray().Length.ToString() + ",");
                capture_count++;
                for (int i = 1; i < trainningImages.ToArray().Length + 1; i++)
                {
                    trainningImages.ToArray()[i - 1].Save(Application.StartupPath + "/Faces/face" + i + ".bmp");
                    File.AppendAllText(Application.StartupPath + "/Faces/Faces.txt", labels.ToArray()[i - 1] + ",");
                }
            }
            else
            {
                // Capture session finished (or not active): reset the mode.
                capture_count = 0;
                capture_mode  = false;
            }
        }
// ----- Example #26 -----
        void FrameGrabber(object sender, EventArgs e)
        {
            // Per-frame handler that works from either a still picture or a
            // video device (selected by strPicVid). Detection, capture-for-
            // training and recognition are each gated by their own flag
            // (blnDetection / blnRecSetting+blnSelect / blnRecognition).
            label3.Text = "0";
            // Seed the list so NamePersons[t - 1] below is always a valid index.
            NamePersons.Add("");

            //Get the current frame from the selected source (picture or video)
            if (strPicVid == "Picture")
            {
                currentFrame = new Image <Bgr, Byte>(strBrowseFileName).Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            if (strPicVid == "Video")
            {
                currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }

            //Convert it to Grayscale (Haar detection works on gray images)
            gray = currentFrame.Convert <Gray, Byte>();

            if (blnDetection == true)
            {
                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                //Action for each element detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t      = t + 1;
                    // Crop the face to the 100x100 gray format the recognizer expects.
                    result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    //draw the face detected in the 0th (gray) channel with red color
                    currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                    //This gets the face so it can be added to the db
                    if (blnRecSetting == false)
                    {
                        if (blnSelect == true)
                        {
                            // NOTE(review): self-assignment — presumably kept to
                            // freeze/refresh the preview; confirm before removing.
                            imageBox1.Image = imageBox1.Image;
                            //this stops the FrameGrabber so that detection can end cleanly until the face is added to the db
                            //or continue is selected
                            Application.Idle -= FrameGrabber;
                            if (strPicVid == "Video")
                            {
                                // Release the capture device while paused.
                                grabber.Dispose();
                            }
                        }
                        else
                        {
                            // Show the live cropped face as the candidate to add.
                            imageBox1.Image = result;
                        }
                    }
                    else
                    {
                        imageBox1.Image = null;
                    }

                    if (blnRecognition == true)
                    {
                        if (trainingImages.ToArray().Length != 0)
                        {
                            //TermCriteria for face recognition with numbers of trained images like maxIteration
                            MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
                            //Eigen face recognizer; the 3000 threshold makes
                            //Recognize return "" when nothing is close enough
                            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                                trainingImages.ToArray(),
                                labels.ToArray(),
                                3000,
                                ref termCrit);
                            name = recognizer.Recognize(result);
                            //Draw the label for each face detected and recognized
                            currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                        }
                        NamePersons[t - 1] = name;
                        NamePersons.Add("");
                    }

                    //Set the number of faces detected on the scene
                    label3.Text = facesDetected[0].Length.ToString();
                }
                t = 0;

                //Names concatenation of persons recognized
                for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
                {
                    names = names + NamePersons[nnn] + ", ";
                }
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names       = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
        void FrameGrabber(object sender, EventArgs e)
        {
            // Per-frame handler: recognizes faces and, once a face has been
            // recognized in exactly 5 frames, writes a flag file and exits the
            // application.
            // Seed the list so NamePersons[t - 1] below is always a valid index.
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale (Haar detection works on gray images)
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw the face detected in the 0th (gray) channel with red color
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //TermCriteria for face recognition with numbers of trained images like maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer; the 3000 threshold makes Recognize
                    //return "" when no trained face is close enough
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);
                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");
                // Count frames in which some face was recognized; 'a' flags that
                // at least one recognition ever happened.
                if (!name.Equals(""))
                {
                    b += 1;
                    a = true;
                }
            }
            t = 0;

            // (The original had an empty loop over facesDetected[0] here — dead
            // code, removed.)

            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            names = "";
            //Clear the list(vector) of names
            NamePersons.Clear();

            // After exactly 5 successful recognitions, persist the result and quit.
            if (a && b == 5)
            {
                File.WriteAllText(@"C:\Users\kiat\Desktop\abc2.txt", "True");
                System.Windows.Forms.Application.Exit();
            }
        }
        public void FrameGrabber(object sender, EventArgs e)
        {
            // Per-frame handler: detects faces, recognizes both the whole face
            // and the eye strip with separate Eigen recognizers, and only shows
            // a name when the two recognizers agree. Optionally (checkBox2)
            // also detects and annotates the individual eyes.
            eyesfocus = checkBox2.Checked;
            label3.Text = "0";
            label4.Text = "";

            // Seed the lists so the [t - 1] indexing below is always valid.
            NamePersons.Add("Khong x.dinh");
            NamePersonsEyes.Add("Khong x.dinh");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(600, 400, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            // BUG FIX: the original used '||', which is a tautology (a string
            // cannot equal all three values at once) so the brightness
            // adjustment ran unconditionally. '&&' skips it for the sentinel
            // values "00", "1" and "-1" as intended.
            if (label11.Text != "00" && label11.Text != "1" && label11.Text != "-1")
            {
                currentFrame = new Image <Bgr, Byte>(AdjustBrightness(currentFrame.ToBitmap(), bri));
            }

            //Convert it to Grayscale (Haar detection works on gray images)
            gray = currentFrame.Convert <Gray, Byte>();
            MCvAvgComp[][] facesDetected;
            //Face Detector: checkBox1 chooses between finding every face and
            //only the biggest one.
            if (checkBox1.Checked)
            {
                facesDetected = gray.DetectHaarCascade(
                    face,
                    1.1,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));
            }
            else
            {
                facesDetected = gray.DetectHaarCascade(
                    face,
                    1.1,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.FIND_BIGGEST_OBJECT,
                    new Size(20, 20));
            }

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                imageBox3.Image = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // The eye strip is taken as the middle 3/11 band of the face
                // rectangle, starting 3/11 of the way down.
                Int32     yCoordStartSearchEyes   = f.rect.Top + (f.rect.Height * 3 / 11);
                Point     startingPointSearchEyes = new Point(f.rect.X, yCoordStartSearchEyes);
                Size      searchEyesAreaSize      = new Size(f.rect.Width, (f.rect.Height * 3 / 11));
                Rectangle possibleROI_eyes        = new Rectangle(startingPointSearchEyes, searchEyesAreaSize);
                t = t + 1;
                tEyes++;
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                // Eye-strip sample for the eyes recognizer (100x30 gray).
                resultEyes = currentFrame.Copy(possibleROI_eyes).Convert <Gray, byte>().Resize(100, 30, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //Draw the face rectangle plus the yellow callout lines for it.
                currentFrame.Draw(f.rect, new Bgr(Color.Yellow), 1);
                currentFrame.Draw(new CircleF(new PointF(f.rect.X, f.rect.Y), 1), new Bgr(Color.Yellow), 10);
                currentFrame.Draw(new LineSegment2D(new Point(f.rect.X, f.rect.Y), new Point(f.rect.X - 80, f.rect.Y)), new Bgr(Color.Yellow), 2);
                currentFrame.Draw(new LineSegment2D(new Point(f.rect.X - 80, f.rect.Y), new Point(f.rect.X - 80, f.rect.Y - 30)), new Bgr(Color.Yellow), 1);
                currentFrame.Draw(new LineSegment2D(new Point(f.rect.X - 80, f.rect.Y - 30), new Point(f.rect.X - 150, f.rect.Y - 30)), new Bgr(Color.Yellow), 1);
                currentFrame.Draw(new LineSegment2D(new Point(f.rect.X - 80, f.rect.Y - 30), new Point(f.rect.X - 70, f.rect.Y - 30)), new Bgr(Color.Yellow), 1);

                //Draw the eye-area rectangle plus the pink callout lines for it.
                currentFrame.Draw(possibleROI_eyes, new Bgr(Color.DeepPink), 1);
                currentFrame.Draw(new CircleF(new PointF(possibleROI_eyes.X, possibleROI_eyes.Y + possibleROI_eyes.Height), 1), new Bgr(Color.DeepPink), 5);
                currentFrame.Draw(new LineSegment2D(new Point(possibleROI_eyes.X, possibleROI_eyes.Y + possibleROI_eyes.Height), new Point(possibleROI_eyes.X - 40, possibleROI_eyes.Y + possibleROI_eyes.Height)), new Bgr(Color.DeepPink), 1);
                currentFrame.Draw(new LineSegment2D(new Point(possibleROI_eyes.X - 40, possibleROI_eyes.Y + possibleROI_eyes.Height), new Point(possibleROI_eyes.X - 40, possibleROI_eyes.Y + possibleROI_eyes.Height + 120)), new Bgr(Color.DeepPink), 1);
                currentFrame.Draw(new LineSegment2D(new Point(possibleROI_eyes.X - 40, possibleROI_eyes.Y + possibleROI_eyes.Height + 120), new Point(possibleROI_eyes.X, possibleROI_eyes.Y + possibleROI_eyes.Height + 120)), new Bgr(Color.DeepPink), 1);
                currentFrame.Draw(new LineSegment2D(new Point(possibleROI_eyes.X - 40, possibleROI_eyes.Y + possibleROI_eyes.Height + 120), new Point(possibleROI_eyes.X - 80, possibleROI_eyes.Y + possibleROI_eyes.Height + 120)), new Bgr(Color.DeepPink), 1);
                currentFrame.Draw("Eyes Area", ref font, new Point(possibleROI_eyes.X - 80, possibleROI_eyes.Y + possibleROI_eyes.Height + 130), new Bgr(Color.DeepPink));
                if (trainingImages.ToArray().Length != 0)
                {
                    //TermCriteria for face recognition with numbers of trained images like maxIteration
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer; the 3000 threshold makes Recognize
                    //return "" when no trained face is close enough
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        3000,
                        ref termCrit);

                    name = recognizer.Recognize(result);

                    //Second recognizer for the eye strip; a name is only shown
                    //when both recognizers agree.
                    if (trainingImagesEyes.ToArray().Length != 0)
                    {
                        MCvTermCriteria termCritEyes = new MCvTermCriteria(ContTrainEyes, 0.001);

                        //Eigen eyes recognizer
                        EigenObjectRecognizer recognizerEyes = new EigenObjectRecognizer(
                            trainingImagesEyes.ToArray(),
                            labelsEyes.ToArray(),
                            3000,
                            ref termCritEyes);
                        nameEyes = recognizerEyes.Recognize(resultEyes);
                        if (name == nameEyes)
                        {
                            currentFrame.Draw(nameEyes, ref font, new Point(f.rect.X - 150, f.rect.Y - 35), new Bgr(Color.Red));
                        }
                        else
                        {
                            currentFrame.Draw("noname", ref font, new Point(f.rect.X - 150, f.rect.Y - 35), new Bgr(Color.Red));
                        }
                    }
                }
                else
                {
                    currentFrame.Draw("noname", ref font, new Point(f.rect.X - 150, f.rect.Y - 35), new Bgr(Color.Red));
                }

                NamePersons[t - 1]     = name;
                NamePersonsEyes[t - 1] = nameEyes;
                NamePersons.Add("");
                NamePersonsEyes.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();

                //Restrict eye detection to the face rectangle, then clear the ROI.
                gray.ROI = f.rect;
                MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                    eye,
                    1.1,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp ey in eyesDetected[0])
                {
                    // Eye coordinates are relative to the ROI; shift back to
                    // full-frame coordinates.
                    Rectangle eyeRect = ey.rect;
                    eyeRect.Offset(f.rect.X, f.rect.Y);

                    if (eyesfocus)
                    {
                        currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 1);
                        // Left half of the face rectangle => label as right eye,
                        // otherwise as left eye (mirror view).
                        if (eyeRect.X < f.rect.X + (f.rect.Width / 2))
                        {
                            currentFrame.Draw(new CircleF(new PointF(eyeRect.X, eyeRect.Y), 1), new Bgr(Color.Blue), 5);
                            currentFrame.Draw(new LineSegment2D(new Point(eyeRect.X, eyeRect.Y), new Point(eyeRect.X - 100, eyeRect.Y)), new Bgr(Color.Blue), 1);
                            currentFrame.Draw(new LineSegment2D(new Point(eyeRect.X - 100, eyeRect.Y), new Point(eyeRect.X - 100, eyeRect.Y + 15)), new Bgr(Color.Blue), 1);
                            currentFrame.Draw(new LineSegment2D(new Point(eyeRect.X - 100, eyeRect.Y + 15), new Point(eyeRect.X - 170, eyeRect.Y + 15)), new Bgr(Color.Blue), 1);
                            currentFrame.Draw("right eye", ref font, new Point(eyeRect.X - 185, eyeRect.Y + 10), new Bgr(Color.Blue));
                        }
                        else
                        {
                            currentFrame.Draw(new CircleF(new PointF(eyeRect.X + eyeRect.Width, eyeRect.Y), 1), new Bgr(Color.Blue), 5);
                            currentFrame.Draw(new LineSegment2D(new Point(eyeRect.X + eyeRect.Width, eyeRect.Y), new Point(eyeRect.X + eyeRect.Width, eyeRect.Y - 100)), new Bgr(Color.Blue), 1);
                            currentFrame.Draw(new LineSegment2D(new Point(eyeRect.X + eyeRect.Width, eyeRect.Y - 100), new Point(eyeRect.X + eyeRect.Width + 40, eyeRect.Y - 100)), new Bgr(Color.Blue), 1);
                            currentFrame.Draw(new LineSegment2D(new Point(eyeRect.X + eyeRect.Width + 40, eyeRect.Y - 100), new Point(eyeRect.X + eyeRect.Width + 40, eyeRect.Y - 90)), new Bgr(Color.Blue), 1);
                            currentFrame.Draw(new LineSegment2D(new Point(eyeRect.X + eyeRect.Width + 40, eyeRect.Y - 90), new Point(eyeRect.X + eyeRect.Width + 140, eyeRect.Y - 90)), new Bgr(Color.Blue), 1);
                            currentFrame.Draw("left eye", ref font, new Point(eyeRect.X + eyeRect.Width + 45, eyeRect.Y - 95), new Bgr(Color.Blue));
                        }
                    }
                }
            }
            t     = 0;
            tEyes = 0;

            //Names concatenation of persons recognized
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names     = names + NamePersons[nnn] + ", ";
                namesEyes = namesEyes + NamePersonsEyes[nnn] + ", ";
            }

            //Show the faces processed and recognized (only when live view is on)
            if (camlive)
            {
                imageBoxFrameGrabber.Image = currentFrame;
            }
            label4.Text = names;
            names       = "";
            // BUG FIX: namesEyes was accumulated every frame but never reset,
            // so it grew without bound; clear it alongside names.
            namesEyes   = "";
            nameEyes    = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
            NamePersonsEyes.Clear();
        }
        private void FrameGrabber(object sender, EventArgs e)
        {
            // Per-frame handler: detects faces, recognizes each against the
            // trained set and shows the count and name in the UI labels.
            lblCantidad.Text = "0";
            // Seed the list so NamePersons[t - 1] below is always a valid index.
            NamePersons.Add("");
            try
            {
                currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // Convert to grayscale (Haar detection works on gray images).
                gray = currentFrame.Convert <Gray, Byte>();

                // Face detector (scale factor 1.5, was 1.2 originally per the
                // author's note).
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.5, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));

                // Process each detected face.
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t      = t + 1;
                    result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
                    // Outline the detected face.
                    currentFrame.Draw(f.rect, new Bgr(Color.Blue), 1);

                    if (trainingImages.ToArray().Length != 0)
                    {
                        // Termination criteria scaled by the number of trained
                        // images (epsilon 0.88; the author's note says it was
                        // 0.001 originally).
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.88);
                        // Eigen face recognizer (no distance threshold: always
                        // returns the closest label). The original also
                        // allocated an unused Image<Gray,byte>[] here — removed.
                        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(trainingImages.ToArray(), labels.ToArray(), ref termCrit);

                        name = recognizer.Recognize(result);

                        // Label the recognized face on the frame.
                        currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Blue));
                    }

                    NamePersons[t - 1] = name;
                    NamePersons.Add("");
                    // Show the number of faces currently in the scene.
                    lblCantidad.Text = facesDetected[0].Length.ToString();
                    lblNombre.Text   = name;
                }
                t = 0;

                // Concatenate the names of all recognized faces.
                for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
                {
                    names = names + NamePersons[nnn] + ", ";
                }

                // Show the processed and recognized frame.
                imageBox1.Image = currentFrame;
                //lblNombre.Text = names;
                name = "";
                // Clear the list of names for the next frame.
                NamePersons.Clear();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
        /// <summary>
        /// Timer/event callback: grabs one frame from the capture device, detects faces
        /// with the Haar cascade, recognizes each face against the trained Eigen set,
        /// and updates the UI labels and image box.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        private void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            NamePersons.Add("");

            //Get the current frame from the capture device, normalized to 320x240
            currentFrame = grabber.QueryFrame().Resize(320, 240, INTER.CV_INTER_CUBIC);

            //Convert it to grayscale for detection/recognition
            gray = currentFrame.Convert<Gray, byte>();

            //Face detector (index 0 holds the detections for the single channel)
            var facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            //Build the recognizer ONCE per frame: the training set cannot change while
            //we iterate the faces of a single frame, so constructing it inside the
            //foreach (as before) repeated the full Eigen training for every face.
            EigenObjectRecognizer recognizer = null;
            var trainedFaces = trainingImages.ToArray();
            if (trainedFaces.Length != 0)
            {
                //TermCriteria for face recognition; number of trained images acts as maxIteration
                var termCrit = new MCvTermCriteria(ContTrain, 0.001);
                recognizer = new EigenObjectRecognizer(
                    trainedFaces,
                    labels.ToArray(),
                    3000,
                    ref termCrit);
            }

            //Action for each element detected
            foreach (var f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
                //Draw the detected face rectangle on the color frame
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (recognizer != null)
                {
                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));

                    //Add present (not-yet-listed) faces to the attendance label.
                    //NOTE(review): Contains("") is always true, so an empty name is never appended.
                    if (!label8.Text.Contains(name))
                    {
                        label8.Text = label8.Text + " " + name;
                    }
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            //Names concatenation of persons recognized (comma-separated, trailing ", " kept
            //to preserve the original label output)
            for (var nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }

            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;
            names = "";
            //Clear the list of names for the next frame
            NamePersons.Clear();
        }
예제 #31
0
        /// <summary>
        /// Timer/event callback: grabs one frame, detects faces with the Haar cascade,
        /// recognizes them against the trained Eigen set, records the detection time in
        /// the database (via <c>c.InsertarHoradetect</c>) when exactly one face is seen,
        /// and paints the form background green/red as visual feedback.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        void FrameGrabber(object sender, EventArgs e)
        {
            NamePersons.Add("");
            //Reset the detected-faces counter for this frame
            label2.Text = "0";

            //Get the current frame from the capture device, normalized to 320x240
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to grayscale for detection/recognition
            gray = currentFrame.Convert <Gray, Byte>();

            //Face detector (index 0 holds the detections for the single channel)
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.3,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                DateTime horadetectada = DateTime.Now;
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //Draw the detected face rectangle on the color frame
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                //BUG FIX: this guard had been swallowed into the preceding comment
                //("...if (trainingImages.ToArray().Length != 0)"), leaving a bare block
                //that ran the recognizer unconditionally and failed when no faces were
                //trained yet. The guard is restored here.
                if (trainingImages.ToArray().Length != 0)
                {
                    //TermCriteria against each image to find a match; performs different iterations
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
                    //Eigen face recognizer built from the trained set
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                        trainingImages.ToArray(),
                        labels.ToArray(),
                        1000,
                        ref termCrit);
                    //Name of the recognized face
                    name = recognizer.Recognize(result);
                    //Show the recognized person's name on the frame
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                    //BUG FIX: the unconditional c.InsertarHoradetect(name, horadetectada)
                    //that used to live here duplicated the insert done below for
                    //single-face frames, producing two DB rows per detection; removed.
                }
                NamePersons[t - 1] = name;
                NamePersons.Add("");
                //Set the number of faces detected on the scene
                label2.Text = facesDetected[0].Length.ToString();

                //Record the detection time only when exactly one face is in the scene
                if (label2.Text == "1")
                {
                    c.InsertarHoradetect(name, horadetectada);
                    //NOTE(review): a modal MessageBox per grabbed frame blocks the capture
                    //loop; consider reporting through a status label instead.
                    MessageBox.Show(" Te la inserto toda");

                    this.BackColor = System.Drawing.Color.Lime;
                }

                else
                {
                    MessageBox.Show(" No se logro Insertar nada ctm");
                    this.BackColor = System.Drawing.Color.Red;
                }
            }
            //Reset per-frame state (matches the other FrameGrabber variants; without this,
            //t and NamePersons grew without bound across frames)
            t = 0;
            imageBox1.Image = currentFrame;
            //Clear the list of names for the next frame
            NamePersons.Clear();
        }