Example #1
0
        private void face_add_Click(object sender, RoutedEventArgs e)
        {
            // Captures the first face visible on the camera, adds it to the training
            // set under the entered name and persists the set to /TrainedFaces.
            try
            {
                //Trained face counter
                ContTrain = ContTrain + 1;

                //Get a gray frame from capture device
                gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new System.Drawing.Size(20, 20));

                //Keep only the first face detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = currentFrame.Copy(f.rect).Convert <Gray, byte>();
                    break;
                }

                //Resize the detected face so every trained image shares the same
                //100x100 size (cubic interpolation).
                //BUG FIX: the original resized the stale `result` field, discarding
                //the face just captured into TrainedFace above.
                TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                trainingImages.Add(TrainedFace);
                labels.Add(person_name.Text);

                //Show face added in gray scale
                train_image.Source = ImageSourceForBitmap(TrainedFace.ToBitmap());

                //Persist count + labels. Materialize the collections once instead of
                //calling ToArray() on every loop iteration (was O(n^2) copying).
                var    images  = trainingImages.ToArray();
                var    names   = labels.ToArray();
                string baseDir = AppDomain.CurrentDomain.BaseDirectory + "/TrainedFaces/";

                //Write the number of trained faces in a text file for further load
                File.WriteAllText(baseDir + "TrainedLabels.txt", images.Length.ToString() + "%");

                //Write the labels of trained faces in a text file for further load
                for (int i = 1; i < images.Length + 1; i++)
                {
                    images[i - 1].Save(baseDir + "face" + i + ".bmp");
                    File.AppendAllText(baseDir + "TrainedLabels.txt", names[i - 1] + "%");
                }

                MessageBox.Show(person_name.Text + "´s face detected and added :)", "Training OK");
            }
            catch
            {
                //Any failure (no grabber, detector error) is reported the same way.
                MessageBox.Show("Enable the face detection first", "Training Fail");
            }
        }
Example #2
0
        public void addEmp()
        {
            // Inserts the current trained face plus the employee details entered on
            // the form into the `train` table. The shared connection (`connect`)
            // must already be open when this is called.
            string query = "INSERT INTO train(empcode, empname, date, imagee) VALUES (@empcode, @empname, @date, @imagee)";

            // Serialize the captured face bitmap to a byte array for the image column.
            ImageConverter converter = new ImageConverter();
            byte[]         imagepic  = (byte[])converter.ConvertTo(TrainedFace.ToBitmap(), typeof(byte[]));

            // Dispose the command deterministically (the original leaked it).
            using (SqlCommand cmd = new SqlCommand(query, connect))
            {
                cmd.Parameters.AddWithValue("@empcode", txbEmpCode.Text);
                cmd.Parameters.AddWithValue("@empname", txbEmpName.Text);
                cmd.Parameters.AddWithValue("@date", toolStripStatusLabel1.Text);
                cmd.Parameters.AddWithValue("@imagee", imagepic);

                cmd.ExecuteNonQuery();
            }
        }
        public AddStudent()
        {
            // Load the Haar cascade classifiers used for detection.
            //Load haarcascades for face detection
            face = new HaarCascade("haarcascade-frontalface-default.xml");
            //Load haarcascades for eye detection
            eyes = new HaarCascade("haarcascade_mcs_eyepair_big.xml");
            //Load haarcascades for mouth detection
            mouth = new HaarCascade("mouth.xml");
            //Load haarcascades for nose detection
            nose = new HaarCascade("nose.xml");

            InitializeComponent();

            try
            {
                //Previous trained faces and labels for each image.
                //TrainedNames.txt format: "<count>%<label1>%<label2>%..."
                string   Labelsinfo = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedNames.txt");
                string[] Labels     = Labelsinfo.Split('%');
                NumLabels = Convert.ToInt16(Labels[0]); //total number of faces detected
                ContTrain = NumLabels;                  //new images will be added to the previous set

                //Image files on disk are 1-based: face1.bmp, eyes1.bmp, ...
                string baseDir = Application.StartupPath + "/TrainedFaces/";
                for (int tf = 1; tf < NumLabels + 1; tf++)
                {
                    TrainedFace.Add(new Image <Gray, byte>(baseDir + "face" + tf + ".bmp"));
                    TrainedEyes.Add(new Image <Gray, byte>(baseDir + "eyes" + tf + ".bmp"));
                    TrainedMouth.Add(new Image <Gray, byte>(baseDir + "mouth" + tf + ".bmp"));
                    TrainedNose.Add(new Image <Gray, byte>(baseDir + "nose" + tf + ".bmp"));
                    labels.Add(Labels[tf]);
                }
            }
            catch (Exception ex)
            {
                //Typically the first run, before any training data exists.
                //BUG FIX: the original captured the exception in an unused variable
                //and showed a message with no information about what went wrong.
                MessageBox.Show("No trained faces loaded (" + ex.Message + "). Press okay to continue");
            }
        }
        public string SaveString(string inputpath, string label, ref int index)
        {
            // Detects a face in the image at `inputpath`, normalizes it, adds it to
            // the training set under `label`, retrains the recognizer and saves the
            // face image. Returns the value of BasicOperations.SaveImage, or
            // string.Empty when no face is found or any step throws.
            try
            {
                ContTrain = ContTrain + 1;
                bool detected = false;
                gray = new Image <Gray, byte>(inputpath);

                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                //Keep only the first face found.
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = gray.Copy(f.rect).Convert <Gray, byte>();
                    detected    = true;
                    break;
                }

                if (!detected)
                {
                    return(string.Empty);
                }

                //Normalize to the common 100x100 training size, then pre-process.
                TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                TrainedFace = new Image <Gray, byte>(ImageProcessing.ImagePreProcessing(TrainedFace.ToBitmap()));

                trainingImages.Add(TrainedFace);
                labels.Add(label);

                UpdateRecognizer();

                return(BasicOperations.SaveImage(TrainedFace.ToBitmap(), ref index)); // stray ';;' removed
            }
            catch
            {
                //Any failure (bad path, detector error) is reported as "not saved".
                return(string.Empty);
            }
        }
Example #5
0
        private void btnVideoRecorder_Click(object sender, EventArgs e)
        {
            // Captures the current face and stores it for the employee entered on
            // the form; the employee must be added five times before saving unlocks.
            if (txbEmpCode.Text == "" || txbEmpName.Text == "")
            {
                MessageBox.Show("Null", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            else
            {
                try
                {
                    faceRecProcess();

                    //NOTE: the original also serialized TrainedFace to a local byte[]
                    //here, but never used it - addEmp() does its own conversion.
                    connect.Open();
                    try
                    {
                        addEmp();
                    }
                    finally
                    {
                        //BUG FIX: close even when addEmp throws; the original leaked
                        //the open connection on failure.
                        connect.Close();
                    }

                    totalclick++;
                    //btnVideoRecorder.Enabled = false;
                    txbEmpCode.Enabled  = false;
                    txbEmpName.Enabled  = false;
                    lbNotification.Text = "Successfully! *Name: " + txbEmpName.Text + "  *Code: " + txbEmpCode.Text + " added " + totalclick.ToString() + "/5.";

                    //After five captures, recording is done and saving unlocks.
                    if (totalclick == 5)
                    {
                        btnVideoRecorder.Enabled = false;
                        btnSave.Enabled          = true;
                        videorecord = true;
                    }
                }
                catch
                {
                    MessageBox.Show("Error");
                }
            }
        }
Example #6
0
        private void btnFileImage_Click(object sender, EventArgs e)
        {
            // Trains from an image file chosen on disk instead of the camera.
            OpenFileDialog openFileDialog = new OpenFileDialog();

            if (txbEmpName.Text == "" || txbEmpCode.Text == "" || statuscam == true)
            {
                MessageBox.Show("Null or Camera is running", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            else
            {
                if (openFileDialog.ShowDialog() == DialogResult.OK)
                {
                    //BUG FIX: overwrite an existing copy instead of crashing on a
                    //duplicate file name (File.Copy throws without the overwrite flag).
                    File.Copy(openFileDialog.FileName, Directory.GetCurrentDirectory() + "\\image\\" + openFileDialog.SafeFileName, true);

                    //BUG FIX: dispose the Image so the source file is not left locked.
                    //The original's ImageBox / Bitmap / Image<Bgr,..> / byte[] locals
                    //were all unused and have been dropped; the load is kept so an
                    //unreadable image still fails at the same point.
                    using (Image img = Image.FromFile(openFileDialog.FileName))
                    {
                        faceRecProcess();

                        connect.Open();
                        try
                        {
                            addEmp();
                        }
                        finally
                        {
                            //Close even when addEmp throws.
                            connect.Close();
                        }
                    }

                    lbNotification.Text = "Successfully! *Name: " + txbEmpName.Text + "  *Code: " + txbEmpCode.Text + " added!";
                    lbDailyCheck.Text   = "";
                    txbEmpName.Text     = "";
                    txbEmpCode.Text     = "";
                    disPlayImage();
                    btnCorrection.Enabled = true;
                }
            }
        }
Example #7
0
        private void SavePicture(object sender, EventArgs e)
        {
            // Grabs the current camera frame, detects the first face and saves it as
            // a labelled training bitmap under /TrainedFaces.
            try
            {
                TrainedFacesCounter++;

                // Get a gray frame from capture device
                Gray = Grabber.QueryGrayFrame().Resize(320, 240, INTER.CV_INTER_CUBIC);

                // Face detector parameters
                var            scaleFactor   = 1.2;
                var            minNeighbors  = 10;
                var            detectionType = HAAR_DETECTION_TYPE.DO_CANNY_PRUNING;
                MCvAvgComp[][] facesDetected = Gray.DetectHaarCascade(Face, scaleFactor, minNeighbors, detectionType, new Size(20, 20));

                // Keep only the first detected face
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = CurrentFrame.Copy(f.rect).Convert <Gray, byte>();
                    break;
                }

                LogIt($"Handle frame - scaleFactor:{scaleFactor} minNeighbors:{minNeighbors} detectionType:{detectionType.ToString()}");

                string labelName = textBox1.Text.Trim().Replace(" ", "_").ToLower();

                // Resize to the common 100x100 training size.
                // BUG FIX: the original resized the stale `Result` field, discarding
                // the face just captured into TrainedFace above.
                TrainedFace = TrainedFace.Resize(100, 100, INTER.CV_INTER_CUBIC);

                // Show face added in gray scale
                imageBox1.Image = TrainedFace;

                // BUG FIX: the original format "yyyymmddhhmmssmm" used mm (minutes)
                // where months were intended and hh (12-hour) with no am/pm marker,
                // so file names collided and sorted wrongly. Use an unambiguous
                // year-month-day-hour-minute-second-fraction timestamp.
                var imgPath = $"/TrainedFaces/{labelName}@face{DateTime.Now.ToString("yyyyMMddHHmmssff")}.bmp";
                TrainedFace.Save(Application.StartupPath + imgPath);
            }
            catch (Exception ex)
            {
                // BUG FIX: the original swallowed every exception silently; at least
                // record the failure through the existing logger.
                LogIt($"SavePicture failed: {ex.Message}");
            }
        }
        private void button1_Click(object sender, EventArgs e)
        {
            // Captures the current face, saves it to a temp bitmap, uploads it via
            // SendIamge and deletes the temp file on success.
            try
            {
                gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                //Keep only the first detected face
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = currentFrame.Copy(f.rect).Convert <Gray, byte>();
                    break;
                }

                //Build the temp-file path once instead of re-concatenating it three times.
                string facePath = Application.StartupPath + "/face" + 1 + ".bmp";
                TrainedFace.Save(facePath);

                //BUG FIX: Image.FromFile keeps the file locked until the Image is
                //disposed, so the original's File.Delete could throw; dispose before
                //deleting. (Assumes SendIamge uses the image synchronously - confirm.)
                bool sent;
                using (Image faceImage = Image.FromFile(facePath))
                {
                    sent = SendIamge(faceImage);
                }

                if (sent)
                {
                    File.Delete(facePath);
                    MessageBox.Show(UserLogin.Username + "´s face detected and added :)", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                else
                {
                    MessageBox.Show("Some Thing Go Wrong Contact Support Now..", "Training Fail", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }
            catch
            {
                MessageBox.Show("Enable the face detection first", "Training Fail", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
        }
Example #9
0
        private void btnCheckout_Click(object sender, EventArgs e)
        {
            // Resets the entry UI, then records a check-out row for the employee
            // currently recognized: from the camera when it is running, otherwise
            // from the stored image stream (msImage).
            offCorrection();
            btnOneFace.Enabled       = true;
            btnVideoRecorder.Enabled = true;
            btnFileImage.Enabled     = true;
            btnSave.Enabled          = false;
            txbEmpCode.Text          = "";
            txbEmpName.Text          = "";

            if (statuscam == true)
            {
                try
                {
                    faceRecProcess();
                    ImageConverter converter = new ImageConverter();
                    byte[]         imagepic  = (byte[])converter.ConvertTo(TrainedFace.ToBitmap(), typeof(byte[]));

                    RecordCheckout(imagepic);
                }
                catch
                {
                    MessageBox.Show("Error for check out", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
            else
            {
                try
                {
                    byte[] imagepic = msImage.ToArray();

                    //Keep the recognizer's training set in sync with the stored image.
                    Image              img         = Image.FromStream(msImage);
                    Bitmap             masterImage = (Bitmap)img;
                    Image <Gray, Byte> graypic     = new Image <Gray, Byte>(masterImage);
                    trainingImages.Add(graypic);

                    labels.Add(displayEmpName.Text);
                    codes.Add(displayEmpCode.Text);

                    RecordCheckout(imagepic);
                }
                catch
                {
                    MessageBox.Show("Error for check out", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
        }

        // Inserts the check-out row, reads back the last stored name/code and
        // refreshes the status labels. Extracted from btnCheckout_Click, where this
        // block was duplicated verbatim in both branches.
        private void RecordCheckout(byte[] imagepic)
        {
            connect.Open();
            try
            {
                string query = "INSERT INTO Employee(empcode, empname, checkout, imagee) VALUES (@empcode, @empname, @checkout, @imagee)";
                using (SqlCommand cmd = new SqlCommand(query, connect))
                {
                    cmd.Parameters.AddWithValue("@empcode", displayEmpCode.Text);
                    cmd.Parameters.AddWithValue("@empname", displayEmpName.Text);
                    cmd.Parameters.AddWithValue("@checkout", toolStripStatusLabel1.Text);
                    cmd.Parameters.AddWithValue("@imagee", imagepic);

                    cmd.ExecuteNonQuery();
                }

                string empname;
                string empcode;
                using (SqlCommand nameCmd = new SqlCommand("SELECT TOP 1 empname FROM Employee ORDER BY ID DESC", connect))
                {
                    empname = (string)nameCmd.ExecuteScalar();
                }
                using (SqlCommand codeCmd = new SqlCommand("SELECT TOP 1 empcode FROM Employee ORDER BY ID DESC", connect))
                {
                    empcode = (string)codeCmd.ExecuteScalar();
                }

                txbCorrectName.Text  = empname;
                txbCorrectCode.Text  = empcode;
                displayCheckin.Text  = "Null";
                //BUG FIX: the original called ToString() on the ToolStripStatusLabel
                //itself, which renders the control's type description rather than
                //the timestamp text it displays.
                displayCheckout.Text = toolStripStatusLabel1.Text;
                disPlayImage();
                lbDailyCheck.Text   = "Successfully! *Name: " + empname + "  *Code: " + empcode + " checked out!";
                lbNotification.Text = "";
            }
            finally
            {
                //BUG FIX: the original left the connection open when any step threw.
                connect.Close();
            }
        }
Example #10
0
        private void btnBroweImg_Click(object sender, EventArgs e)
        {
            // Trains from an image file picked from disk; admin access required.
            if (frmLogin.admin == false)
            {
                frmLogin loginForm = new frmLogin();
                loginForm.Show();
                return;
            }
            if (txtName.Text == "")
            {
                MessageBox.Show("Please enter name first.");
                return;
            }
            openFileDialog1.FileName = String.Empty;
            openFileDialog1.Filter   = "JPEG Files (*.jpeg)|*.jpeg|PNG Files (*.png)|*.png|JPG Files (*.jpg)|*.jpg|GIF Files (*.gif)|*.gif";
            openFileDialog1.Title    = "Please select an image file to training.";
            if (openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                grabber      = new Capture(openFileDialog1.FileName);
                currentFrame = grabber.QueryFrame().Resize(473, 355, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //Trained face counter
                ContTrain = ContTrain + 1;

                //Convert the loaded frame to gray scale
                gray = currentFrame.Convert <Gray, Byte>();

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                //Keep only the first detected face.
                //BUG FIX: track whether anything was detected - the original silently
                //re-saved the previous TrainedFace when the picked image had no face.
                bool detected = false;
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = currentFrame.Copy(f.rect).Convert <Gray, byte>();
                    detected    = true;
                    break;
                }
                if (!detected)
                {
                    ContTrain = ContTrain - 1; //undo the counter bump
                    MessageBox.Show("No face detected in the selected image.", "Training Fail", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                    return;
                }

                //Resize to the common 100x100 training size (cubic interpolation)
                TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                trainingImages.Add(TrainedFace);
                labels.Add(txtName.Text + "|" + (ContTrain).ToString());

                //Show face added in gray scale
                imageBox1.Image = TrainedFace;

                //Persist count + labels; materialize once instead of calling
                //ToArray() per loop iteration (was O(n^2) copying).
                var      images  = trainingImages.ToArray();
                string[] names   = labels.ToArray();
                string   baseDir = Application.StartupPath + "/TrainedFaces/";

                File.WriteAllText(baseDir + "TrainedLabels.txt", images.Length.ToString() + "%");
                for (int i = 1; i < images.Length + 1; i++)
                {
                    images[i - 1].Save(baseDir + "face" + i + ".bmp");
                    File.AppendAllText(baseDir + "TrainedLabels.txt", names[i - 1] + "%");
                }

                MessageBox.Show(txtName.Text + "´s face detected and added :)", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
                txtName.Text = "";
            }
        }
        private void trainBtn_Click(object sender, EventArgs e)
        {
            // Captures a face for the entered roll number, persists the training set,
            // then registers the student with their checked courses and serializes
            // the student list to disk. Always returns to Form1 afterwards.
            try
            {
                //Trained face counter
                ContTrain = ContTrain + 1;

                //Get a gray frame from capture device
                gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                //Keep only the first detected face
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = currentFrame.Copy(f.rect).Convert <Gray, byte>();
                    break;
                }

                //Resize to the common 100x100 training size.
                //BUG FIX: the original resized the stale `result` field, discarding
                //the face just captured into TrainedFace above.
                TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                trainingImages.Add(TrainedFace);
                labels.Add(rollnoBox.Text);

                //Show face added in gray scale
                pictureBox2.Image = TrainedFace.ToBitmap();

                //Persist count + labels; materialize once instead of calling
                //ToArray() per loop iteration (was O(n^2) copying).
                var      images  = trainingImages.ToArray();
                string[] names   = labels.ToArray();
                string   baseDir = Application.StartupPath + "/TrainedFaces/";

                File.WriteAllText(baseDir + "TrainedLabels.txt", images.Length.ToString() + "%");
                for (int i = 1; i < images.Length + 1; i++)
                {
                    images[i - 1].Save(baseDir + "face" + i + ".bmp");
                    File.AppendAllText(baseDir + "TrainedLabels.txt", names[i - 1] + "%");
                }

                Student student = new Student(nameBox.Text, rollnoBox.Text);

                //Map each checked list index to its node in the course linked list.
                foreach (var index in courseListBox.CheckedIndices)
                {
                    ListNode <Course> temp = loadcourses.getHead();
                    for (int i = 0; temp != null; i++, temp = temp.next)
                    {
                        if (i == (int)index)
                        {
                            break;
                        }
                    }
                    //BUG FIX: guard against a checked index past the end of the
                    //course list; the original dereferenced a null node.
                    if (temp != null)
                    {
                        student.studentcourses.Add(temp.val);
                    }
                }
                student_list.Add(student);
                string studentfile = "student_file.bin";

                //serialize
                //SECURITY NOTE: BinaryFormatter is obsolete and unsafe when
                //deserializing untrusted data; consider System.Text.Json. Kept here
                //to stay compatible with the existing student_file.bin format.
                using (Stream stream = File.Open(studentfile, FileMode.Create))
                {
                    var bformatter = new BinaryFormatter();

                    bformatter.Serialize(stream, student_list);
                }

                //BUG FIX: the original showed nameBox.ToString() (the TextBox's type
                //description), not the entered student name.
                MessageBox.Show(nameBox.Text + " Student registered", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
            catch (Exception exc)
            {
                MessageBox.Show("Failed To Register " + exc.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
            this.Hide();
            Form1 form1 = new Form1();

            form1.ShowDialog();
        }
        public bool CatchFace(string label, PictureBox pcb)
        {
            // Captures one face from the camera, normalizes and pre-processes it,
            // shows it in `pcb` and retrains the recognizer.
            // Returns true when a face was captured, false otherwise.
            try
            {
                bool detected = false;

                //Get a gray frame from capture device
                gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                //Keep only the first detected face
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = currentFrame.Copy(f.rect).Convert <Gray, byte>();
                    detected    = true;
                    break;
                }
                if (!detected)
                {
                    return(false);
                }

                //Normalize to the common 100x100 training size.
                //BUG FIX: resize the face captured above; the original resized the
                //stale `result` field (the sibling SaveString method uses TrainedFace
                //at this exact step).
                TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                TrainedFace = new Image <Gray, byte>(ImageProcessing.ImagePreProcessing(TrainedFace.ToBitmap()));

                //Show face added in gray scale
                pcb.Image = TrainedFace.ToBitmap();

                UpdateRecognizer();
                MessageBox.Show(label + "´s face detected and added :)", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
                return(true);
            }
            catch
            {
                MessageBox.Show("Enable the face detection first", "Training Fail", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                return(false);
            }
        }