private void face_add_Click(object sender, RoutedEventArgs e)
{
    try
    {
        // Count one more trained face.
        ContTrain = ContTrain + 1;

        // Grab a gray frame from the capture device, downscaled for detection.
        gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

        // Detect faces with the Haar cascade.
        MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
            face, 1.2, 10,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            new System.Drawing.Size(20, 20));

        // Keep only the first detected face.
        bool faceFound = false;
        foreach (MCvAvgComp f in facesDetected[0])
        {
            TrainedFace = currentFrame.Copy(f.rect).Convert<Gray, byte>();
            faceFound = true;
            break;
        }

        // BUG FIX: bail out when no face was detected instead of silently saving
        // a stale image under the new label.
        if (!faceFound)
        {
            MessageBox.Show("No face detected, try again", "Training Fail");
            return;
        }

        // BUG FIX: resize the face we just detected (TrainedFace), not the
        // unrelated 'result' field, so the stored sample matches this frame.
        // All samples are normalized to 100x100 for comparison.
        TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        trainingImages.Add(TrainedFace);
        labels.Add(person_name.Text);

        // Show the newly added face in gray scale.
        train_image.Source = ImageSourceForBitmap(TrainedFace.ToBitmap());

        // Persist the number of trained faces, then every face image and its label.
        // Snapshot the collections once instead of calling ToArray() per iteration.
        var images = trainingImages.ToArray();
        var labelArray = labels.ToArray();
        string dir = AppDomain.CurrentDomain.BaseDirectory + "/TrainedFaces/";
        File.WriteAllText(dir + "TrainedLabels.txt", images.Length.ToString() + "%");
        for (int i = 1; i < images.Length + 1; i++)
        {
            images[i - 1].Save(dir + "face" + i + ".bmp");
            File.AppendAllText(dir + "TrainedLabels.txt", labelArray[i - 1] + "%");
        }

        MessageBox.Show(person_name.Text + "´s face detected and added :)", "Training OK");
    }
    catch
    {
        // Any failure (no capture running, cascade not loaded, ...) lands here.
        MessageBox.Show("Enable the face detection first", "Training Fail");
    }
}
public void addEmp()
{
    // Insert the currently trained face together with the employee code/name and
    // the timestamp shown in the status bar into the 'train' table.
    // Assumes 'connect' has already been opened by the caller.
    string query = "INSERT INTO train(empcode, empname, date, imagee) VALUES (@empcode, @empname, @date, @imagee)";
    using (SqlCommand cmd = new SqlCommand(query, connect))
    {
        // Serialize the trained face bitmap into a byte array for the image column.
        ImageConverter converter = new ImageConverter();
        byte[] imagepic = (byte[])converter.ConvertTo(TrainedFace.ToBitmap(), typeof(byte[]));

        cmd.Parameters.AddWithValue("@empcode", txbEmpCode.Text);
        cmd.Parameters.AddWithValue("@empname", txbEmpName.Text);
        cmd.Parameters.AddWithValue("@date", toolStripStatusLabel1.Text);
        cmd.Parameters.AddWithValue("@imagee", imagepic);
        cmd.ExecuteNonQuery();
    }
}
private void btnVideoRecorder_Click(object sender, EventArgs e)
{
    // Require both employee fields before capturing a training sample.
    if (txbEmpCode.Text == "" || txbEmpName.Text == "")
    {
        MessageBox.Show("Null", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    try
    {
        faceRecProcess();

        // NOTE(review): the original built a byte[] from TrainedFace here but
        // never used it — addEmp() serializes TrainedFace itself, so the dead
        // conversion was removed.
        connect.Open();
        try
        {
            addEmp();
        }
        finally
        {
            // BUG FIX: always release the connection, even when the INSERT
            // throws — otherwise the next Open() fails on an open connection.
            connect.Close();
        }

        totalclick++;
        txbEmpCode.Enabled = false;
        txbEmpName.Enabled = false;
        lbNotification.Text = "Successfully! *Name: " + txbEmpName.Text + " *Code: " + txbEmpCode.Text + " added " + totalclick.ToString() + "/5.";

        // Five samples complete the enrollment.
        if (totalclick == 5)
        {
            btnVideoRecorder.Enabled = false;
            btnSave.Enabled = true;
            videorecord = true;
        }
    }
    catch
    {
        MessageBox.Show("Error");
    }
}
private void btnFileImage_Click(object sender, EventArgs e)
{
    OpenFileDialog openFileDialog = new OpenFileDialog();

    // Enrolling from a file is only allowed when both fields are filled in and
    // the live camera is not running.
    if (txbEmpName.Text == "" || txbEmpCode.Text == "" || statuscam == true)
    {
        MessageBox.Show("Null or Camera is running", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    if (openFileDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    // Keep a local copy of the selected picture.  BUG FIX: overwrite a stale
    // copy instead of crashing when the same file is imported a second time.
    File.Copy(openFileDialog.FileName, Directory.GetCurrentDirectory() + "\\image\\" + openFileDialog.SafeFileName, true);

    // NOTE(review): the original loaded the file into an Image / ImageBox and
    // converted it to Image<Bgr, byte>, but none of those locals (nor the
    // byte[] conversion) were ever used, and Image.FromFile kept the file
    // locked.  faceRecProcess() produces TrainedFace on its own, so the dead
    // chain was removed — confirm faceRecProcess does not rely on it.
    faceRecProcess();

    connect.Open();
    try
    {
        addEmp();
    }
    finally
    {
        // Always release the connection, even when the INSERT throws.
        connect.Close();
    }

    lbNotification.Text = "Successfully! *Name: " + txbEmpName.Text + " *Code: " + txbEmpCode.Text + " added!";
    lbDailyCheck.Text = "";
    txbEmpName.Text = "";
    txbEmpCode.Text = "";
    disPlayImage();
    btnCorrection.Enabled = true;
}
private void btnCheckout_Click(object sender, EventArgs e)
{
    // Reset the correction UI and clear the enrollment fields before check-out.
    offCorrection();
    btnOneFace.Enabled = true;
    btnVideoRecorder.Enabled = true;
    btnFileImage.Enabled = true;
    btnSave.Enabled = false;
    txbEmpCode.Text = "";
    txbEmpName.Text = "";

    if (statuscam == true)
    {
        try
        {
            // Recognize the face in front of the camera, then record the
            // check-out with a snapshot of the recognized face.
            faceRecProcess();
            ImageConverter converter = new ImageConverter();
            byte[] imagepic = (byte[])converter.ConvertTo(TrainedFace.ToBitmap(), typeof(byte[]));
            CheckoutEmployee(imagepic);
        }
        catch
        {
            MessageBox.Show("Error for check out", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }
    else
    {
        try
        {
            // No camera: reuse the image buffered in msImage and register it as
            // an extra training sample for the displayed employee.
            byte[] imagepic = msImage.ToArray();
            // Image.FromStream reads from the current position — rewind first
            // so a previously consumed stream still decodes.
            msImage.Position = 0;
            Image img = Image.FromStream(msImage);
            Bitmap masterImage = (Bitmap)img;
            Image<Gray, Byte> graypic = new Image<Gray, Byte>(masterImage);
            trainingImages.Add(graypic);
            labels.Add(displayEmpName.Text);
            codes.Add(displayEmpCode.Text);
            CheckoutEmployee(imagepic);
        }
        catch
        {
            MessageBox.Show("Error for check out", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }
}

// Inserts a check-out row for the employee shown in the display labels and
// refreshes the UI from the values read back from the database.
// (Extracted: this block was duplicated verbatim in both branches above.)
private void CheckoutEmployee(byte[] imagepic)
{
    connect.Open();
    try
    {
        string query = "INSERT INTO Employee(empcode, empname, checkout, imagee) VALUES (@empcode, @empname, @checkout, @imagee)";
        using (SqlCommand cmd = new SqlCommand(query, connect))
        {
            cmd.Parameters.AddWithValue("@empcode", displayEmpCode.Text);
            cmd.Parameters.AddWithValue("@empname", displayEmpName.Text);
            cmd.Parameters.AddWithValue("@checkout", toolStripStatusLabel1.Text);
            cmd.Parameters.AddWithValue("@imagee", imagepic);
            cmd.ExecuteNonQuery();
        }

        // Read back the row we just inserted (latest ID).
        string empname;
        string empcode;
        using (SqlCommand nameCmd = new SqlCommand("SELECT TOP 1 empname FROM Employee ORDER BY ID DESC", connect))
        {
            empname = (string)nameCmd.ExecuteScalar();
        }
        using (SqlCommand codeCmd = new SqlCommand("SELECT TOP 1 empcode FROM Employee ORDER BY ID DESC", connect))
        {
            empcode = (string)codeCmd.ExecuteScalar();
        }

        txbCorrectName.Text = empname;
        txbCorrectCode.Text = empcode;
        displayCheckin.Text = "Null";
        // BUG FIX: show the status-bar time (.Text) — ToString() on the label
        // control yields its type name, not the timestamp.
        displayCheckout.Text = toolStripStatusLabel1.Text;
        disPlayImage();
        lbDailyCheck.Text = "Successfully! *Name: " + empname + " *Code: " + empcode + " checked out!";
        lbNotification.Text = "";
    }
    finally
    {
        // BUG FIX: always release the connection — the original left it open
        // when any statement threw, breaking every later Open().
        connect.Close();
    }
}
private void trainBtn_Click(object sender, EventArgs e)
{
    try
    {
        // Count one more trained face.
        ContTrain = ContTrain + 1;

        // Grab a gray frame and detect faces with the Haar cascade.
        gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
            face, 1.2, 10,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            new Size(20, 20));

        // Keep only the first detected face.
        foreach (MCvAvgComp f in facesDetected[0])
        {
            TrainedFace = currentFrame.Copy(f.rect).Convert<Gray, byte>();
            break;
        }

        // BUG FIX: resize the face detected above (TrainedFace), not the
        // unrelated 'result' field, so the stored sample matches this frame.
        TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        trainingImages.Add(TrainedFace);
        labels.Add(rollnoBox.Text);

        // Show the newly added face in gray scale.
        pictureBox2.Image = TrainedFace.ToBitmap();

        // Persist the number of trained faces, then every face image and label.
        // Snapshot the collections once instead of calling ToArray() per iteration.
        var images = trainingImages.ToArray();
        var labelArray = labels.ToArray();
        File.WriteAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt", images.Length.ToString() + "%");
        for (int i = 1; i < images.Length + 1; i++)
        {
            images[i - 1].Save(Application.StartupPath + "/TrainedFaces/face" + i + ".bmp");
            File.AppendAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt", labelArray[i - 1] + "%");
        }

        // Register the student together with the checked courses.
        Student student = new Student(nameBox.Text, rollnoBox.Text);
        foreach (var index in courseListBox.CheckedIndices)
        {
            // Walk the linked course list to the position of the checked index.
            ListNode<Course> temp = loadcourses.getHead();
            for (int i = 0; temp != null; i++, temp = temp.next)
            {
                if (i == (int)index)
                {
                    break;
                }
            }
            student.studentcourses.Add(temp.val);
        }
        student_list.Add(student);

        // SECURITY NOTE(review): BinaryFormatter is unsafe to deserialize and
        // removed in .NET 9 — consider System.Text.Json for this file.
        string studentfile = "student_file.bin";
        using (Stream stream = File.Open(studentfile, FileMode.Create))
        {
            var bformatter = new BinaryFormatter();
            bformatter.Serialize(stream, student_list);
        }

        // BUG FIX: show the entered name (nameBox.Text) — ToString() on the
        // TextBox control yields its type description, not the student's name.
        MessageBox.Show(nameBox.Text + " Student registered", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
    catch (Exception exc)
    {
        MessageBox.Show("Failed To Register " + exc.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }

    // Return to the main form regardless of success (original behavior).
    this.Hide();
    Form1 form1 = new Form1();
    form1.ShowDialog();
}
public string SaveString(string inputpath, string label, ref int index)
{
    // Load the image at 'inputpath', extract the first detected face, normalize
    // and pre-process it, register it under 'label', retrain the recognizer and
    // persist it.  Returns the saved image name via BasicOperations.SaveImage,
    // or an empty string when no face is found or anything fails.
    try
    {
        ContTrain = ContTrain + 1;
        gray = new Image<Gray, byte>(inputpath);

        MCvAvgComp[][] hits = gray.DetectHaarCascade(
            face, 1.2, 10,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            new Size(20, 20));

        // Take only the first face the cascade reports.
        bool found = false;
        foreach (MCvAvgComp candidate in hits[0])
        {
            TrainedFace = gray.Copy(candidate.rect).Convert<Gray, byte>();
            found = true;
            break;
        }
        if (!found)
        {
            return string.Empty;
        }

        // Normalize to the fixed 100x100 comparison size, then pre-process.
        TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        TrainedFace = new Image<Gray, byte>(ImageProcessing.ImagePreProcessing(TrainedFace.ToBitmap()));

        trainingImages.Add(TrainedFace);
        labels.Add(label);
        UpdateRecognizer();

        return BasicOperations.SaveImage(TrainedFace.ToBitmap(), ref index);
    }
    catch
    {
        // Best effort: any failure is reported as "nothing saved".
        return string.Empty;
    }
}
public bool CatchFace(string label, PictureBox pcb)
{
    // Grab a frame from the capture device, detect the first face, normalize
    // it, show it in 'pcb' and retrain the recognizer.
    // Returns true when a face was captured, false otherwise.
    try
    {
        bool detected = false;

        // Gray frame from the capture device, downscaled for detection.
        gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

        MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
            face, 1.2, 10,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            new Size(20, 20));

        // Keep only the first detected face.
        foreach (MCvAvgComp f in facesDetected[0])
        {
            TrainedFace = currentFrame.Copy(f.rect).Convert<Gray, byte>();
            detected = true;
            break;
        }
        if (!detected)
        {
            return false;
        }

        // BUG FIX: resize the face just detected (TrainedFace), not the
        // unrelated 'result' field — matches SaveString(), which already
        // resizes TrainedFace here.
        TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        TrainedFace = new Image<Gray, byte>(ImageProcessing.ImagePreProcessing(TrainedFace.ToBitmap()));

        // Show the captured face in gray scale.
        pcb.Image = TrainedFace.ToBitmap();
        UpdateRecognizer();

        MessageBox.Show(label + "´s face detected and added :)", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
        return true;
    }
    catch
    {
        MessageBox.Show("Enable the face detection first", "Training Fail", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
        return false;
    }
}