/// <summary>
/// Button handler that registers a new person: saves the captured grayscale
/// face image, appends the username to the Faces.txt index, and inserts the
/// remaining person info into the SQLite database.
/// </summary>
/// <param name="sender">Event source (the Add button).</param>
/// <param name="e">Event arguments.</param>
private void button1_Click_1(object sender, EventArgs e)
{
    // Guard clauses: all three fields must be filled before anything else runs.
    if (IsEmptyOrWhiteSpace(textBoxName.Text))
    {
        MessageBox.Show("Username must be filled");
        return;
    }
    if (IsEmptyOrWhiteSpace(textBoxPersonName.Text))
    {
        MessageBox.Show("Name must be filled");
        return;
    }
    if (IsEmptyOrWhiteSpace(textBoxAge.Text))
    {
        MessageBox.Show("Age must be filled!");
        return;
    }

    try
    {
        // Username must be alphanumeric and not already registered.
        if (Utilities.checkUserName(textBoxName.Text) && !labels.Contains(textBoxName.Text))
        {
            Bitmap masterImage = (Bitmap)pictureBox1.Image;
            Image<Gray, Byte> normalizedMasterImage = new Image<Gray, Byte>(masterImage);
            trainingImages.Add(normalizedMasterImage);
            labels.Add(textBoxName.Text);

            // BUG FIX: the original looped over ALL training images and saved
            // each of them to the SAME path ("<username>.bmp"), transiently
            // overwriting the file with other people's faces. Only the newly
            // added image needs to be saved, exactly once.
            normalizedMasterImage.Save(Application.StartupPath + "/Faces/" + textBoxName.Text + ".bmp"); //saving greyscaled face image as trained data

            // Append the username to the flat-file index of registered faces.
            File.AppendAllText(Application.StartupPath + "/Faces/Faces.txt", textBoxName.Text + ","); //writing person username to database file

            // Persist the remaining person info in the SQLite database.
            DBConnecttion.getInstance().createPerson(textBoxName.Text, textBoxPersonName.Text, textBoxAge.Text, "xc"); //inserting remaining person info in sqlite database

            textBoxAge.Text = "";
            textBoxName.Text = "";
            textBoxPersonName.Text = "";
            MessageBox.Show("Person Added to Database!");
        }
        else
        {
            MessageBox.Show("Warning : Username only contain alphanumeric characters and must not be already registered!");
        }
    }
    catch (Exception)
    {
        // pictureBox1.Image is null until a face has been captured, making the
        // Bitmap cast throw — tell the user to detect a face first.
        MessageBox.Show("Warning : Detect face first!");
    }
}
/// <summary>
/// Button handler that wipes all data: clears the SQLite database, deletes
/// every stored face image (including the Faces.txt index) under the Faces
/// folder, and terminates the application.
/// </summary>
/// <param name="sender">Event source (the Delete-all button).</param>
/// <param name="e">Event arguments.</param>
private void button1_Click(object sender, EventArgs e)
{
    DBConnecttion.getInstance().deleteAll();

    System.IO.DirectoryInfo di = new DirectoryInfo(Application.StartupPath + "/Faces");
    // ROBUSTNESS FIX: GetFiles()/GetDirectories() throw DirectoryNotFoundException
    // if the Faces folder was never created (fresh install) — guard on existence.
    if (di.Exists)
    {
        foreach (FileInfo file in di.GetFiles())
        {
            file.Delete();
        }
        foreach (DirectoryInfo dir in di.GetDirectories())
        {
            dir.Delete(true); // recursive delete of any nested folders
        }
    }

    Application.Exit();
}
/// <summary>
/// Constructor of this form: opens the SQLite connection, loads the Haar
/// cascade classifiers, and restores any previously trained face images
/// from the Faces folder into the runtime lists.
/// </summary>
public Form1()
{
    InitializeComponent();
    DBConnecttion.getInstance().openConnection(); //Establising connection with sqllite database

    /*
     * These are Haar cascades for detecting face, eyes, nose, mouth.
     * Comment out the features you don't want to detect; if you do,
     * you also have to comment the related code in the "FrameProcedure" function.
     */
    faceDetected = new HaarCascade("haar/haarcascade_frontalface_default.xml"); //HaarCascade to detect face
    eyesDetected = new HaarCascade("haar/eye.xml");                             //HaarCascade to detect eyes
    //noseDetected = new HaarCascade("haar/nose.xml");                          //HaarCascade to detect nose
    //mouthDetected = new HaarCascade("haar/mouth.xml");                        //HaarCascade to detect mouth

    try
    {
        // Restore trained data: Faces.txt holds a comma-separated list of
        // usernames; each username has a matching "<name>.bmp" face image.
        string Labelsinf = File.ReadAllText(Application.StartupPath + "/Faces/Faces.txt");

        // BUG FIX: Faces.txt is written with a trailing comma, so a plain
        // Split(',') produced an empty entry that made the loop try to open
        // "/Faces/.bmp" and abort all loading via the catch below.
        // RemoveEmptyEntries discards it.
        List<string> Labels = Labelsinf
            .Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries)
            .Distinct()
            .ToList();
        Numlabels = Labels.Count;
        Count = Numlabels;
        foreach (string i in Labels)
        {
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + $"/Faces/{i}.bmp"));
            labels.Add(i);
        }
    }
    catch (Exception)
    {
        // Best-effort: Faces.txt does not exist on first run — start with an
        // empty training set instead of crashing the form.
    }
}
/// <summary>
/// Per-frame handler: grabs a frame from the camera, detects faces with the
/// Haar cascade, recognizes each detected face against the trained images,
/// and draws the detection box, name label and (in monitor mode) the stored
/// face image plus the person's database info.
/// </summary>
/// <param name="sender">Event source (the frame timer / idle loop).</param>
/// <param name="e">Event arguments.</param>
private void FrameProcedure(object sender, EventArgs e)
{
    try
    {
        Users.Add("");
        Frame = camera.QueryFrame().Resize(480, 360, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        grayFace = Frame.Convert<Gray, byte>();

        /*
         * Face detection. Feature detectors (eyes/nose/mouth) are disabled;
         * re-enable the corresponding DetectHaarCascade call and a
         * Frame.Draw loop over its results if you need them (see the
         * cascades loaded in the constructor).
         */
        MCvAvgComp[][] facesDetectedNow = grayFace.DetectHaarCascade(faceDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(10, 10));
        //MCvAvgComp[][] eyesDetectedNow = grayFace.DetectHaarCascade(eyesDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(5, 5));

        // PERF FIX: the original constructed a new EigenObjectRecognizer
        // (a full PCA retraining over all images) for EVERY detected face on
        // EVERY frame. The training set cannot change while this handler runs,
        // so build the recognizer once per frame, before the face loop.
        EigenObjectRecognizer recognizer = null;
        if (trainingImages.ToArray().Length != 0)
        {
            MCvTermCriteria termCriteries = new MCvTermCriteria(Count, 0.001);
            recognizer = new EigenObjectRecognizer(trainingImages.ToArray(), labels.ToArray(), 1500, ref termCriteries);
        }

        foreach (MCvAvgComp f in facesDetectedNow[0])
        {
            // Crop the face region, grayscale it, and normalize to the
            // 100x100 size used by the training images.
            result = Frame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            Frame.Draw(f.rect, new Bgr(Color.DarkViolet), 2); //drawing box around the detected face

            if (recognizer != null)
            {
                name = recognizer.Recognize(result);
                Frame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Green)); //drawing the recognized name above the box
                if (name != "")
                {
                    if (monitorMode)
                    {
                        // Show the stored training image and the person's
                        // details from the SQLite database.
                        imageBoxFaceDetected.Image = new Image<Gray, byte>(Application.StartupPath + $"/Faces/{name}.bmp");
                        Person person = DBConnecttion.getInstance().personInfo(name);
                        labelName.Text = person.Name;
                        labelUserName.Text = name;
                        labelAge.Text = person.Age;
                    }
                }
            }
            Users.Add("");
        }

        liveCameraView.Image = Frame;
        names = "";
        Users.Clear();
    }
    catch (Exception)
    {
        // Best-effort per frame: QueryFrame can return null before the camera
        // is ready (NullReferenceException on Resize) — skip this frame rather
        // than crash the UI loop.
        //MessageBox.Show(ex.Message);
    }
}