/// <summary>
/// Enrolls a single image file into the search set: detects one face, crops a square
/// face image, extracts eye positions and a recognition template, and appends the
/// record to FaceSearchList plus the UI image/list views.
/// </summary>
/// <param name="fileName">Full path of the image file to load.</param>
private void loadSubject(string fileName)
{
    FSDK.SetFaceDetectionParameters(false, true, 384);
    FSDK.SetFaceDetectionThreshold((int)FaceDetectionThreshold);

    TFaceRecord fr = new TFaceRecord();
    fr.ImageFileName = fileName;
    fr.FacePosition = new FSDK.TFacePosition();
    fr.FacialFeatures = new FSDK.TPoint[2];
    fr.Template = new byte[FSDK.TemplateSize];
    fr.image = new FSDK.CImage(fileName);

    fr.FacePosition = fr.image.DetectFace();
    if (0 == fr.FacePosition.w)
    {
        // BUG FIX: the original guarded this message with "fileName.Length <= 1",
        // a condition copied from the multi-select enroll path (where it tested the
        // COUNT of selected files). A file path is practically never one character,
        // so the user was silently left without feedback. Always report the failure.
        MessageBox.Show("No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.", "Enrollment error");
    }
    else
    {
        // Crop a square of side w centered on the detected face center (xc, yc).
        int half = (int)Math.Round(fr.FacePosition.w * 0.5);
        fr.faceImage = fr.image.CopyRect(
            fr.FacePosition.xc - half, fr.FacePosition.yc - half,
            fr.FacePosition.xc + half, fr.FacePosition.yc + half);

        try
        {
            fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
        }
        catch (Exception ex2)
        {
            MessageBox.Show(ex2.Message, "Error detecting eyes.");
        }

        try
        {
            fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision
        }
        catch (Exception ex2)
        {
            MessageBox.Show(ex2.Message, "Error retrieving face template.");
        }

        FaceSearchList.Add(fr);
        FaceSearchImageList.Images.Add(fr.faceImage.ToCLRImage());
        listView1.Items.Add(
            (FaceSearchImageList.Images.Count - 1).ToString(),
            fileName.Split('\\')[fileName.Split('\\').Length - 1],
            FaceSearchImageList.Images.Count - 1);

        // BUG FIX: the original disposed the Image (via "using") while it was still
        // assigned to pictureBox1. Detach it from the control before disposing.
        Image img = fr.image.ToCLRImage();
        pictureBox1.Image = img;
        pictureBox1.Refresh();
        pictureBox1.Image = null;
        img.Dispose();
    }
    pictureBox1.Image = null; // clear the preview either way (original behavior)
}
/// <summary>
/// Persists one enrolled face record (file name, subject name, face geometry, eye
/// positions, recognition template, JPEG-encoded full image and face crop) into the
/// FaceList table of the SQL CE database.
/// </summary>
/// <param name="fr">The face record to save; image and faceImage must be non-null.</param>
/// <param name="conString">SQL CE connection string.</param>
public void SaveSubject(TFaceRecord fr, string conString)
{
    using (conn = new SqlCeConnection(conString))
    {
        conn.Open();
        // LEAK FIX: the original never disposed the two Images or the two
        // MemoryStreams; all four are now wrapped in using blocks.
        using (MemoryStream strm = new MemoryStream())
        using (MemoryStream strm_face = new MemoryStream())
        using (Image img = fr.image.ToCLRImage())
        using (Image img_face = fr.faceImage.ToCLRImage())
        {
            img.Save(strm, System.Drawing.Imaging.ImageFormat.Jpeg);
            img_face.Save(strm_face, System.Drawing.Imaging.ImageFormat.Jpeg);
            // ToArray() replaces the manual Position = 0 / Read() round-trip.
            byte[] img_array = strm.ToArray();
            byte[] img_face_array = strm_face.ToArray();

            using (cmd = new SqlCeCommand("insert into FaceList (ImageFileName,SubjectName,FacePositionXc,FacePositionYc" +
                ",FacePositionW,FacePositionAngle,Eye1X,Eye1Y,Eye2X,Eye2Y,Template,Image,FaceImage) values " +
                "(@IFName,@SName,@FPXc,@FPYc,@FPW,@FPA,@Eye1X,@Eye1Y,@Eye2X,@Eye2Y,@Template,@Image,@FaceImage)", conn))
            {
                // NOTE: the original wrote @"IFName" etc. — that is a VERBATIM string
                // literal, byte-identical to "IFName". The parameter names below are
                // unchanged; only the misleading verbatim prefix is dropped.
                cmd.Parameters.AddWithValue("IFName", fr.ImageFileName);
                cmd.Parameters.AddWithValue("SName", fr.suspectName);
                cmd.Parameters.AddWithValue("FPXc", fr.FacePosition.xc);
                cmd.Parameters.AddWithValue("FPYc", fr.FacePosition.yc);
                cmd.Parameters.AddWithValue("FPW", fr.FacePosition.w);
                cmd.Parameters.AddWithValue("FPA", fr.FacePosition.angle);
                cmd.Parameters.AddWithValue("Eye1X", fr.FacialFeatures[0].x);
                cmd.Parameters.AddWithValue("Eye1Y", fr.FacialFeatures[0].y);
                cmd.Parameters.AddWithValue("Eye2X", fr.FacialFeatures[1].x);
                cmd.Parameters.AddWithValue("Eye2Y", fr.FacialFeatures[1].y);
                cmd.Parameters.AddWithValue("Template", fr.Template);
                cmd.Parameters.AddWithValue("Image", img_array);
                cmd.Parameters.AddWithValue("FaceImage", img_face_array);
                cmd.ExecuteNonQuery();
            }
        }
        // conn.Close() removed: the using block disposes (and closes) the connection.
    }
    MessageBox.Show(" Subject added successfully !!");
}
/// <summary>
/// Enroll-button handler: prompts for one or more image files, detects a face in
/// each, builds a TFaceRecord (tagged with the subject name from textBox1), shows
/// the face crop in the list view, and saves the record to the database.
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    if (textBox1.Text.Length == 0)
    {
        MessageBox.Show("Please Enter the Subject Name");
        textBox1.Focus();
        return;
    }

    OpenFileDialog dlg = new OpenFileDialog();
    dlg.Filter = "JPEG (*.jpg)|*.jpg|Windows bitmap (*.bmp)|*.bmp|All files|*.*";
    dlg.Multiselect = true;
    if (dlg.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    try
    {
        FSDK.SetFaceDetectionParameters(false, true, 384);
        FSDK.SetFaceDetectionThreshold((int)FaceDetectionThreshold);

        foreach (string fn in dlg.FileNames)
        {
            // NOTE(review): cleared every iteration, so FaceList only ever holds
            // the last processed file's record — confirm this is intended.
            FaceList.Clear();

            TFaceRecord fr = new TFaceRecord();
            fr.ImageFileName = fn;
            fr.suspectName = textBox1.Text.Trim();
            fr.FacePosition = new FSDK.TFacePosition();
            fr.FacialFeatures = new FSDK.TPoint[2];
            fr.Template = new byte[FSDK.TemplateSize];
            fr.image = new FSDK.CImage(fn);

            fr.FacePosition = fr.image.DetectFace();
            if (0 == fr.FacePosition.w)
            {
                // Only nag about a failed detection when a single file was chosen.
                if (dlg.FileNames.Length <= 1)
                {
                    MessageBox.Show("No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.", "Enrollment error");
                }
                // BUG FIX: the original fell through and still called
                // db.SaveSubject(fr, ...) with fr.faceImage == null, which throws a
                // NullReferenceException inside SaveSubject. Skip this file instead.
                continue;
            }

            // Crop a square of side w centered on the detected face.
            int half = (int)Math.Round(fr.FacePosition.w * 0.5);
            fr.faceImage = fr.image.CopyRect(
                fr.FacePosition.xc - half, fr.FacePosition.yc - half,
                fr.FacePosition.xc + half, fr.FacePosition.yc + half);
            fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
            fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision

            FaceList.Add(fr);
            imageList1.Images.Add(fr.faceImage.ToCLRImage());
            listView1.Items.Add(
                (imageList1.Images.Count - 1).ToString(),
                fn.Split('\\')[fn.Split('\\').Length - 1],
                imageList1.Images.Count - 1);

            listView1.SelectedIndices.Clear();
            db.SaveSubject(fr, Constants.conString);
        }
        listView1.Refresh();
    }
    catch (Exception ex)
    {
        MessageBox.Show("Can't open image(s) with error: " + ex.Message.ToString(), "Error");
    }
}
/// <summary>
/// Matches every database subject against every face in FaceSearchList. For each
/// pair above the FAR-derived similarity threshold, detects expression/gender
/// confidences on the search face and adds a (search crop, subject image, text)
/// row to dataGridView1 via Invoke (this method runs off the UI thread).
/// </summary>
private void matchesFace()
{
    if (dataGridView1.Rows.Count > 0)
    {
        this.dataGridView1.Invoke(new MethodInvoker(() => this.dataGridView1.Rows.Clear()));
        this.dataGridView1.Invoke(new MethodInvoker(() => this.dataGridView1.Refresh()));
    }

    // Hoisted out of the loop: the threshold depends only on FARValue, so it is
    // loop-invariant (the original recomputed it for every subject).
    FSDK.GetMatchingThresholdAtFAR(FARValue / 100, ref FaceDetectionThreshold);

    for (int i = 0; i < SubjectList.Count; i++)
    {
        // (removed the original "if (SubjectList.Count >= 1)" — always true here)
        TFaceRecord DbSubject = SubjectList[i];
        int MatchedCount = 0;
        int FaceCount = FaceSearchList.Count;
        float[] Similarities = new float[FaceCount];
        float[] Smile = new float[FaceCount];
        float[] EyesOpen = new float[FaceCount];
        float[] Male = new float[FaceCount];
        float[] Female = new float[FaceCount];
        int[] Numbers = new int[FaceCount];

        for (int k = 0; k < FaceCount; k++)
        {
            float Similarity = 0.0f;
            float ConfidenceSmile = 0.0f;
            float ConfidenceEyesOpen = 0.0f;
            float ConfidenceMale = 0.0f;
            float ConfidenceFemale = 0.0f;
            TFaceRecord SearchFace = FaceSearchList[k];
            FSDK.MatchFaces(ref DbSubject.Template, ref SearchFace.Template, ref Similarity);

            long MaxSizeInBytes = 100000;
            FSDK.TPoint[] Facefeatures = null;
            FSDK.DetectFacialFeatures(SearchFace.faceImage.ImageHandle, out Facefeatures);
            if (Facefeatures != null)
            {
                string ExpressionValues = "";
                string GenderValues = "";
                FSDK.DetectFacialAttributeUsingFeatures(SearchFace.faceImage.ImageHandle, ref Facefeatures, "Expression", out ExpressionValues, MaxSizeInBytes);
                FSDK.GetValueConfidence(ExpressionValues, "Smile", ref ConfidenceSmile);
                FSDK.GetValueConfidence(ExpressionValues, "EyesOpen", ref ConfidenceEyesOpen);
                FSDK.DetectFacialAttributeUsingFeatures(SearchFace.faceImage.ImageHandle, ref Facefeatures, "Gender", out GenderValues, MaxSizeInBytes);
                FSDK.GetValueConfidence(GenderValues, "Male", ref ConfidenceMale);
                FSDK.GetValueConfidence(GenderValues, "Female", ref ConfidenceFemale);
            }

            // Merged the two duplicated threshold branches: when facial features
            // were not detected the confidences simply remain 0, which is exactly
            // what the original's second branch stored.
            if (Similarity >= FaceDetectionThreshold)
            {
                Similarities[MatchedCount] = Similarity;
                Smile[MatchedCount] = ConfidenceSmile;
                EyesOpen[MatchedCount] = ConfidenceEyesOpen;
                Male[MatchedCount] = ConfidenceMale;
                Female[MatchedCount] = ConfidenceFemale;
                Numbers[MatchedCount] = k;
                ++MatchedCount;
            }
        }

        if (MatchedCount == 0)
        {
            MessageBox.Show("No matches found. You can try to increase the FAR parameter in the Options dialog box.", "No matches");
        }
        else
        {
            for (int j = 0; j < MatchedCount; j++)
            {
                if ((Similarities[j] * 100.0f) >= 30.0f)
                {
                    // BUG FIX: the original indexed FaceSearchList[j] here; j indexes
                    // the compacted match arrays, and Numbers[j] maps back to the
                    // search-list index (as the img1 line below already did).
                    resultImagelist.Images.Add(FaceSearchList[Numbers[j]].faceImage.ToCLRImage());

                    // LEAK FIX: dispose the pre-resize source images (the original
                    // overwrote the references, leaking them and keeping the subject
                    // file locked until GC).
                    Image img1;
                    using (Image src1 = FaceSearchList[Numbers[j]].faceImage.ToCLRImage())
                    {
                        img1 = new Bitmap(src1, new Size(100, 100));
                    }
                    Image img2;
                    using (Image src2 = Image.FromFile(SubjectList[i].ImageFileName))
                    {
                        img2 = new Bitmap(src2, new Size(100, 100));
                    }

                    string feature = DbSubject.suspectName + " \r\n\nSimilarity = " + (Similarities[j] * 100).ToString() + " Smile:" + Smile[j] * 100 + " Eyes Open: " + EyesOpen[j] * 100 + " Male:" + Male[j] * 100 + " Female: " + Female[j] * 100;
                    Object[] row = new Object[] { img1, img2, feature };
                    this.dataGridView1.Invoke(new MethodInvoker(() => this.dataGridView1.Rows.Add(row)));
                }
            }
        }
    }
}
/// <summary>
/// Extracts one thumbnail per second from a video (via FFMpeg) into a Desktop
/// "Frames" folder, then runs face detection/enrollment on each frame exactly like
/// loadSubject, filling FaceSearchList and the UI lists.
/// </summary>
/// <param name="fileName">Full path of the video file.</param>
private void loadVideo(string fileName)
{
    if (FaceSearchList.Count > 0)
    {
        FaceSearchList.Clear();
        FaceSearchImageList.Images.Clear();
        listView1.Clear();
    }

    string username = Environment.UserName;
    string framePath = @"C:\Users\" + username + @"\Desktop\Frames";
    if (!Directory.Exists(framePath))
    {
        Directory.CreateDirectory(framePath);
    }

    FFMpegConverter ffmpeg = new FFMpegConverter();
    // Hoisted: Duration(fileName) is loop-invariant (assumed side-effect free —
    // the original re-evaluated it on every iteration).
    int frameCount = Duration(fileName);
    for (int i = 0; i < frameCount; i++)
    {
        string fn = framePath + @"\" + i + ".jpeg";
        ffmpeg.GetVideoThumbnail(fileName, fn, i);

        // BUG FIX: the original created ONE TFaceRecord before the loop and reused
        // it for every frame, so every entry added to FaceSearchList aliased the
        // same object and ended up holding the LAST frame's data. Create a fresh
        // record per frame.
        TFaceRecord fr = new TFaceRecord();
        fr.ImageFileName = fn;
        fr.FacePosition = new FSDK.TFacePosition();
        fr.FacialFeatures = new FSDK.TPoint[2];
        fr.Template = new byte[FSDK.TemplateSize];
        fr.image = new FSDK.CImage(fn);

        fr.FacePosition = fr.image.DetectFace();
        if (0 == fr.FacePosition.w)
        {
            continue; // no face in this frame (original's empty branch did nothing)
        }

        // Crop a square of side w centered on the detected face.
        int half = (int)Math.Round(fr.FacePosition.w * 0.5);
        fr.faceImage = fr.image.CopyRect(
            fr.FacePosition.xc - half, fr.FacePosition.yc - half,
            fr.FacePosition.xc + half, fr.FacePosition.yc + half);

        try
        {
            fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
        }
        catch (Exception ex2)
        {
            MessageBox.Show(ex2.Message, "Error detecting eyes.");
        }

        try
        {
            fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision
        }
        catch (Exception ex2)
        {
            MessageBox.Show(ex2.Message, "Error retrieving face template.");
        }

        FaceSearchList.Add(fr);
        FaceSearchImageList.Images.Add(fr.faceImage.ToCLRImage());
        listView1.Items.Add(
            (FaceSearchImageList.Images.Count - 1).ToString(),
            fn.Split('\\')[fn.Split('\\').Length - 1],
            FaceSearchImageList.Images.Count - 1);

        // BUG FIX: the original disposed the Image (via "using") while it was still
        // assigned to pictureBox1. Detach it from the control before disposing.
        Image img = fr.image.ToCLRImage();
        pictureBox1.Image = img;
        pictureBox1.Refresh();
        pictureBox1.Image = null;
        img.Dispose();

        listView1.Refresh();
    }
    pictureBox1.Image = null;
}
/// <summary>
/// Recursively scans a folder for *.jpg files and enrolls each one into
/// FaceSearchList, mirroring loadSubject's per-file pipeline (detect, crop, eyes,
/// template, UI lists). Files with no detectable face are skipped silently.
/// </summary>
/// <param name="folderName">Root folder to scan (recursive).</param>
private void loadSubjectFolder(string folderName)
{
    FaceSearchList.Clear();
    DirectoryInfo di = new DirectoryInfo(folderName);
    FileInfo[] Images = di.GetFiles("*.jpg", SearchOption.AllDirectories);

    for (int i = 0; i < Images.Length; i++)
    {
        string fn = Images[i].FullName;
        TFaceRecord fr = new TFaceRecord();
        fr.ImageFileName = fn;
        fr.FacePosition = new FSDK.TFacePosition();
        fr.FacialFeatures = new FSDK.TPoint[FSDK.FSDK_FACIAL_FEATURE_COUNT];
        fr.Template = new byte[FSDK.TemplateSize];

        try
        {
            fr.image = new FSDK.CImage(fn);
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message, "Error loading file");
            // BUG FIX: the original fell through with fr.image == null and crashed
            // with a NullReferenceException on DetectFace below. Skip this file.
            continue;
        }

        fr.FacePosition = fr.image.DetectFace();
        if (0 == fr.FacePosition.w)
        {
            continue; // no face found — skip silently (original behavior)
        }

        // Crop a square of side w centered on the detected face.
        int half = (int)Math.Round(fr.FacePosition.w * 0.5);
        fr.faceImage = fr.image.CopyRect(
            fr.FacePosition.xc - half, fr.FacePosition.yc - half,
            fr.FacePosition.xc + half, fr.FacePosition.yc + half);

        bool eyesDetected = false;
        try
        {
            fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
            eyesDetected = true;
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message, "Error detecting eyes.");
        }
        if (eyesDetected)
        {
            fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision
        }

        FaceSearchList.Add(fr);
        FaceSearchImageList.Images.Add(fr.faceImage.ToCLRImage());
        // CONSISTENCY FIX: the original labeled every item with the FOLDER name,
        // so all rows showed the same text; label with the file name like the
        // other loaders (loadSubject / loadVideo) do.
        listView1.Items.Add(
            (FaceSearchImageList.Images.Count - 1).ToString(),
            fn.Split('\\')[fn.Split('\\').Length - 1],
            FaceSearchImageList.Images.Count - 1);

        // BUG FIX: the original disposed the Image (via "using") while it was still
        // assigned to pictureBox1. Detach it from the control before disposing.
        Image img = fr.image.ToCLRImage();
        pictureBox1.Image = img;
        pictureBox1.Refresh();
        listView1.Refresh();
        pictureBox1.Image = null;
        img.Dispose();
    }
    pictureBox1.Image = null;
}
/// <summary>
/// Loads every row of the FaceList table back into TFaceRecord objects (rebuilding
/// face position, eye points, template, and both images from their stored bytes)
/// and fills SubjectImageList with the face crops. Column ordinals 0-12 must match
/// the insert order used by SaveSubject.
/// </summary>
/// <param name="conString">SQL CE connection string.</param>
/// <returns>The list of all stored subjects; empty on error (a message box is shown).</returns>
public List<TFaceRecord> LoadSubject(string conString)
{
    InitializeSDK();
    SubjectImageList = new ImageList();
    List<TFaceRecord> SubjectList = new List<TFaceRecord>();
    try
    {
        using (conn = new SqlCeConnection(conString))
        {
            conn.Open();
            using (cmd = new SqlCeCommand(@"Select * From FaceList", conn))
            // FIX: the original never disposed the data reader.
            using (SqlCeDataReader reader = cmd.ExecuteReader())
            {
                while (reader.Read())
                {
                    TFaceRecord fr = new TFaceRecord();
                    fr.ImageFileName = reader.GetString(0);
                    fr.suspectName = reader.GetString(1);
                    fr.FacePosition = new FSDK.TFacePosition();
                    fr.FacePosition.xc = reader.GetInt32(2);
                    fr.FacePosition.yc = reader.GetInt32(3);
                    fr.FacePosition.w = reader.GetInt32(4);
                    fr.FacePosition.angle = reader.GetFloat(5);
                    fr.FacialFeatures = new FSDK.TPoint[2];
                    fr.FacialFeatures[0] = new FSDK.TPoint();
                    fr.FacialFeatures[0].x = reader.GetInt32(6);
                    fr.FacialFeatures[0].y = reader.GetInt32(7);
                    fr.FacialFeatures[1] = new FSDK.TPoint();
                    fr.FacialFeatures[1].x = reader.GetInt32(8);
                    fr.FacialFeatures[1].y = reader.GetInt32(9);
                    fr.Template = new byte[FSDK.TemplateSize];
                    reader.GetBytes(10, 0, fr.Template, 0, FSDK.TemplateSize);

                    // Decode the stored JPEG blobs; dispose the CLR images once the
                    // CImage copies exist (using is exception-safe, unlike the
                    // original's manual Dispose calls).
                    using (Image img = Image.FromStream(new System.IO.MemoryStream(reader.GetSqlBinary(11).Value)))
                    using (Image img_face = Image.FromStream(new System.IO.MemoryStream(reader.GetSqlBinary(12).Value)))
                    {
                        fr.image = new FSDK.CImage(img);
                        fr.faceImage = new FSDK.CImage(img_face);
                        SubjectList.Add(fr);
                        SubjectImageList.Images.Add(fr.faceImage.ToCLRImage());
                    }
                }
            }
            // conn.Close() removed: the using block disposes (and closes) the connection.
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Exception on loading database");
    }
    return SubjectList;
}