//bool _toContinue = true;

/// <summary>
/// Checks whether the supplied face template matches any template stored in
/// faceTemplates. On the first match the matched template is handed to setV.
/// </summary>
/// <param name="ttp">Face template to test against the stored list.</param>
/// <returns>True if a stored template matches within the FAR threshold.</returns>
public bool check_reg(byte[] ttp)
{
    try
    {
        // The threshold depends only on the FAR value, so compute it once
        // instead of on every loop iteration. 0.001f is a FAR of 0.1%
        // (the old comment incorrectly said 1%).
        float threshold = 0.0f;
        FSDK.GetMatchingThresholdAtFAR(0.001f, ref threshold);

        bool val = false;
        foreach (byte[] t in faceTemplates)
        {
            float similarity = 0.0f;
            byte[] t1 = t; // MatchFaces requires ref arguments
            FSDK.MatchFaces(ref ttp, ref t1, ref similarity);
            if (similarity > threshold)
            {
                val = true;
                setV(t1);
                break;
            }
            Application.DoEvents(); // keep the UI responsive during the scan
        }
        return (val);
    }
    catch (InvalidOperationException)
    {
        // faceTemplates may be mutated by the capture loop while we enumerate
        // it; treat that as "no match" rather than crashing the caller.
        return (false);
    }
}
/// <summary>
/// Searches UserTemplates for a user whose stored face templates match the
/// given template. (Method name kept as-is for existing callers despite the
/// typo.)
/// </summary>
/// <param name="template">Face template to recognize.</param>
/// <returns>Index of the first matching user, or -1 if no user matches.</returns>
private int recongnize(FaceTemplate template)
{
    // The matching threshold depends only on the FAR value, so compute it
    // once up front instead of inside the inner loop.
    // FAR = 0.25 means a 25% probability that a different person is accepted
    // as a match (valid range 0..1). NOTE(review): this is very permissive.
    float threshold = 0.0f;
    FSDK.GetMatchingThresholdAtFAR(0.25f, ref threshold);

    for (int i = 0; i < UserTemplates.Count; i++)
    {
        foreach (FaceTemplate t in UserTemplates[i])
        {
            float similarity = 0.0f;
            FaceTemplate t1 = t; // MatchFaces requires ref arguments
            FSDK.MatchFaces(ref template.templateData, ref t1.templateData, ref similarity);
            if (similarity > threshold)
            {
                return (i); // first matching user wins
            }
        }
    }
    return (-1); // no user matched
}
/// <summary>
/// Processes a search face: if any of its templates matches the first
/// template of any face already in Form2.FaceList, the face is appended to
/// the list, shown in listView2, and its source entry in AnhList is flagged
/// as handled. Stops after the first match.
/// </summary>
/// <param name="SearchFace">Face record whose templates are compared.</param>
/// <param name="index">Index into AnhList used to mark the record processed.</param>
public void xuLy(TFaceRecord SearchFace, int index)
{
    float Threshold = 0.0f;
    // NOTE(review): if Form1.FARValue is an int, "/ 100" is integer division
    // and truncates to 0 for values below 100 -- confirm the field's type.
    FSDK.GetMatchingThresholdAtFAR(Form1.FARValue / 100, ref Threshold);

    for (int j = 0; j < SearchFace.Template.Count; j++)
    {
        for (int i = 0; i < Form2.FaceList.Count; i++)
        {
            float Similarity = 0.0f;
            // No intermediate allocations needed: the previous version
            // allocated two TemplateSize buffers and immediately discarded
            // them; MatchFaces only needs ref parameters.
            byte[] anh1 = SearchFace.Template[j];
            byte[] anh2 = Form2.FaceList[i].Template[0];
            FSDK.MatchFaces(ref anh1, ref anh2, ref Similarity);
            if (Similarity >= Threshold)
            {
                // Promote the matching template to slot 0 before storing.
                SearchFace.Template[0] = SearchFace.Template[j];
                Form2.FaceList.Add(SearchFace);
                imageList2.Images.Add(SearchFace.image.ToCLRImage());
                items = new ListViewItem();
                items.ImageIndex = demlv2++;
                items.Text = SearchFace.ImageFileName;
                items.ToolTipText = SearchFace.ImageFileName;
                items.Tag = SearchFace;
                listView2.Items.Add(items);
                AnhList[index].TrangThai = true; // mark source record as processed
                return; // stop after the first match
            }
        }
    }
}
/// <summary>
/// Checks whether the supplied face template matches any stored template.
/// On the first match the matched template is handed to setV.
/// </summary>
/// <param name="ttp">Face template to test against the stored list.</param>
/// <returns>True if any stored template matches at FAR = 1%.</returns>
public bool check_reg(byte[] ttp)
{
    // The threshold depends only on the FAR value, so compute it once
    // instead of on every loop iteration.
    float threshold = 0.0f;
    FSDK.GetMatchingThresholdAtFAR(0.01f, ref threshold); // FAR = 1%

    bool val = false;
    foreach (byte[] t in faceTemplates)
    {
        float similarity = 0.0f;
        byte[] t1 = t; // MatchFaces requires ref arguments
        FSDK.MatchFaces(ref ttp, ref t1, ref similarity);
        if (similarity > threshold)
        {
            val = true;
            setV(t1);
            break;
        }
    }
    return (val);
}
/// <summary>
/// Compares SearchFace against every face in FaceList and reports via
/// message boxes whether a matching staff record exists. Runs on a worker
/// thread which is aborted at the end.
/// </summary>
/// <param name="SearchFace">Face record to look up in FaceList.</param>
void Go(ref TFaceRecords SearchFace)
{
    System.Drawing.Image img = SearchFace.faceImage.ToCLRImage();
    float Threshold = 0.0f;
    // BUG FIX: "70 / 100" was integer division and evaluated to 0, so the
    // threshold was computed for FAR = 0 (the strictest possible setting)
    // instead of the intended 0.7. Use a float literal.
    FSDK.GetMatchingThresholdAtFAR(0.7f, ref Threshold);
    int MatchedCount = 0;
    int FaceCount = FaceList.Count();
    Double[] Similarities = new Double[FaceCount];
    int[] Numbers = new int[FaceCount];
    MessageBox.Show("Facelist = " + FaceList.Count);
    for (int i = 0; i <= FaceList.Count - 1; i++)
    {
        float Similarity = 0.0F;
        TFaceRecords CurrentFace = FaceList[i];
        FSDK.MatchFaces(ref SearchFace.Template, ref CurrentFace.Template, ref Similarity);
        MessageBox.Show("Similarity = " + Similarity + "Threshold = " + Threshold);
        if (Similarity >= Threshold)
        {
            // Parallel arrays: similarity and the index of the matched face.
            Similarities[MatchedCount] = Similarity;
            Numbers[MatchedCount] = i;
            MatchedCount += 1;
        }
    }
    if (MatchedCount == 0)
    {
        MessageBox.Show("No matches found.\nTry Again !!!" + MatchedCount, "No matches");
    }
    else
    {
        MessageBox.Show("Staff Record found in database...");
    }
    // NOTE(review): Thread.Abort is obsolete and unsupported on modern .NET;
    // prefer a cooperative cancellation flag. Kept to preserve behavior.
    t.Abort();
    // button1.Enabled = true;
}
/// <summary>
/// Displays SearchFace in pictureBox1 and lists every face from
/// Form1.FaceList whose similarity reaches the FAR-derived threshold,
/// sorted by descending similarity, in listView1.
/// </summary>
/// <param name="SearchFace">Face record to search for.</param>
public void Go(TFaceRecord SearchFace)
{
    // Show the query image at its natural size.
    Image img = SearchFace.image.ToCLRImage();
    pictureBox1.Image = img;
    pictureBox1.Height = img.Height;
    pictureBox1.Width = img.Width;
    // Rebuild the 100x100, 24-bit thumbnail list backing the result view.
    imageList1 = new ImageList();
    Size size100x100 = new Size();
    size100x100.Height = 100;
    size100x100.Width = 100;
    imageList1.ImageSize = size100x100;
    imageList1.ColorDepth = ColorDepth.Depth24Bit;
    listView1.OwnerDraw = false;
    listView1.View = View.LargeIcon;
    listView1.Dock = DockStyle.Bottom;
    listView1.LargeImageList = imageList1;
    label1.Dock = DockStyle.Bottom;
    // Convert the configured FAR into a similarity threshold.
    // NOTE(review): if Form1.FARValue is an int, "/ 100" is integer division
    // and truncates to 0 for values below 100 -- confirm the field's type.
    float Threshold = 0.0f;
    FSDK.GetMatchingThresholdAtFAR(Form1.FARValue / 100, ref Threshold);
    int MatchedCount = 0;
    int FaceCount = Form1.FaceList.Count;
    float[] Similarities = new float[FaceCount];
    int[] Numbers = new int[FaceCount];
    // Compare the query template against every enrolled face; record matches
    // in the parallel arrays Similarities/Numbers.
    for (int i = 0; i < Form1.FaceList.Count; i++)
    {
        float Similarity = 0.0f;
        TFaceRecord CurrentFace = Form1.FaceList[i];
        FSDK.MatchFaces(ref SearchFace.Template, ref CurrentFace.Template, ref Similarity);
        if (Similarity >= Threshold)
        {
            Similarities[MatchedCount] = Similarity;
            Numbers[MatchedCount] = i;
            ++MatchedCount;
        }
    }
    if (MatchedCount == 0)
    {
        MessageBox.Show("No matches found. You can try to increase the FAR parameter in the Options dialog box.", "No matches");
    }
    else
    {
        // Sort matched similarities highest-first; Numbers is permuted in
        // step so each similarity keeps its FaceList index.
        floatReverseComparer cmp = new floatReverseComparer();
        Array.Sort(Similarities, Numbers, 0, MatchedCount, (IComparer <float>)cmp);
        label1.Text = "Faces Matched: " + MatchedCount.ToString();
        for (int i = 0; i < MatchedCount; i++)
        {
            imageList1.Images.Add(Form1.FaceList[Numbers[i]].faceImage.ToCLRImage());
            // Item: similarity percent (invariant culture), labeled with the
            // bare file name (last path segment) and the similarity again.
            listView1.Items.Add((Similarities[i] * 100.0f).ToString(System.Globalization.CultureInfo.InvariantCulture.NumberFormat), Form1.FaceList[Numbers[i]].ImageFileName.Split('\\')[Form1.FaceList[Numbers[i]].ImageFileName.Split('\\').Length - 1] + "\r\nSimilarity = " + (Similarities[i] * 100).ToString(), imageList1.Images.Count - 1);
        }
    }
    this.Show();
}
/// <summary>
/// Runs the camera loop: grabs frames, detects a face, and depending on
/// programState either enrolls facial templates into the database
/// (psRemember) or recognizes the face against stored records (psRecognize).
/// Loops until needClose is set.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    this.button1.Enabled = false;
    int cameraHandle = 0;
    int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);
    if (r != FSDK.FSDKE_OK)
    {
        MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Application.Exit();
    }
    // Set realtime face detection parameters.
    FSDK.SetFaceDetectionParameters(false, false, 100);
    FSDK.SetFaceDetectionThreshold(3);
    // List where we store face templates.
    faceTemplates = new List<FaceTemplate>();
    while (!needClose)
    {
        Int32 imageHandle = 0;
        // Grab the current frame from the camera; skip this pass on failure.
        if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle))
        {
            Application.DoEvents();
            continue;
        }
        FSDK.CImage image = new FSDK.CImage(imageHandle);
        Image frameImage = image.ToCLRImage();
        Graphics gr = Graphics.FromImage(frameImage);
        FSDK.TFacePosition facePosition = image.DetectFace();
        // If a face is detected, we can enroll or recognize it.
        if (facePosition.w != 0)
        {
            gr.DrawRectangle(Pens.LightGreen, facePosition.xc - facePosition.w / 2, facePosition.yc - facePosition.w / 2, facePosition.w, facePosition.w);
            // Create a face template for this frame.
            FaceTemplate template = new FaceTemplate();
            template.templateData = new byte[FSDK.TemplateSize];
            if (programState == ProgramState.psRemember || programState == ProgramState.psRecognize)
            {
                template.templateData = image.GetFaceTemplateInRegion(ref facePosition);
            }
            switch (programState)
            {
                case ProgramState.psNormal:
                    // Normal state - do nothing.
                    break;

                case ProgramState.psRemember:
                    // Remember Me state - collect 10 templates, then store the
                    // latest one in the database under the entered name.
                    label1.Text = "Templates stored: " + faceTemplates.Count.ToString();
                    faceTemplates.Add(template);
                    if (faceTemplates.Count > 9)
                    {
                        // Ask the user for a name.
                        InputName inputName = new InputName();
                        inputName.ShowDialog();
                        userName = inputName.userName;
                        // Parameterized insert - safe against SQL injection.
                        cmd = new SqlCommand("insert into facetb values(@Name,@face)", con);
                        cmd.Parameters.AddWithValue("@Name", userName);
                        cmd.Parameters.AddWithValue("@face", template.templateData);
                        con.Open();
                        cmd.ExecuteNonQuery();
                        con.Close();
                        MessageBox.Show("Record Save!");
                        programState = ProgramState.psRecognize;
                    }
                    break;

                case ProgramState.psRecognize:
                    // Recognize the user against all database records.
                    bool match = false;
                    // BUG FIX: rebuild the lists from scratch each pass.
                    // Previously the lists grew on every frame, and a single
                    // FaceTemplate instance was re-added for every row, so all
                    // list entries aliased the last row read from the database
                    // and strList got out of step with faceTemplates.
                    faceTemplates.Clear();
                    strList.Clear();
                    con.Open();
                    cmd = new SqlCommand("select * from facetb ORDER BY id ASC ", con);
                    SqlDataReader dr = cmd.ExecuteReader();
                    while (dr.Read())
                    {
                        FaceTemplate dbTemplate = new FaceTemplate();
                        dbTemplate.templateData = (byte[])dr["face"];
                        faceTemplates.Add(dbTemplate);
                        strList.Add(dr["Name"].ToString());
                    }
                    con.Close();
                    // The matching threshold depends only on the FAR value,
                    // so compute it once outside the loop.
                    float threshold = 0.0f;
                    FSDK.GetMatchingThresholdAtFAR(0.01f, ref threshold); // set FAR to 1%
                    int ii = 0;
                    foreach (FaceTemplate t in faceTemplates)
                    {
                        float similarity = 0.0f;
                        FaceTemplate t1 = t; // MatchFaces requires ref arguments
                        FSDK.MatchFaces(ref template.templateData, ref t1.templateData, ref similarity);
                        if (similarity > threshold)
                        {
                            // strList runs parallel to faceTemplates, so ii
                            // names the matched record.
                            userName = strList[ii].ToString();
                            label3.Text = strList[ii].ToString();
                            match = true;
                            break;
                        }
                        ii++;
                    }
                    if (match)
                    {
                        // Draw the recognized name under the face rectangle.
                        StringFormat format = new StringFormat();
                        format.Alignment = StringAlignment.Center;
                        gr.DrawString(userName, new System.Drawing.Font("Arial", 16), new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen), facePosition.xc, facePosition.yc + facePosition.w * 0.55f, format);
                        // abc = 0;
                        send();
                    }
                    else
                    {
                        abc = 0;
                        label3.Text = "UnKnow FACE";
                    }
                    break;
            }
        }
        // Display the current frame.
        pictureBox1.Image = frameImage;
        GC.Collect(); // collect the garbage after the native image deletion
        // Make UI controls accessible.
        Application.DoEvents();
    }
    FSDKCam.CloseVideoCamera(cameraHandle);
    FSDKCam.FinalizeCapturing();
}
/// <summary>
/// Attempts to recognize the face in the given image against the enrolled
/// known_faces_list. On success populates and returns the
/// face_recog_results field; if no face is found or an exception occurs,
/// the field's previous value is returned unchanged.
/// </summary>
/// <param name="a_face">BGR image expected to contain a face; must not be null.</param>
/// <returns>The FaceRecognitionResult for the most similar enrolled face.</returns>
public FaceRecognitionResult MatchFace(Image <Bgr, byte> a_face)
{
    try
    {
        if (a_face == null)
        {
            throw new ArgumentNullException();
        }
        // Create a face object for the unknown face.
        Face unknown_face = new Face();
        unknown_face.face_position = new FacePosition();
        unknown_face.facial_features = new FSDK.TPoint[FSDK.FSDK_FACIAL_FEATURE_COUNT];
        unknown_face.face_template = new byte[FSDK.TemplateSize];
        unknown_face.image = new FSDK.CImage(a_face.ToBitmap());
        // Get the position of the face in the image.
        unknown_face.face_position = FacePosition.FromFSDK(unknown_face.image.DetectFace());
        // NOTE(review): cloning the whole Face just to read .image looks
        // redundant (unknown_face.image should do) -- confirm Clone() has no
        // required side effects before simplifying.
        unknown_face.face_image = unknown_face.Clone().image;
        FSDK.TFacePosition face_pos = unknown_face.face_position.Clone();
        // Check if a face has been detected (w == 0 means no face).
        if (0 == face_pos.w)
        {
            face_pos = null;
            Debug.WriteLine("No Face Found");
            return(face_recog_results);
        }
        try
        {
            FSDK.TFacePosition face_pos_1 = unknown_face.face_position.Clone();
            // Get the facial features of the face (eyes, nose, etc.).
            unknown_face.facial_features = unknown_face.Clone().image.DetectEyesInRegion(ref face_pos_1);
            face_pos_1 = null;
        }
        catch (Exception)
        {
            // Best effort: missing facial features are tolerated.
        }
        try
        {
            FSDK.TFacePosition face_pos_2 = unknown_face.face_position.Clone();
            // Get a template of the face to be used for comparison.
            unknown_face.face_template = unknown_face.Clone().image.GetFaceTemplateInRegion(ref face_pos_2);
            face_pos_2 = null;
        }
        catch (Exception)
        {
            // Best effort: a failed extraction leaves the zeroed template.
        }
        // Threshold indicating how similar two faces must be to be considered
        // the same person at the configured false-acceptance rate.
        // NOTE(review): if FARValue is an int, "/ 100" is integer division
        // and truncates to 0 for values below 100 -- confirm the type.
        float similarity_threshold = 0.0f;
        FSDK.GetMatchingThresholdAtFAR(FARValue / 100, ref similarity_threshold);
        // Number of matches found so far.
        int matches_count = 0;
        // Count of all faces enrolled.
        int faces_count = known_faces_list.Count;
        // Parallel arrays: similarity value and index of each matched face.
        float[] similarities = new float[faces_count];
        int[] numbers = new int[faces_count];
        List <KeyValuePair <Face, float> > face_to_similarity_map = new List <KeyValuePair <Face, float> >();
        // Loop through the known faces, comparing each with the unknown face.
        for (int i = 0; i < known_faces_list.Count; i++)
        {
            float similarity = 0.0f;
            Face next_face = known_faces_list[i];
            Byte[] unknown_face_template = unknown_face.face_template;
            Byte[] known_face_template = next_face.face_template;
            // Compare the two templates for similarity.
            FSDK.MatchFaces(ref unknown_face_template, ref known_face_template, ref similarity);
            unknown_face_template = null;
            known_face_template = null;
            if (similarity >= similarity_threshold)
            {
                similarities[matches_count] = similarity;
                numbers[matches_count] = i;
                face_to_similarity_map.Add(new KeyValuePair <Face, float>(next_face, similarity));
                ++matches_count;
            }
        }
        // Sort the similarities in descending order (numbers is permuted in
        // step) to find the most similar face.
        FloatsDescendingOrder floats_descending_order = new FloatsDescendingOrder();
        Array.Sort(similarities, numbers, 0, matches_count, (IComparer <float>)floats_descending_order);
        // Build the result from the most similar face.
        face_recog_results = new FaceRecognitionResult();
        face_recog_results = GetOfMostSimilarFace(similarities, face_to_similarity_map);
        face_recog_results.original_detected_face = a_face;
        face_to_similarity_map = null;
        // Return results of the face recognition operation.
        return(face_recog_results);
    }
    catch (Exception e)
    {
        // Swallow-and-log: callers receive the previous result on failure.
        Debug.WriteLine(e.Message);
    }
    return(face_recog_results);
}
/// <summary>
/// Compares every database subject against every searched face and lists
/// matches (similarity at or above the FAR threshold and at least 30%) in
/// dataGridView1 together with smile/eyes-open/gender confidence values.
/// Safe to call from a worker thread: all grid access goes through Invoke.
/// </summary>
private void matchesFace()
{
    if (dataGridView1.Rows.Count > 0)
    {
        // Clear previous results on the UI thread.
        this.dataGridView1.Invoke(new MethodInvoker(() => this.dataGridView1.Rows.Clear()));
        this.dataGridView1.Invoke(new MethodInvoker(() => this.dataGridView1.Refresh()));
    }
    for (int i = 0; i < SubjectList.Count; i++)
    {
        // NOTE(review): if FARValue is an int, "/ 100" is integer division
        // and truncates to 0 for values below 100 -- confirm the type.
        FSDK.GetMatchingThresholdAtFAR(FARValue / 100, ref FaceDetectionThreshold);
        TFaceRecord DbSubject = SubjectList[i];
        int MatchedCount = 0;
        int FaceCount = FaceSearchList.Count;
        // Parallel arrays indexed by match order.
        float[] Similarities = new float[FaceCount];
        float[] Smile = new float[FaceCount];
        float[] EyesOpen = new float[FaceCount];
        float[] Male = new float[FaceCount];
        float[] Female = new float[FaceCount];
        int[] Numbers = new int[FaceCount];
        for (int k = 0; k < FaceSearchList.Count; k++)
        {
            float Similarity = 0.0f;
            float ConfidenceSmile = 0.0f;
            float ConfidenceEyesOpen = 0.0f;
            float ConfidenceMale = 0.0f;
            float ConfidenceFemale = 0.0f;
            TFaceRecord SearchFace = FaceSearchList[k];
            FSDK.MatchFaces(ref DbSubject.Template, ref SearchFace.Template, ref Similarity);
            long MaxSizeInBytes = 100000;
            string ExpressionValues = "";
            string GenderValues = "";
            FSDK.TPoint[] Facefeatures = null;
            FSDK.DetectFacialFeatures(SearchFace.faceImage.ImageHandle, out Facefeatures);
            if (Facefeatures != null)
            {
                // Attribute confidences are only available when facial
                // features were detected; otherwise they remain 0.
                FSDK.DetectFacialAttributeUsingFeatures(SearchFace.faceImage.ImageHandle, ref Facefeatures, "Expression", out ExpressionValues, MaxSizeInBytes);
                FSDK.GetValueConfidence(ExpressionValues, "Smile", ref ConfidenceSmile);
                FSDK.GetValueConfidence(ExpressionValues, "EyesOpen", ref ConfidenceEyesOpen);
                FSDK.DetectFacialAttributeUsingFeatures(SearchFace.faceImage.ImageHandle, ref Facefeatures, "Gender", out GenderValues, MaxSizeInBytes);
                FSDK.GetValueConfidence(GenderValues, "Male", ref ConfidenceMale);
                FSDK.GetValueConfidence(GenderValues, "Female", ref ConfidenceFemale);
            }
            // Record the match once for both cases (the original duplicated
            // this block in the if and else branches with identical effect,
            // since the confidence defaults are already 0).
            if (Similarity >= FaceDetectionThreshold)
            {
                Similarities[MatchedCount] = Similarity;
                Smile[MatchedCount] = ConfidenceSmile;
                EyesOpen[MatchedCount] = ConfidenceEyesOpen;
                Male[MatchedCount] = ConfidenceMale;
                Female[MatchedCount] = ConfidenceFemale;
                Numbers[MatchedCount] = k;
                ++MatchedCount;
            }
        }
        if (MatchedCount == 0)
        {
            MessageBox.Show("No matches found. You can try to increase the FAR parameter in the Options dialog box.", "No matches");
        }
        else
        {
            for (int j = 0; j < MatchedCount; j++)
            {
                if ((Similarities[j] * 100.0f) >= 30.0f)
                {
                    // BUG FIX: index through Numbers[j] (the FaceSearchList
                    // index of the j-th match), not j (the match counter),
                    // so the thumbnail corresponds to the matched face --
                    // consistent with the img1 line below.
                    resultImagelist.Images.Add(FaceSearchList[Numbers[j]].faceImage.ToCLRImage());
                    Image img1 = FaceSearchList[Numbers[j]].faceImage.ToCLRImage();
                    img1 = (Image)(new Bitmap(img1, new Size(100, 100)));
                    Image img2 = Image.FromFile(SubjectList[i].ImageFileName);
                    img2 = (Image)(new Bitmap(img2, new Size(100, 100)));
                    string feature = DbSubject.suspectName + " \r\n\nSimilarity = " + (Similarities[j] * 100).ToString() + " Smile:" + Smile[j] * 100 + " Eyes Open: " + EyesOpen[j] * 100 + " Male:" + Male[j] * 100 + " Female: " + Female[j] * 100;
                    Object[] row = new Object[] { img1, img2, feature };
                    this.dataGridView1.Invoke(new MethodInvoker(() => this.dataGridView1.Rows.Add(row)));
                }
            }
        }
    }
}
/// <summary>
/// Worker-thread handler for a single TCP client. Protocol: the client
/// streams a Base64-encoded image terminated by "&lt;EOF&gt;"; the server
/// detects the face, matches it against the enrolled FaceList, and replies
/// "R=&lt;name&gt;" when the best similarity is at least 95%, otherwise
/// "R=NONE;". The client acknowledges with "OK;" to re-arm for the next
/// image. Runs until the stream closes or errors.
/// </summary>
/// <param name="threadId">Boxed int key into tcpClients for this client.</param>
private void handleClients(object threadId)
{
    int clientId = (int)threadId;
    TcpClient tcpClient = tcpClients[clientId];
    string remoteEndPoint = tcpClient.Client.RemoteEndPoint.ToString();
    // Split "ip:port" into its parts (currently only used for logging).
    string clientIp = remoteEndPoint.Substring(0, remoteEndPoint.IndexOf(":"));
    string clientPort = remoteEndPoint.Substring(remoteEndPoint.IndexOf(":") + 1);
    add("Client " + remoteEndPoint + " connected");
    NetworkStream stream = tcpClient.GetStream();
    string clientData = string.Empty;
    Byte[] bytes = new Byte[256];
    int i;
    // init == false: accumulating image bytes for the next recognition.
    // init == true : a reply was sent; waiting for the client's "OK" ack.
    bool init = false;
    try
    {
        while ((i = stream.Read(bytes, 0, bytes.Length)) != 0)
        {
            if (!init)
            {
                // NOTE(review): assumes the Base64 payload is pure ASCII.
                clientData += Encoding.ASCII.GetString(bytes, 0, i);
            }
            if (clientData.Contains("<EOF>") && !init)
            {
                init = true;
                // Everything before the <EOF> marker is the Base64 image.
                string result = clientData.Substring(0, clientData.IndexOf("<EOF>"));
                clientData = string.Empty;
                byte[] imageBytes = Convert.FromBase64String(result);
                MemoryStream ms = new MemoryStream(imageBytes, 0, imageBytes.Length);
                ms.Position = 0;
                Image img = Image.FromStream(ms);
                if (FaceList.Count == 0)
                {
                    add("Please enroll faces first");
                    init = false;
                }
                else
                {
                    // Build a face record for the received image.
                    TFaceRecord fr = new TFaceRecord();
                    fr.FacePosition = new FSDK.TFacePosition();
                    fr.FacialFeatures = new FSDK.TPoint[FSDK.FSDK_FACIAL_FEATURE_COUNT];
                    fr.Template = new byte[FSDK.TemplateSize];
                    fr.image = new FSDK.CImage(img);
                    fr.FacePosition = fr.image.DetectFace();
                    if (0 == fr.FacePosition.w)
                    {
                        add("No faces found. Try to lower the Minimal Face Quality parameter in the Options dialog box.");
                        init = false;
                    }
                    else
                    {
                        // Crop a square region centered on the detected face.
                        fr.faceImage = fr.image.CopyRect((int)(fr.FacePosition.xc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc - Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.xc + Math.Round(fr.FacePosition.w * 0.5)), (int)(fr.FacePosition.yc + Math.Round(fr.FacePosition.w * 0.5)));
                        bool eyesDetected = false;
                        try
                        {
                            fr.FacialFeatures = fr.image.DetectEyesInRegion(ref fr.FacePosition);
                            eyesDetected = true;
                        }
                        catch
                        {
                            add("Detecting eyes failed.");
                            init = false;
                        }
                        if (eyesDetected)
                        {
                            fr.Template = fr.image.GetFaceTemplateInRegion(ref fr.FacePosition); // get template with higher precision
                            // NOTE(review): if FARValue is an int, "/ 100" is
                            // integer division and truncates to 0 -- confirm.
                            float Threshold = 0.0f;
                            FSDK.GetMatchingThresholdAtFAR(FARValue / 100, ref Threshold);
                            int MatchedCount = 0;
                            int FaceCount = FaceList.Count;
                            float[] Similarities = new float[FaceCount];
                            for (int x = 0; x < FaceList.Count; x++)
                            {
                                float Similarity = 0.0f;
                                TFaceRecord CurrentFace = FaceList[x];
                                FSDK.MatchFaces(ref fr.Template, ref CurrentFace.Template, ref Similarity);
                                if (Similarity >= Threshold)
                                {
                                    // Compacted: only matched similarities are
                                    // stored, so positions here do NOT equal
                                    // FaceList indexes.
                                    Similarities[MatchedCount] = Similarity;
                                    ++MatchedCount;
                                }
                            }
                            if (MatchedCount != 0)
                            {
                                float finalResult = Similarities.Max();
                                if (finalResult * 100 < 95.0)
                                {
                                    SendBackToClient(tcpClient, "R=NONE;");
                                    add("No morethan 95.0% matches found in database faces");
                                }
                                else
                                {
                                    // NOTE(review): 'index' is the position in
                                    // the compacted Similarities array, not
                                    // the FaceList index of the matched face;
                                    // FaceName[index] can name the wrong
                                    // person whenever an earlier face failed
                                    // the threshold. Verify how FaceName is
                                    // populated relative to FaceList.
                                    int index = 0;
                                    for (int x = 0; x < MatchedCount; x++)
                                    {
                                        if (Similarities[x] == finalResult)
                                        {
                                            index = x;
                                        }
                                    }
                                    SendBackToClient(tcpClient, "R=" + FaceName[index]);
                                }
                            }
                            else
                            {
                                SendBackToClient(tcpClient, "R=NONE;");
                                add("No matches found. You can try to increase the FAR parameter in the Options dialog box.");
                            }
                        }
                        else
                        {
                            add("No eyes detected in photos.");
                            init = false;
                        }
                    }
                }
            }
            else
            {
                // Waiting for the client's acknowledgement.
                // NOTE(review): while init is true, incoming bytes are not
                // appended to clientData (see the guard at the top of the
                // loop), so this "OK" branch appears unreachable unless the
                // ack arrived in the same buffer as the image -- verify the
                // intended handshake.
                if (clientData.Contains(";"))
                {
                    string result = clientData.Substring(0, clientData.IndexOf(";"));
                    if (result == "OK")
                    {
                        init = false;
                    }
                }
            }
        }
        // Stream closed by the client: tear down the connection and remove
        // this worker's bookkeeping entries.
        tcpClients.Remove(clientId);
        tcpClient.Client.Shutdown(SocketShutdown.Both);
        tcpClient.Client.Close();
        tcpClient.Client.Dispose();
        // NOTE(review): assumes clientId equals this worker thread's
        // ManagedThreadId -- verify against how threadClients is populated.
        int idx = 0;
        for (int x = 0; x < threadClients.Count(); x++)
        {
            if (threadClients[x].ManagedThreadId == clientId)
            {
                idx = x;
            }
        }
        threadClients.RemoveAt(idx);
        add("Client " + remoteEndPoint + " disconnected");
    }
    catch
    {
        // Best-effort logging; the connection objects are abandoned on error.
        add("Client " + remoteEndPoint + " error");
    }
}