internal Bitmap drawFaceRectangle(NleDetectionDetails thisFace, Bitmap myImage, Pen p)
{
    // Outline the detected face rectangle directly on the supplied bitmap.
    // A local Graphics instance is used so concurrent calls do not share drawing state.
    using (Graphics g = Graphics.FromImage(myImage))
    {
        g.DrawRectangle(p, thisFace.Face.Rectangle.X, thisFace.Face.Rectangle.Y,
            thisFace.Face.Rectangle.Width, thisFace.Face.Rectangle.Height);
    }
    return myImage;
}
internal Bitmap drawInsetRectangle(NleDetectionDetails thisFace, Bitmap myImage, Pen p)
{
    // This is a duplicate of drawFaceRectangle() above. It exists because of occasional
    // cross-threading conflicts (ideally this should become an async method).
    using (Graphics g = Graphics.FromImage(myImage))
    {
        g.DrawRectangle(p, thisFace.Face.Rectangle.X, thisFace.Face.Rectangle.Y,
            thisFace.Face.Rectangle.Width, thisFace.Face.Rectangle.Height);
    }
    return myImage;
}
internal Bitmap snipFace(Bitmap myImage, NleDetectionDetails nleDetectionDetails)
{
    // Crop a region twice the size of the detected face rectangle, centred on the face.
    int _width = nleDetectionDetails.Face.Rectangle.Width * 2;
    int _height = nleDetectionDetails.Face.Rectangle.Height * 2;
    Bitmap snippedImage = new Bitmap(_width, _height);
    using (Graphics g = Graphics.FromImage(snippedImage))
    {
        Rectangle destination = new Rectangle(0, 0, _width, _height);
        Rectangle source = new Rectangle(
            nleDetectionDetails.Face.Rectangle.X - _width / 4,
            nleDetectionDetails.Face.Rectangle.Y - _height / 4,
            _width, _height);
        g.DrawImage(myImage, destination, source, GraphicsUnit.Pixel);
    }
    return snippedImage;
}
private bool createTemplate(Bitmap enrollmentBmp, bool largeTemplate, out NleDetectionDetails detDetails)
{
    // Extract a facial template from the enrollment bitmap and report the detection details.
    NLExtractor templateExtractor = new NLExtractor();
    templateExtractor.TemplateSize = largeTemplate ? NleTemplateSize.Large : NleTemplateSize.Medium;

    NImage enrollmentImage = NImage.FromBitmap(enrollmentBmp);
    NGrayscaleImage enrollmentGrayscale = enrollmentImage.ToGrayscale();
    NleDetectionDetails _detDetails = null;
    try
    {
        verifyLicense();
        NleExtractionStatus extractionStatus;
        facialTemplate = templateExtractor.Extract(enrollmentGrayscale, out _detDetails, out extractionStatus);
        detDetails = _detDetails;
        if (extractionStatus != NleExtractionStatus.TemplateCreated)
        {
            MessageBox.Show("Face Template Extraction Failed!\nPlease try again.\n" + extractionStatus.ToString());
            return false;
        }
        return true;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString());
        detDetails = null;
        return false;
    }
    finally
    {
        NLicense.ReleaseComponents(Components);
        if (templateExtractor != null)
        {
            templateExtractor.Dispose();
        }
    }
}
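// createTemplate() above calls a verifyLicense() helper that is not included in these snippets.
// The following is a hedged sketch only: the "/local" address, port 5000 and the Components
// string are assumptions, and the exact NLicense.ObtainComponents overload depends on the SDK version.
private void verifyLicense()
{
    if (!NLicense.ObtainComponents("/local", 5000, Components))
    {
        throw new ApplicationException("Could not obtain licenses for: " + Components);
    }
}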
private NleDetectionDetails detectDetails()
{
    NGrayscaleImage grayImage = NImage.FromBitmap(globalInsetFaceBmp).ToGrayscale();
    NleFace _faceInImage;
    NLExtractor myExtractor = new NLExtractor();
    NleDetectionDetails _detectionDetails = new NleDetectionDetails();
    if (myExtractor.DetectAllFeaturePoints == false)
    {
        myExtractor.DetectAllFeaturePoints = true;
    }
    if (grayImage == null || !myExtractor.DetectFace(grayImage, out _faceInImage))
    {
        _detectionDetails = null;
        return _detectionDetails;
    }
    _detectionDetails = myExtractor.DetectFacialFeatures(grayImage, _faceInImage);
    return _detectionDetails;
}
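// Illustrative usage sketch (not from the original project): it ties the helpers above together,
// extracting a template and then outlining and cropping the detected face.
// sourceBitmap and facePictureBox are assumed member names.
private void showDetectedFace(Bitmap sourceBitmap)
{
    NleDetectionDetails details;
    if (createTemplate(sourceBitmap, true, out details) && details != null)
    {
        using (Pen highlightPen = new Pen(Color.LimeGreen, 2f))
        {
            Bitmap outlined = drawFaceRectangle(details, sourceBitmap, highlightPen);
            facePictureBox.Image = snipFace(outlined, details);
        }
    }
}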
protected void Button1_Click(object sender, EventArgs e)
{
    // System.Windows.Forms.OpenFileDialog openFaceImageDlg = new OpenFileDialog();
    // openFaceImageDlg.ShowDialog();

    string savePath = Server.MapPath("~/image/"); // Folder on the server where the uploaded file is saved.
    if (FileUpload1.PostedFile.FileName != null)
    {
        if (FileUpload1.HasFile)
        {
            // Check whether this physical path exists on the server; create it if it does not.
            if (!System.IO.Directory.Exists(savePath))
            {
                System.IO.Directory.CreateDirectory(savePath);
            }
            savePath = System.IO.Path.Combine(savePath, FileUpload1.FileName);
            FileUpload1.SaveAs(savePath);
        }
    }

    // openFaceImageDlg.Filter = NImages.GetOpenFileFilterString(true, true);
    // if (openFaceImageDlg.ShowDialog() == DialogResult.OK)
    // {
    //     if (_image != null) _image.Dispose();
    //     _image = null;
    //     try
    //     {
    //         TextBox1.Text = FileUpload1.PostedFile.FileName;

    // Read image
    _image = NImage.FromFile(savePath);

    //         DetectFace(_image);
    //     }
    //     catch (Exception ex)
    //     {
    //         Utils.ShowException(ex);
    //     }

    NleFace nlef;
    nle.DetectFace(_image.ToGrayscale(), out nlef);
    nle.DetectAllFeaturePoints = true;
    nle.DetectBlink = true;
    nle.DetectEmotion = true;
    nle.DetectExpression = true;
    nle.DetectGender = true;
    nle.DetectGlasses = true;
    nle.DetectDarkGlasses = true;
    nle.DetectMouthOpen = true;
    nle.MaxRollAngleDeviation = short.Parse(cbRollAngle.SelectedValue);
    nle.MaxYawAngleDeviation = short.Parse(cbYawAngle.SelectedValue);

    string Blink = "";
    string Emotion = "";
    string Expression = "";
    string Gender = "";
    string Glasses = "";
    string Mouth = "";

    NleDetectionDetails detail = nle.DetectFacialFeatures(_image.ToGrayscale(), nlef);
    NleDetectionDetails detail2;
    NleExtractionStatus Status;
    nle.Extract(_image.ToGrayscale(), out detail2, out Status);

    // Collect the detected feature points and mark each one on the uploaded image.
    List<NLFeaturePoint> points = new List<NLFeaturePoint>();
    points.Add(detail.LeftEyeCenter);
    points.Add(detail.MouthCenter);
    points.Add(detail.RightEyeCenter);
    for (int i = 0; i < detail.Points.Length; i++)
    {
        points.Add(detail.Points[i]);
    }

    Bitmap bit = new Bitmap(savePath);
    Graphics g = Graphics.FromImage(bit);
    Brush b = new SolidBrush(Color.Green);
    Pen p = new Pen(b);
    for (int i = 0; i < points.Count; i++)
    {
        g.DrawRectangle(p, points.ElementAt(i).X - 2.5f, points.ElementAt(i).Y - 2.5f, 5, 5);
    }

    string dirpath = "D:/img2.JPG";
    if (System.IO.File.Exists(dirpath))
    {
        System.IO.File.Delete(dirpath);
    }
    bit.Save(dirpath, System.Drawing.Imaging.ImageFormat.Jpeg);

    // Confidence values of 254 and 255 are excluded; the sample treats them as "not available".
    if (detail2.EmotionAngerConfidence > 50 && detail2.EmotionAngerConfidence != 254 && detail2.EmotionAngerConfidence != 255)
    {
        Emotion += " Anger ";
    }
    if (detail2.EmotionDisgustConfidence > 50 && detail2.EmotionDisgustConfidence != 254 && detail2.EmotionDisgustConfidence != 255)
    {
        Emotion += " Disgust ";
    }
    if (detail2.EmotionFearConfidence > 50 && detail2.EmotionFearConfidence != 254 && detail2.EmotionFearConfidence != 255)
    {
        Emotion += " Fear ";
    }
    if (detail2.EmotionHappinessConfidence > 50 && detail2.EmotionHappinessConfidence != 254 && detail2.EmotionHappinessConfidence != 255)
    {
        Emotion += " Happiness ";
    }
    if (detail2.EmotionNeutralConfidence > 50 && detail2.EmotionNeutralConfidence != 254 && detail2.EmotionNeutralConfidence != 255)
    {
        Emotion += " Neutral ";
    }
    if (detail2.EmotionSadnessConfidence > 50 && detail2.EmotionSadnessConfidence != 254 && detail2.EmotionSadnessConfidence != 255)
    {
        Emotion += " Sadness ";
    }
    if (detail2.EmotionSurpriseConfidence > 50 && detail2.EmotionSurpriseConfidence != 254 && detail2.EmotionSurpriseConfidence != 255)
    {
        Emotion += " Surprise ";
    }

    Expression += detail2.Expression.ToString();

    if (detail2.DarkGlassesConfidence != 254 && detail2.DarkGlassesConfidence != 255)
    {
        if (detail2.DarkGlassesConfidence > 50)
        {
            Glasses += " wearing glasses ";
        }
        else
        {
            Glasses += " not wearing glasses ";
        }
    }
    if (detail2.BlinkConfidence != 254 && detail2.BlinkConfidence != 255)
    {
        if (detail2.BlinkConfidence > 50)
        {
            Blink += " Eye open ";
        }
        else
        {
            Blink += " Eye close ";
        }
    }
    Gender += detail2.Gender.ToString();
    if (detail2.MouthOpenConfidence != 254 && detail2.MouthOpenConfidence != 255)
    {
        if (detail2.MouthOpenConfidence < 51)
        {
            Mouth += " Mouth close ";
        }
        else
        {
            Mouth += " Mouth open ";
        }
    }

    //Response.Write("<script>alert('" + Blink + Emotion + Expression + Glasses + Gender + Mouth + "')</script>");
    //Response.Write("<script>alert('" + detail2.BlinkConfidence + Emotion + Expression + Gender + detail2.MouthOpenConfidence + "')</script>");

    // Put the results into a DataSet so they can be bound to ListView1 and kept in the session.
    DataSet dst = new DataSet();
    dst.DataSetName = "result";
    DataTable dt = new DataTable();
    dst.Tables.Add(dt);
    dt.Columns.Add("blink");
    dt.Columns.Add("emotion");
    dt.Columns.Add("expression");
    dt.Columns.Add("glasses");
    dt.Columns.Add("gender");
    dt.Columns.Add("mouth");
    DataRow row1 = dt.NewRow();
    row1["blink"] = Blink;
    row1["emotion"] = Emotion;
    row1["expression"] = Expression;
    row1["glasses"] = Glasses; // use the descriptive text rather than the raw DarkGlassesConfidence value
    row1["gender"] = Gender;
    row1["mouth"] = Mouth;
    dt.Rows.Add(row1);
    // dst.Tables.Add(dt);

    Session["result"] = dst;
    ListView1.DataSource = dst;
    ListView1.DataBind();

    _image.Dispose();
    bit.Dispose();
    g.Dispose();
}
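// The blink/glasses/mouth checks above repeat the same pattern: skip the 254/255 sentinel values,
// then compare the confidence against 50. If that pattern keeps growing, it could be factored into a
// small helper like the sketch below; DescribeConfidence is a hypothetical name, not part of the sample.
private static string DescribeConfidence(int confidence, string aboveFifty, string belowOrEqualFifty)
{
    // 254/255 are the "not available" sentinels used throughout the sample.
    if (confidence == 254 || confidence == 255)
    {
        return "";
    }
    return confidence > 50 ? aboveFifty : belowOrEqualFifty;
}
// Example: Blink += DescribeConfidence(detail2.BlinkConfidence, " Eye open ", " Eye close ");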
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    bool extractionStarted = false;
    try
    {
        NImage frame = null;
        NGrayscaleImage grayscaleImage = null;
        int frameNumber = 0;
        int bestFrame;
        int frameCount = Tools.LiveEnrolFrameCount;
        _extractor.DetectAllFeaturePoints = false;
        while (backgroundWorker.CancellationPending == false)
        {
            if (_pendingFormat != null && _fromCamera)
            {
                _camera.SetCurrentFormat(_pendingFormat);
                _pendingFormat = null;
            }
            if (!_fromCamera && _pause)
            {
                System.Threading.Thread.Sleep(500);
                continue;
            }
            try
            {
                TimeSpan duration = TimeSpan.Zero;
                TimeSpan timeStamp = TimeSpan.Zero;
                if (_fromCamera)
                {
                    frame = _camera.GetFrame();
                }
                else
                {
                    lock (_readerLock)
                    {
                        frame = _videoReader.ReadVideoSample(out timeStamp, out duration);
                    }
                }
                if (frame == null) // camera unplugged or end of file
                {
                    createFaceRecord = false;
                    SetImageToView(null, null, null, NleExtractionStatus.None, -1, timeStamp);
                    return;
                }
                using (grayscaleImage = frame.ToGrayscale())
                {
                    if (createFaceRecord)
                    {
                        NleDetectionDetails details;
                        NLTemplate template = null;
                        //NBuffer template = null;
                        if (!extractionStarted)
                        {
                            UpdateExtractorTemplateSize();
                            frameCount = Tools.LiveEnrolFrameCount;
                            _extractor.ExtractStart();
                            extractionStarted = true;
                            frameNumber = 0;
                            ClearCapturedImages();
                        }
                        frameNumber++;
                        NleExtractionStatus status = _extractor.ExtractNext(grayscaleImage, out details);
                        capturedImages.Add((NImage)frame.Clone());

                        if (status != NleExtractionStatus.None || frameNumber >= frameCount)
                        {
                            template = _extractor.ExtractEnd(out bestFrame, out status);
                            if (status == NleExtractionStatus.TemplateCreated)
                            {
                                NTemplate nTemplate = new NTemplate();
                                NImage bestImage = frame;
                                if (bestFrame < capturedImages.Count && bestFrame >= 0)
                                {
                                    bestImage = capturedImages[bestFrame];
                                }
                                _newRecord = new FaceRecord(template, bestImage, details);
                                _newRecord.AddToTemplate(nTemplate);
                                template.Dispose();
                                capturedImages.Remove(bestImage);
                                _capturedTemplateList = new List<byte[]>();
                                _capturedTemplateList.Add(nTemplate.Save().ToByteArray());
                                score = Identify(_capturedTemplateList, _enrolledTemplateList);
                                LogLine(string.Format("Face match details: score {0}.", score), true);
                                backgroundWorker.CancelAsync();
                            }
                            else
                            {
                                _newRecord = null;
                            }
                            extractionStarted = false;
                            createFaceRecord = false;
                        }
                        if (!createFaceRecord)
                        {
                            ClearCapturedImages();
                        }
                        SetImageToView(nlView2, frame.ToBitmap(), new NleDetectionDetails[] { details }, status,
                            (int)(frameNumber * 100.0 / frameCount), timeStamp);
                        if (status != NleExtractionStatus.None && status != NleExtractionStatus.TemplateCreated)
                        {
                            backgroundWorker.CancelAsync();
                            score = 0;
                        }
                    }
                    else
                    {
                        NleDetectionDetails[] details = null;
                        try
                        {
                            NleFace[] faces = _extractor.DetectFaces(grayscaleImage);
                            if (faces != null)
                            {
                                details = new NleDetectionDetails[faces.Length];
                                for (int i = 0; i < faces.Length; i++)
                                {
                                    details[i] = _extractor.DetectFacialFeatures(grayscaleImage, faces[i]);
                                }
                            }
                        }
                        finally
                        {
                            SetImageToView(nlView2, frame.ToBitmap(), details, NleExtractionStatus.None, -1, timeStamp);
                        }
                    }
                } // using
            } // try
            finally
            {
                if (frame != null)
                {
                    frame.Dispose();
                }
            }
        } // while
    }
    catch (Exception ex)
    {
        foreach (NImage img in capturedImages)
        {
            img.Dispose();
        }
        capturedImages.Clear();
        ShowError(ex.Message);
    }
    finally
    {
        try
        {
            int baseFrameIndex;
            NleExtractionStatus status;
            if (extractionStarted)
            {
                _extractor.ExtractEnd(out baseFrameIndex, out status);
            }
            if (_fromCamera && _camera != null)
            {
                _camera.StopCapturing();
            }
            if (!_fromCamera && _videoReader != null)
            {
                _videoReader.Stop();
            }
        }
        catch { }
    }
}
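// The worker above relies on a ClearCapturedImages() helper that is not included in these snippets.
// Based on how the catch block cleans up, a plausible sketch is shown here; this is an assumption,
// not the project's actual code.
private void ClearCapturedImages()
{
    // Dispose the cloned frames before emptying the list, mirroring the cleanup in the catch block.
    foreach (NImage img in capturedImages)
    {
        img.Dispose();
    }
    capturedImages.Clear();
}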
private void SetImageToView(NLView nlView, Bitmap image, NleDetectionDetails[] details, NleExtractionStatus status, int extractionPercentDone, TimeSpan timeStamp)
{
    if (InvokeRequired)
    {
        if (_mode != ProgramMode.Enroll)
            BeginInvoke(new SetImageToViewDelegate(SetImageToView), nlView, image, details, status, extractionPercentDone, timeStamp);
        return;
    }
    Bitmap bmp = nlView.Image;
    nlView.Image = image;
    if (bmp != null && bmp != image)
        bmp.Dispose();
    nlView.DetectionDetails = details;
    if (extractionPercentDone > 0 && extractionPercentDone < 100)
    {
        toolStripProgressBar.Value = extractionPercentDone;
        //toolStripProgressBar.Visible = true;
    }
    else
    {
        toolStripProgressBar.Value = 0;
        //pbExtractionProgress.Visible = false;
    }
    if (_mode == ProgramMode.Enroll)
    {
        if (_newRecord == null)
        {
            int count = 0;
            if (details != null)
                count = details.Length;
            LogLine(string.Format("Live view: {0} face(s) detected.", count), true);
        }
        else
        {
            if (details != null)
                LogLine(string.Format("Template created. Live view: {0} face(s) detected.", details.Length), true);
        }
    }
    String descr = getStatusDescription(status);
    if (descr != String.Empty)
        ShowError(descr);
}
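// SetImageToView marshals itself onto the UI thread through a SetImageToViewDelegate declared
// elsewhere in the form (not shown in these snippets). Its declaration has to match the method
// signature so BeginInvoke can forward the call, roughly:
private delegate void SetImageToViewDelegate(NLView nlView, Bitmap image, NleDetectionDetails[] details,
    NleExtractionStatus status, int extractionPercentDone, TimeSpan timeStamp);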
private void enroll(Bitmap globalBitmap)
{
    NleDetectionDetails detDetails = null;
    string destination = null;
    string permissionLevel = "User";
    bool lgTemplate = true;
    Draw clipping = new Draw();

    if (firstNameTextBox.Text == "" || lastNameTextBox.Text == "")
    {
        MessageBox.Show("Please fill in the first and last name fields.");
        return;
    }

    // Strip extra spaces from name entries.
    string firstName = RemoveWhitespace(firstNameTextBox.Text);
    string lastName = RemoveWhitespace(lastNameTextBox.Text);

    if (!myDdInterface.userExists(firstName, lastName))
    {
        string enrolleeId = createUserId(firstName, lastName);
        if (!createTemplate(globalBitmap, lgTemplate, out detDetails))
        {
            // Extraction failed: restart the live video feed and bail out.
            myThread = new Thread(getLiveVideo);
            myThread.Start();
            videoFileLoc = null;
            videoFileTxt.Text = null;
            return;
        }
        if (videoFileLoc == null)
        {
            MessageBox.Show("The default video will play when you are recognized.\nYou can change it later.");
            videoFileLoc = defaultVideoFile;
        }
        else
        {
            destination = Path.Combine(Environment.CurrentDirectory, "videoFiles\\" + videoFileTxt.Text);
            if (!File.Exists(destination)) // Check whether the file is already there.
            {
                File.Copy(videoFileLoc, destination);
            }
        }
        string videoFileName = Path.GetFileName(videoFileLoc);
        if (!myDdInterface.adminExists())
        {
            permissionLevel = "Admin";
        }

        // Insert user info into the database.
        myDdInterface.insertEntry(firstName, lastName, permissionLevel, enrolleeId, globalBitmap, facialTemplate, videoFileName);
        facialTemplate = null;

        // Set the enrolled image, marshalling onto the UI thread if needed.
        if (enrolledImagePictureBox.InvokeRequired)
        {
            enrolledImagePictureBox.Invoke(new Action(() => enrolledImagePictureBox.Image = globalBitmap));
            enrolledImageNameLabel.Invoke(new Action(() => enrolledImageNameLabel.Text = firstName + " " + lastName));
        }
        else
        {
            enrolledImagePictureBox.Image = globalBitmap;
            enrolledImageNameLabel.Text = firstName + " " + lastName;
        }
        // Make it visible.
        if (!enrolledImagePictureBox.Visible)
        {
            enrolledPicVisToggle();
        }

        videoFileLoc = null;
        videoFileTxt.Text = null;
        profileFNameTxt.Text = null;
        profileLNameTxt.Text = null;
        myThread = new Thread(getLiveVideo);
        myThread.Start();
        MessageBox.Show("Enrollment Successful. Your userId is " + enrolleeId);
    }
    else
    {
        MessageBox.Show("That user is already enrolled");
        myThread = new Thread(getLiveVideo);
        myThread.Start();
        return;
    }
}
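// enroll() uses a RemoveWhitespace() helper that is not part of these snippets. A simple sketch
// consistent with the "strip extra spaces" comment might look like this (hypothetical implementation;
// it requires using System.Linq):
private static string RemoveWhitespace(string input)
{
    // Drop every whitespace character from the entered name.
    return new string(input.Where(c => !char.IsWhiteSpace(c)).ToArray());
}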