private NleDetectionDetails detectDetails()
{
    // Convert the current face clipping to grayscale for the extractor.
    NGrayscaleImage grayImage = NImage.FromBitmap(globalInsetFaceBmp).ToGrayscale();
    NLExtractor myExtractor = new NLExtractor();
    NleFace _faceInImage;

    // Detect all facial feature points, not just the eye centers.
    if (!myExtractor.DetectAllFeaturePoints)
    {
        myExtractor.DetectAllFeaturePoints = true;
    }

    // No usable grayscale image, or no face found: report failure with null.
    if (grayImage == null || !myExtractor.DetectFace(grayImage, out _faceInImage))
    {
        return null;
    }

    return myExtractor.DetectFacialFeatures(grayImage, _faceInImage);
}
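// A minimal sketch of consuming the helper above; the caller shown here and the
// Console.WriteLine call are illustrative assumptions, not part of the original sample.
NleDetectionDetails details = detectDetails();
if (details == null)
{
    // No face was found in the clipping (or the grayscale conversion failed).
}
else
{
    Console.WriteLine("Face confidence: " + details.Face.Confidence);
}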
protected void Button1_Click(object sender, EventArgs e)
{
    // Save path for the uploaded file on the server.
    string savePath = Server.MapPath("~/image/");
    if (FileUpload1.HasFile)
    {
        // Check whether the physical path exists on the server; create it if not.
        if (!System.IO.Directory.Exists(savePath))
        {
            System.IO.Directory.CreateDirectory(savePath);
        }
        savePath = System.IO.Path.Combine(savePath, FileUpload1.FileName);
        FileUpload1.SaveAs(savePath);
    }

    // Read the uploaded image.
    _image = NImage.FromFile(savePath);

    NleFace nlef;
    nle.DetectFace(_image.ToGrayscale(), out nlef);

    // Enable every optional classifier before extracting details.
    nle.DetectAllFeaturePoints = true;
    nle.DetectBlink = true;
    nle.DetectEmotion = true;
    nle.DetectExpression = true;
    nle.DetectGender = true;
    nle.DetectGlasses = true;
    nle.DetectDarkGlasses = true;
    nle.DetectMouthOpen = true;
    nle.MaxRollAngleDeviation = short.Parse(cbRollAngle.SelectedValue);
    nle.MaxYawAngleDeviation = short.Parse(cbYawAngle.SelectedValue);

    string Blink = "";
    string Emotion = "";
    string Expression = "";
    string Gender = "";
    string Glasses = "";
    string Mouth = "";

    NleDetectionDetails detail = nle.DetectFacialFeatures(_image.ToGrayscale(), nlef);
    NleDetectionDetails detail2;
    NleExtractionStatus Status;
    nle.Extract(_image.ToGrayscale(), out detail2, out Status);

    // Collect the eye and mouth centers plus every detected feature point.
    List<NLFeaturePoint> points = new List<NLFeaturePoint>();
    points.Add(detail.LeftEyeCenter);
    points.Add(detail.MouthCenter);
    points.Add(detail.RightEyeCenter);
    for (int i = 0; i < detail.Points.Length; i++)
    {
        points.Add(detail.Points[i]);
    }

    // Mark each feature point on the image with a small green rectangle.
    Bitmap bit = new Bitmap(savePath);
    Graphics g = Graphics.FromImage(bit);
    Brush b = new SolidBrush(Color.Green);
    Pen p = new Pen(b);
    for (int i = 0; i < points.Count; i++)
    {
        g.DrawRectangle(p, points[i].X - 2.5f, points[i].Y - 2.5f, 5, 5);
    }

    string dirpath = "D:/img2.JPG";
    if (System.IO.File.Exists(dirpath))
    {
        System.IO.File.Delete(dirpath);
    }
    bit.Save(dirpath, System.Drawing.Imaging.ImageFormat.Jpeg);

    // Confidence values 254 and 255 are skipped below; the sample treats them as
    // "no usable result" markers, so only scores above 50 count as a detection.
    if (detail2.EmotionAngerConfidence > 50 && detail2.EmotionAngerConfidence != 254 && detail2.EmotionAngerConfidence != 255)
    {
        Emotion += " Anger ";
    }
    if (detail2.EmotionDisgustConfidence > 50 && detail2.EmotionDisgustConfidence != 254 && detail2.EmotionDisgustConfidence != 255)
    {
        Emotion += " Disgust ";
    }
    if (detail2.EmotionFearConfidence > 50 && detail2.EmotionFearConfidence != 254 && detail2.EmotionFearConfidence != 255)
    {
        Emotion += " Fear ";
    }
    if (detail2.EmotionHappinessConfidence > 50 && detail2.EmotionHappinessConfidence != 254 && detail2.EmotionHappinessConfidence != 255)
    {
        Emotion += " Happiness ";
    }
    if (detail2.EmotionNeutralConfidence > 50 && detail2.EmotionNeutralConfidence != 254 && detail2.EmotionNeutralConfidence != 255)
    {
        Emotion += " Neutral ";
    }
    if (detail2.EmotionSadnessConfidence > 50 && detail2.EmotionSadnessConfidence != 254 && detail2.EmotionSadnessConfidence != 255)
    {
        Emotion += " Sadness ";
    }
    if (detail2.EmotionSurpriseConfidence > 50 && detail2.EmotionSurpriseConfidence != 254 && detail2.EmotionSurpriseConfidence != 255)
    {
        Emotion += " Surprise ";
    }
    Expression += detail2.Expression.ToString();
    if (detail2.DarkGlassesConfidence != 254 && detail2.DarkGlassesConfidence != 255)
    {
        if (detail2.DarkGlassesConfidence > 50)
        {
            Glasses += " wearing glasses ";
        }
        else
        {
            Glasses += " not wearing glasses ";
        }
    }
    if (detail2.BlinkConfidence != 254 && detail2.BlinkConfidence != 255)
    {
        if (detail2.BlinkConfidence > 50)
        {
            Blink += " Eyes open ";
        }
        else
        {
            Blink += " Eyes closed ";
        }
    }
    Gender += detail2.Gender.ToString();
    if (detail2.MouthOpenConfidence != 254 && detail2.MouthOpenConfidence != 255)
    {
        if (detail2.MouthOpenConfidence < 51)
        {
            Mouth += " Mouth closed ";
        }
        else
        {
            Mouth += " Mouth open ";
        }
    }

    // Build a one-row result table and bind it to the ListView.
    DataSet dst = new DataSet();
    dst.DataSetName = "result";
    DataTable dt = new DataTable();
    dst.Tables.Add(dt);
    dt.Columns.Add("blink");
    dt.Columns.Add("emotion");
    dt.Columns.Add("expression");
    dt.Columns.Add("glasses");
    dt.Columns.Add("gender");
    dt.Columns.Add("mouth");
    DataRow row1 = dt.NewRow();
    row1["blink"] = Blink;
    row1["emotion"] = Emotion;
    row1["expression"] = Expression;
    row1["glasses"] = Glasses; // store the formatted text, matching the other columns
    row1["gender"] = Gender;
    row1["mouth"] = Mouth;
    dt.Rows.Add(row1);
    Session["result"] = dst;
    ListView1.DataSource = dst;
    ListView1.DataBind();

    _image.Dispose();
    bit.Dispose();
    g.Dispose();
}
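// The 254/255 sentinel checks above repeat for every attribute; a small helper could
// centralize them. A sketch only: the Describe name and the int parameter are
// assumptions (the SDK's confidence properties may be a narrower numeric type), and
// the 50-point threshold is taken from the sample above.
private static string Describe(int confidence, string positive, string negative)
{
    if (confidence == 254 || confidence == 255)
    {
        return ""; // the sample treats these values as "no usable result"
    }
    return confidence > 50 ? positive : negative;
}
// Usage, mirroring the sample's strings:
// Mouth = Describe(detail2.MouthOpenConfidence, " Mouth open ", " Mouth closed ");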
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    bool extractionStarted = false;
    try
    {
        NImage frame = null;
        NGrayscaleImage grayscaleImage = null;
        int frameNumber = 0;
        int bestFrame;
        int frameCount = Tools.LiveEnrolFrameCount;
        _extractor.DetectAllFeaturePoints = false;
        while (backgroundWorker.CancellationPending == false)
        {
            if (_pendingFormat != null && _fromCamera)
            {
                _camera.SetCurrentFormat(_pendingFormat);
                _pendingFormat = null;
            }
            if (!_fromCamera && _pause)
            {
                System.Threading.Thread.Sleep(500);
                continue;
            }
            try
            {
                TimeSpan duration = TimeSpan.Zero;
                TimeSpan timeStamp = TimeSpan.Zero;
                if (_fromCamera)
                {
                    frame = _camera.GetFrame();
                }
                else
                {
                    lock (_readerLock)
                    {
                        frame = _videoReader.ReadVideoSample(out timeStamp, out duration);
                    }
                }
                if (frame == null) // camera unplugged or end of file
                {
                    createFaceRecord = false;
                    SetImageToView(null, null, null, NleExtractionStatus.None, -1, timeStamp);
                    return;
                }
                using (grayscaleImage = frame.ToGrayscale())
                {
                    if (createFaceRecord)
                    {
                        NleDetectionDetails details;
                        NLTemplate template = null;
                        if (!extractionStarted)
                        {
                            UpdateExtractorTemplateSize();
                            frameCount = Tools.LiveEnrolFrameCount;
                            _extractor.ExtractStart();
                            extractionStarted = true;
                            frameNumber = 0;
                            ClearCapturedImages();
                        }
                        frameNumber++;
                        NleExtractionStatus status = _extractor.ExtractNext(grayscaleImage, out details);
                        capturedImages.Add((NImage)frame.Clone());
                        if (status != NleExtractionStatus.None || frameNumber >= frameCount)
                        {
                            template = _extractor.ExtractEnd(out bestFrame, out status);
                            if (status == NleExtractionStatus.TemplateCreated)
                            {
                                NTemplate nTemplate = new NTemplate();
                                NImage bestImage = frame;
                                if (bestFrame < capturedImages.Count && bestFrame >= 0)
                                {
                                    bestImage = capturedImages[bestFrame];
                                }
                                _newRecord = new FaceRecord(template, bestImage, details);
                                _newRecord.AddToTemplate(nTemplate);
                                template.Dispose();
                                capturedImages.Remove(bestImage);
                                _capturedTemplateList = new List<byte[]>();
                                _capturedTemplateList.Add(nTemplate.Save().ToByteArray());
                                score = Identify(_capturedTemplateList, _enrolledTemplateList);
                                LogLine(string.Format("Face match details: score {0}.", score), true);
                                backgroundWorker.CancelAsync();
                            }
                            else
                            {
                                _newRecord = null;
                            }
                            extractionStarted = false;
                            createFaceRecord = false;
                        }
                        if (!createFaceRecord)
                        {
                            ClearCapturedImages();
                        }
                        SetImageToView(nlView2, frame.ToBitmap(), new NleDetectionDetails[] { details }, status, (int)(frameNumber * 100.0 / frameCount), timeStamp);
                        if (status != NleExtractionStatus.None && status != NleExtractionStatus.TemplateCreated)
                        {
                            backgroundWorker.CancelAsync();
                            score = 0;
                        }
                    }
                    else
                    {
                        NleDetectionDetails[] details = null;
                        try
                        {
                            NleFace[] faces = _extractor.DetectFaces(grayscaleImage);
                            if (faces != null)
                            {
                                details = new NleDetectionDetails[faces.Length];
                                for (int i = 0; i < faces.Length; i++)
                                {
                                    details[i] = _extractor.DetectFacialFeatures(grayscaleImage, faces[i]);
                                }
                            }
                        }
                        finally
                        {
                            SetImageToView(nlView2, frame.ToBitmap(), details, NleExtractionStatus.None, -1, timeStamp);
                        }
                    }
                } // using
            } // try
            finally
            {
                if (frame != null)
                {
                    frame.Dispose();
                }
            }
        } // while
    }
    catch (Exception ex)
    {
        foreach (NImage img in capturedImages)
        {
            img.Dispose();
        }
        capturedImages.Clear();
        ShowError(ex.Message);
    }
    finally
    {
        try
        {
            int baseFrameIndex;
            NleExtractionStatus status;
            if (extractionStarted)
            {
                _extractor.ExtractEnd(out baseFrameIndex, out status);
            }
            if (_fromCamera && _camera != null)
            {
                _camera.StopCapturing();
            }
            if (!_fromCamera && _videoReader != null)
            {
                _videoReader.Stop();
            }
        }
        catch { }
    }
}
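// Stripped of the UI and enrolment bookkeeping, the multi-frame extraction lifecycle
// used above reduces to the pattern below. A sketch: extractor, frameCount and
// NextGrayscaleFrame() are placeholders standing in for the sample's own plumbing.
extractor.ExtractStart();
NleDetectionDetails details = null;
NleExtractionStatus status = NleExtractionStatus.None;
for (int i = 0; i < frameCount && status == NleExtractionStatus.None; i++)
{
    using (NGrayscaleImage gray = NextGrayscaleFrame())
    {
        status = extractor.ExtractNext(gray, out details);
    }
}
int bestFrame;
NLTemplate template = extractor.ExtractEnd(out bestFrame, out status);
if (status == NleExtractionStatus.TemplateCreated)
{
    // template now holds the face template; bestFrame indexes the frame it came from.
}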
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    try
    {
        activeCamera = (NCamera)e.Argument;
        activeCamera.StartCapturing();
        while (activeCamera.IsCapturing)
        {
            if (backgroundWorker.CancellationPending)
            {
                activeCamera.StopCapturing();
            }
            if (activeCamera != null && activeCamera.IsCapturing)
            {
                using (NImage image = activeCamera.GetFrame())
                {
                    video = image.ToBitmap();
                    using (NLExtractor extractor = new NLExtractor())
                    {
                        // Convert the frame to grayscale for detection.
                        NGrayscaleImage grayscale = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, image);
                        extractor.MaxRecordsPerTemplate = 1;
                        // Detect all faces in the image that are suitable for face recognition.
                        NleFace[] faces = extractor.DetectFaces(grayscale);
                        facedetectdetails = new NleDetectionDetails[faces.Length];
                        for (int i = 0; i < facedetectdetails.Length; i++)
                        {
                            facedetectdetails[i] = extractor.DetectFacialFeatures(grayscale, faces[i]);
                        }
                        facesView.DrawConfidenceForEyes = true;
                        facesView.DrawFaceConfidence = true;
                        facesView.DetectionDetails = facedetectdetails;
                        // A face is available when at least one detection reports one.
                        faceAvailabilityStatus = false;
                        for (int i = 0; i < facedetectdetails.Length; i++)
                        {
                            if (facedetectdetails[i].FaceAvailable)
                            {
                                faceAvailabilityStatus = true;
                            }
                        }
                    }
                }
            }
        }
    }
    catch (Exception exp)
    {
        // MessageBox.Show("Error Capturing Image - Close and re-open browser window");
        // Logger.LogError(exp.ToString());
        e.Cancel = true;
    }
}
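// For reference, the standard BackgroundWorker cancellation idiom makes the exit
// explicit instead of waiting for IsCapturing to turn false. A minimal sketch of that
// loop shape (the per-frame body is elided):
while (activeCamera.IsCapturing)
{
    if (backgroundWorker.CancellationPending)
    {
        activeCamera.StopCapturing();
        e.Cancel = true; // reported to RunWorkerCompleted as Cancelled
        break;
    }
    // ... process one frame ...
}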
private void getLiveVideo()
{
    // what if the camera is disconnected during feed?
    verifyLicense();
    NLExtractor liveExtractor = new NLExtractor();
    NleFace[] theseFaces = null;
    List<NleDetectionDetails> faceDetails = new List<NleDetectionDetails>();
    liveExtractor.DetectAllFeaturePoints = false; // false: only the eyes are detected
    NGrayscaleImage liveGrayImage;
    Bitmap displayBmp;
    Draw drawfeatures = new Draw();
    myPen = new System.Drawing.Pen(System.Drawing.Color.Blue, 2);
    System.Drawing.Point faceConfLoc = new System.Drawing.Point();
    System.Drawing.Point rEye = new System.Drawing.Point();
    System.Drawing.Point lEye = new System.Drawing.Point();
    var timer = new System.Diagnostics.Stopwatch();
    int timeSpan;
    int elapsed = 0;
    int frameDelay = 1000 / frameRate;
    int autoDetectDelay = 0;
    int largestFaceNumber = 0;
    camera.StartCapturing();
    // This loop only draws on the live display box. The largest face is detected elsewhere.
    while (isLive == true)
    {
        try
        {
            timer.Restart(); // time this frame so the loop can hold the target frame rate
            currentImage = camera.GetFrame();
            if (currentImage != null)
            {
                currentImage.FlipHorizontally();
                // Create a grayscale image for extractor operations.
                liveGrayImage = currentImage.ToGrayscale();
                displayBmp = currentImage.ToBitmap();
                theseFaces = liveExtractor.DetectFaces(liveGrayImage);
                int largestFaceWidth = 0;
                int liveFaceCount = theseFaces.Length;
                if (liveFaceCount > 0)
                {
                    faceDetails.Clear();
                    for (int i = 0; i < theseFaces.Length; i++)
                    {
                        faceDetails.Add(liveExtractor.DetectFacialFeatures(liveGrayImage, theseFaces[i]));
                        faceConfLoc.X = faceDetails[i].Face.Rectangle.Left;
                        faceConfLoc.Y = faceDetails[i].Face.Rectangle.Bottom;
                        rEye.X = faceDetails[i].RightEyeCenter.X;
                        rEye.Y = faceDetails[i].RightEyeCenter.Y;
                        lEye.X = faceDetails[i].LeftEyeCenter.X;
                        lEye.Y = faceDetails[i].LeftEyeCenter.Y;
                        if (boundingBoxOn)
                        {
                            displayBmp = drawfeatures.drawFaceRectangle(faceDetails[i], displayBmp, myPen);
                        }
                        if (faceConfCheckBox.Checked)
                        {
                            displayBmp = drawfeatures.faceConfidence(displayBmp, (int)faceDetails[i].Face.Confidence, faceConfLoc, myPen);
                        }
                        if (drawEyesCheckBox.Checked)
                        {
                            displayBmp = drawfeatures.connect(displayBmp, rEye, lEye, myPen);
                        }
                        if (showEyeCheckBox.Checked)
                        {
                            displayBmp = drawfeatures.confidence(displayBmp, faceDetails[i].LeftEyeCenter.Confidence, lEye, myPen);
                            displayBmp = drawfeatures.confidence(displayBmp, faceDetails[i].RightEyeCenter.Confidence, rEye, myPen);
                        }
                        // Track the widest face seen so far.
                        if (faceDetails[i].Face.Rectangle.Width > largestFaceWidth)
                        {
                            largestFaceWidth = faceDetails[i].Face.Rectangle.Width;
                            largestFaceNumber = i;
                        }
                        // Make the face clipping here.
                        globalInsetFaceBmp = drawfeatures.snipFace(currentImage.ToBitmap(), faceDetails[largestFaceNumber]);
                    }
                    if (autoDetect)
                    {
                        autoDetectDelay++;
                    }
                }
                liveGrayImage.Dispose();
                currentImage.Dispose();
                if (matchNow || autoDetectDelay == 50)
                {
                    autoDetectDelay = 0;
                    attemptMatch();
                }
                // Display the image on the picture box, marshalling to the UI thread if needed.
                if (mainFeedPictureBox.InvokeRequired)
                {
                    mainFeedPictureBox.Invoke(new Action(() => mainFeedPictureBox.Image = displayBmp));
                }
                else
                {
                    mainFeedPictureBox.Image = displayBmp;
                }
                // Sleep off whatever remains of this frame's time budget.
                timer.Stop();
                elapsed = (int)timer.ElapsedMilliseconds;
                timeSpan = frameDelay - elapsed;
                if (timeSpan < 0)
                {
                    timeSpan = 0;
                }
                Thread.Sleep(timeSpan);
                theseFaces = null;
            }
        }
        catch (Exception)
        {
            // Swallow per-frame errors so one bad frame does not kill the feed.
        }
    }
    camera.StopCapturing();
}
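// The pacing logic above generalizes to a small reusable pattern: time the work, then
// sleep off the remainder of the frame budget. A sketch; DoFrame() is a placeholder
// for the capture/detect/draw body.
var sw = new System.Diagnostics.Stopwatch();
int frameDelay = 1000 / frameRate; // target milliseconds per frame
while (isLive)
{
    sw.Restart();
    DoFrame(); // placeholder for per-frame work
    sw.Stop();
    int remaining = frameDelay - (int)sw.ElapsedMilliseconds;
    if (remaining > 0)
    {
        System.Threading.Thread.Sleep(remaining);
    }
}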