/// <summary>
/// Loads an enrollment image and extracts a face template from it, storing the
/// result in <c>_newRecord</c> and the matching template list. The image source is
/// chosen by the "Enroll" app setting: "file" loads a sample JPEG next to the
/// executable, "service" fetches the picture over the web service, anything else
/// reads it from the database.
/// </summary>
/// <param name="toView2">
/// false: show the image in nlView1 and rebuild _enrolledTemplateList;
/// true: show the image in nlView2 and rebuild _capturedTemplateList.
/// </param>
private void enrollFromImage(bool toView2)
{
    NImage nImage = null;
    // Read the mode once instead of hitting AppSettings repeatedly.
    string enrollMode = System.Configuration.ConfigurationManager.AppSettings["Enroll"];

    if (enrollMode != "file")
    {
        byte[] buffer;
        DBHelper.DBUtil db = new DBHelper.DBUtil();
        if (enrollMode == "service")
            buffer = db.GetImageFromWebService(IMAGE_TYPE.picture, this.userId);
        else
            buffer = db.GetImage(IMAGE_TYPE.picture, this.userId);

        // FIX: a null buffer previously fell through and failed later with a
        // NullReferenceException; report the real problem instead.
        if (buffer == null)
        {
            ShowError("Error creating image retrieved from database: no image data found");
            return;
        }

        try
        {
            using (MemoryStream ms = new MemoryStream(buffer))
            {
                nImage = NImage.FromStream(ms);
            }
        }
        catch (Exception ex)
        {
            ShowError(string.Format("Error creating image retrieved from database {0}", ex.Message));
            return;
        }
    }
    else
    {
        // Sample images shipped alongside the executable.
        // FIX: build the path with Path.Combine instead of string concatenation.
        string fileName = toView2 ? "Roman2.jpg" : "Roman.jpg";
        nImage = NImage.FromFile(Path.Combine(Path.GetDirectoryName(Application.ExecutablePath), fileName));
    }

    try
    {
        if (!toView2)
            nlView1.Image = nImage.ToBitmap();
        else
            nlView2.Image = nImage.ToBitmap();

        using (NGrayscaleImage gray = nImage.ToGrayscale())
        {
            NleDetectionDetails details;
            NleExtractionStatus status;
            try
            {
                _extractor.DetectAllFeaturePoints = Settings.Default.ExtractorDetectAllFeaturesNonLive;
            }
            catch
            {
                // Best effort: keep the extractor's current setting if the
                // configuration value cannot be read.
            }

            NLTemplate template = _extractor.Extract(gray, out details, out status);
            if (status != NleExtractionStatus.TemplateCreated)
            {
                _newRecord = null;
                String descr = getStatusDescription(status);
                if (descr != String.Empty)
                    ShowError(string.Format("Template extraction failed: {0}", descr));
                else
                    ShowError(string.Format("Template extraction failed: {0}", status));
                // FIX: nothing references the source image on this path — dispose
                // it so the native image handle is not leaked.
                nImage.Dispose();
                return;
            }

            // On success the new record takes over the source image; do not
            // dispose nImage here.
            _newRecord = new FaceRecord(template, nImage, details);
            if (!toView2)
            {
                _enrolledTemplateList = new List<byte[]>();
                _enrolledTemplateList.Add(template.Save().ToByteArray());
            }
            else
            {
                _capturedTemplateList = new List<byte[]>();
                _capturedTemplateList.Add(template.Save().ToByteArray());
            }
            template.Dispose();

            if (!toView2)
                SetImageToView(nlView1, nImage.ToBitmap(), new NleDetectionDetails[] { details }, status, 100, TimeSpan.Zero);
            else
                SetImageToView(nlView2, nImage.ToBitmap(), new NleDetectionDetails[] { details }, status, 100, TimeSpan.Zero);
        }
    }
    catch (Exception ex)
    {
        ShowError(ex.Message);
    }
}
/// <summary>
/// Background capture loop: pulls frames from the camera or video reader until
/// cancellation. In preview mode it only detects faces and paints them; when
/// <c>createFaceRecord</c> is set it runs the multi-frame extraction sequence
/// (ExtractStart / ExtractNext / ExtractEnd), builds a FaceRecord from the best
/// frame, scores it against the enrolled templates, and cancels itself.
/// </summary>
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    bool extractionStarted = false;
    try
    {
        NImage frame = null;
        NGrayscaleImage grayscaleImage = null;
        int frameNumber = 0;
        int bestFrame;
        int frameCount = Tools.LiveEnrolFrameCount;
        _extractor.DetectAllFeaturePoints = false;
        while (backgroundWorker.CancellationPending == false)
        {
            // Apply a camera-format change requested by the UI thread.
            if (_pendingFormat != null && _fromCamera)
            {
                _camera.SetCurrentFormat(_pendingFormat);
                _pendingFormat = null;
            }
            // Video playback can be paused; poll every half second.
            if (!_fromCamera && _pause)
            {
                System.Threading.Thread.Sleep(500);
                continue;
            }
            try
            {
                TimeSpan duration = TimeSpan.Zero;
                TimeSpan timeStamp = TimeSpan.Zero;
                if (_fromCamera)
                {
                    frame = _camera.GetFrame();
                }
                else
                {
                    // _readerLock guards the shared video reader against
                    // concurrent access from other threads.
                    lock (_readerLock)
                    {
                        frame = _videoReader.ReadVideoSample(out timeStamp, out duration);
                    }
                }
                if (frame == null) //camera unplugged or end of file
                {
                    createFaceRecord = false;
                    SetImageToView(null, null, null, NleExtractionStatus.None, -1, timeStamp);
                    return;
                }
                using (grayscaleImage = frame.ToGrayscale())
                {
                    if (createFaceRecord)
                    {
                        NleDetectionDetails details;
                        NLTemplate template = null;
                        //NBuffer template = null;
                        // First frame of a new enrollment: (re)initialize the
                        // incremental extraction session.
                        if (!extractionStarted)
                        {
                            UpdateExtractorTemplateSize();
                            frameCount = Tools.LiveEnrolFrameCount;
                            _extractor.ExtractStart();
                            extractionStarted = true;
                            frameNumber = 0;
                            ClearCapturedImages();
                        }
                        frameNumber++;
                        NleExtractionStatus status = _extractor.ExtractNext(grayscaleImage, out details);
                        // Keep a copy of every frame fed to the extractor so the
                        // "best frame" it reports can be recovered afterwards.
                        capturedImages.Add((NImage)frame.Clone());
                        // ExtractNext returns None while it still wants more
                        // frames; any other status — or hitting the frame
                        // budget — ends the session.
                        if (status != NleExtractionStatus.None || frameNumber >= frameCount)
                        {
                            template = _extractor.ExtractEnd(out bestFrame, out status);
                            if (status == NleExtractionStatus.TemplateCreated)
                            {
                                NTemplate nTemplate = new NTemplate();
                                NImage bestImage = frame;
                                if (bestFrame < capturedImages.Count && bestFrame >= 0)
                                    bestImage = capturedImages[bestFrame];
                                // The record takes ownership of bestImage, so it
                                // is removed from capturedImages below before the
                                // remaining copies are cleared.
                                _newRecord = new FaceRecord(template, bestImage, details);
                                _newRecord.AddToTemplate(nTemplate);
                                template.Dispose();
                                capturedImages.Remove(bestImage);
                                _capturedTemplateList = new List<byte[]>();
                                _capturedTemplateList.Add(nTemplate.Save().ToByteArray());
                                // Match the freshly captured template against the
                                // enrolled one and stop the capture loop.
                                score = Identify(_capturedTemplateList, _enrolledTemplateList);
                                LogLine(string.Format("Face match details: score {0}.", score), true);
                                backgroundWorker.CancelAsync();
                            }
                            else
                            {
                                _newRecord = null;
                            }
                            extractionStarted = false;
                            createFaceRecord = false;
                        }
                        // Session over (success or failure): release the buffered
                        // frame copies.
                        if (!createFaceRecord)
                        {
                            ClearCapturedImages();
                        }
                        SetImageToView(nlView2, frame.ToBitmap(), new NleDetectionDetails[] { details }, status, (int)(frameNumber * 100.0 / frameCount), timeStamp);
                        // Any terminal status other than success aborts the
                        // capture with a zero score.
                        if (status != NleExtractionStatus.None && status != NleExtractionStatus.TemplateCreated)
                        {
                            backgroundWorker.CancelAsync();
                            score = 0;
                        }
                    }
                    else
                    {
                        // Preview mode: detect faces/features for overlay only;
                        // the finally block guarantees the frame is still shown
                        // even if detection throws.
                        NleDetectionDetails[] details = null;
                        try
                        {
                            NleFace[] faces = _extractor.DetectFaces(grayscaleImage);
                            if (faces != null)
                            {
                                details = new NleDetectionDetails[faces.Length];
                                for (int i = 0; i < faces.Length; i++)
                                {
                                    details[i] = _extractor.DetectFacialFeatures(grayscaleImage, faces[i]);
                                }
                            }
                        }
                        finally
                        {
                            SetImageToView(nlView2, frame.ToBitmap(), details, NleExtractionStatus.None, -1, timeStamp);
                        }
                    }
                }//using
            }// try
            finally
            {
                // The current frame is fully consumed each iteration (copies are
                // cloned into capturedImages), so it is always disposed here.
                if (frame != null) frame.Dispose();
            }
        }// while
    }
    catch (Exception ex)
    {
        // On failure, release all buffered frame copies before reporting.
        foreach (NImage img in capturedImages)
        {
            img.Dispose();
        }
        capturedImages.Clear();
        ShowError(ex.Message);
    }
    finally
    {
        // Best-effort teardown: close any open extraction session and stop the
        // active source; errors here are deliberately ignored.
        try
        {
            int baseFrameIndex;
            NleExtractionStatus status;
            if (extractionStarted)
                _extractor.ExtractEnd(out baseFrameIndex, out status);
            if (_fromCamera && _camera != null)
                _camera.StopCapturing();
            if (!_fromCamera && _videoReader != null)
                _videoReader.Stop();
        }
        catch { }
    }
}