public byte[] ExtractTemplate(DataSetMain.ImageDataTable table)
{
    // Extracts a face template from a sequence of images stored in the table.
    // Returns the serialized template bytes, or an empty array when no
    // extractor is available or extraction did not produce a template.
    // Throws VerilookToolException (wrapping the innermost SDK message) on failure.
    if (extractor == null)
    {
        return (new byte[0]);
    }

    NLTemplate template = null;
    try
    {
        // Start extraction of a face template from a stream (sequence of images).
        extractor.ExtractStart(table.Rows.Count);
        NleExtractionStatus extractionStatus = NleExtractionStatus.None;

        // Process the stream of images, feeding each frame to the extractor.
        foreach (DataSetMain.ImageRow row in table)
        {
            // Decode the stored image and convert it to grayscale. Both images
            // are wrapped in using-blocks so they are disposed even when the
            // conversion or extraction throws (the original leaked the source
            // image if FromImage failed, and null-checked it only after use).
            using (NImage image = NImage.FromStream(new MemoryStream(row.Image)))
            using (NGrayscaleImage grayscaleImage = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, image))
            {
                // Dispose any template produced by an earlier frame before the
                // out-parameter overwrites it, so intermediate templates are
                // not leaked across iterations.
                if (template != null)
                {
                    template.Dispose();
                    template = null;
                }

                // Use the image as another frame of the stream.
                NleDetectionDetails details;
                int baseFrameIndex;
                extractionStatus = extractor.ExtractNext(grayscaleImage, out details, out baseFrameIndex, out template);
            }
        }

        if (extractionStatus == NleExtractionStatus.TemplateCreated)
        {
            // Return the serialized (compressed) template.
            return (template.Save().ToByteArray());
        }
    }
    catch (Exception e)
    {
        // Surface the innermost message so SDK errors stay readable to callers.
        string msg = e.InnerException != null ? e.InnerException.Message : e.Message;
        throw new VerilookToolException(errormsg + msg);
    }
    finally
    {
        if (template != null)
        {
            template.Dispose();
        }
    }
    return (new byte[0]);
}
private String getStatusDescription(NleExtractionStatus status)
{
    // Maps an NleExtractionStatus to a user-facing message.
    // TemplateCreated and None return String.Empty so no error is shown;
    // unknown values fall back to the enum member name.
    switch (status)
    {
        case NleExtractionStatus.EyesNotDetected: return ("Eyes not detected");
        case NleExtractionStatus.FaceNotDetected: return ("Face not detected");
        case NleExtractionStatus.FaceTooCloseToImageBorder: return ("Face too close to image border");
        case NleExtractionStatus.GeneralizationFailed: return ("Generalization failed");
        case NleExtractionStatus.LivenessCheckFailed: return ("Liveness check failed");
        // Fixed for consistency: every other quality-check status names the
        // specific failed check; this one previously said just "Quality check failed".
        case NleExtractionStatus.QualityCheckExposureFailed: return ("Quality check: exposure failed");
        case NleExtractionStatus.QualityCheckGrayscaleDensityFailed: return ("Quality check: grayscale density failed");
        case NleExtractionStatus.QualityCheckSharpnessFailed: return ("Quality check: sharpness failed");
        case NleExtractionStatus.TemplateCreated:
            // Success — nothing to report.
            return (String.Empty);
        case NleExtractionStatus.None:
            // Idle — nothing to report.
            return (String.Empty);
        default: return (status.ToString());
    }
}
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    // Background capture/extraction loop: pulls frames from the camera or a
    // video file, optionally runs face-template extraction over a window of
    // frames when createFaceRecord is set, and pushes each frame (plus any
    // detection details) to the UI via SetImageToView. Runs until cancelled,
    // the source ends, or extraction reaches a terminal status.
    bool extractionStarted = false;
    try
    {
        NImage frame = null;
        NGrayscaleImage grayscaleImage = null;
        int frameNumber = 0;
        int bestFrame;
        int frameCount = Tools.LiveEnrolFrameCount;
        _extractor.DetectAllFeaturePoints = false;
        while (backgroundWorker.CancellationPending == false)
        {
            // Apply a camera format change requested by the UI thread.
            if (_pendingFormat != null && _fromCamera)
            {
                _camera.SetCurrentFormat(_pendingFormat);
                _pendingFormat = null;
            }
            // Paused file playback: idle instead of reading more samples.
            if (!_fromCamera && _pause)
            {
                System.Threading.Thread.Sleep(500);
                continue;
            }
            try
            {
                TimeSpan duration = TimeSpan.Zero;
                TimeSpan timeStamp = TimeSpan.Zero;
                if (_fromCamera)
                {
                    frame = _camera.GetFrame();
                }
                else
                {
                    // The video reader is shared across threads; guard each read.
                    lock (_readerLock)
                    {
                        frame = _videoReader.ReadVideoSample(out timeStamp, out duration);
                    }
                }
                if (frame == null) //camera unplugged or end of file
                {
                    createFaceRecord = false;
                    SetImageToView(null, null, null, NleExtractionStatus.None, -1, timeStamp);
                    return;
                }
                using (grayscaleImage = frame.ToGrayscale())
                {
                    if (createFaceRecord)
                    {
                        NleDetectionDetails details;
                        NLTemplate template = null; //NBuffer template = null;
                        // Lazily start a fresh extraction session on the first
                        // frame of an enrol request.
                        if (!extractionStarted)
                        {
                            UpdateExtractorTemplateSize();
                            frameCount = Tools.LiveEnrolFrameCount;
                            _extractor.ExtractStart();
                            extractionStarted = true;
                            frameNumber = 0;
                            ClearCapturedImages();
                        }
                        frameNumber++;
                        NleExtractionStatus status = _extractor.ExtractNext(grayscaleImage, out details);
                        // Keep a clone of every processed frame so the "best"
                        // one can be recovered after extraction finishes.
                        capturedImages.Add((NImage)frame.Clone());
                        // Session ends when the extractor reports a status or
                        // the frame budget is exhausted.
                        if (status != NleExtractionStatus.None || frameNumber >= frameCount)
                        {
                            template = _extractor.ExtractEnd(out bestFrame, out status);
                            if (status == NleExtractionStatus.TemplateCreated)
                            {
                                NTemplate nTemplate = new NTemplate();
                                NImage bestImage = frame;
                                // bestFrame indexes capturedImages; fall back to
                                // the current frame when out of range.
                                if (bestFrame < capturedImages.Count && bestFrame >= 0)
                                {
                                    bestImage = capturedImages[bestFrame];
                                }
                                _newRecord = new FaceRecord(template, bestImage, details);
                                _newRecord.AddToTemplate(nTemplate);
                                template.Dispose();
                                // Remove the best image so ClearCapturedImages
                                // below does not dispose the image now owned by
                                // _newRecord.
                                capturedImages.Remove(bestImage);
                                _capturedTemplateList = new List <byte[]>();
                                _capturedTemplateList.Add(nTemplate.Save().ToByteArray());
                                // Match the captured template against the
                                // enrolled set, then stop the worker.
                                score = Identify(_capturedTemplateList, _enrolledTemplateList);
                                LogLine(string.Format("Face match details: score {0}.", score), true);
                                backgroundWorker.CancelAsync();
                            }
                            else
                            {
                                _newRecord = null;
                            }
                            extractionStarted = false;
                            createFaceRecord = false;
                        }
                        if (!createFaceRecord)
                        {
                            ClearCapturedImages();
                        }
                        SetImageToView(nlView2, frame.ToBitmap(), new NleDetectionDetails[] { details }, status, (int)(frameNumber * 100.0 / frameCount), timeStamp);
                        // Any terminal failure status aborts the capture loop
                        // with a zero score.
                        if (status != NleExtractionStatus.None && status != NleExtractionStatus.TemplateCreated)
                        {
                            backgroundWorker.CancelAsync();
                            score = 0;
                        }
                    }
                    else
                    {
                        // Live preview only: detect faces/features and display
                        // them without building a template. The finally block
                        // guarantees the frame reaches the view even when
                        // detection throws.
                        NleDetectionDetails[] details = null;
                        try
                        {
                            NleFace[] faces = _extractor.DetectFaces(grayscaleImage);
                            if (faces != null)
                            {
                                details = new NleDetectionDetails[faces.Length];
                                for (int i = 0; i < faces.Length; i++)
                                {
                                    details[i] = _extractor.DetectFacialFeatures(grayscaleImage, faces[i]);
                                }
                            }
                        }
                        finally
                        {
                            SetImageToView(nlView2, frame.ToBitmap(), details, NleExtractionStatus.None, -1, timeStamp);
                        }
                    }
                } //using
            } // try
            finally
            {
                // Each iteration owns its frame; release it unconditionally.
                if (frame != null)
                {
                    frame.Dispose();
                }
            }
        }// while
    }
    catch (Exception ex)
    {
        // On failure, release all cloned frames before reporting the error.
        foreach (NImage img in capturedImages)
        {
            img.Dispose();
        }
        capturedImages.Clear();
        ShowError(ex.Message);
    }
    finally
    {
        // Best-effort shutdown: close any open extraction session and stop the
        // active source; errors are swallowed because the worker is exiting.
        try
        {
            int baseFrameIndex;
            NleExtractionStatus status;
            if (extractionStarted)
            {
                _extractor.ExtractEnd(out baseFrameIndex, out status);
            }
            if (_fromCamera && _camera != null)
            {
                _camera.StopCapturing();
            }
            if (!_fromCamera && _videoReader != null)
            {
                _videoReader.Stop();
            }
        }
        catch { }
    }
}
private void SetImageToView(NLView nlView, Bitmap image, NleDetectionDetails[] details, NleExtractionStatus status, int extractionPercentDone, TimeSpan timeStamp)
{
    // Pushes a frame plus detection overlay to the view, updates the progress
    // bar, logs detection counts in enroll mode, and surfaces any error status.
    // Marshals itself onto the UI thread when called from the worker; in
    // enroll mode a cross-thread call is dropped rather than re-dispatched.
    if (InvokeRequired)
    {
        if (_mode != ProgramMode.Enroll)
        {
            BeginInvoke(new SetImageToViewDelegate(SetImageToView), nlView, image, details, status, extractionPercentDone, timeStamp);
        }
        return;
    }

    // Swap the displayed bitmap and release the one being replaced.
    Bitmap previousImage = nlView.Image;
    nlView.Image = image;
    if (previousImage != null && previousImage != image)
    {
        previousImage.Dispose();
    }
    nlView.DetectionDetails = details;

    // Show progress only while extraction is actually in flight.
    bool extractionInProgress = extractionPercentDone > 0 && extractionPercentDone < 100;
    toolStripProgressBar.Value = extractionInProgress ? extractionPercentDone : 0;

    if (_mode == ProgramMode.Enroll)
    {
        if (_newRecord == null)
        {
            int detectedCount = details == null ? 0 : details.Length;
            LogLine(string.Format("Live view: {0} face(s) detected.", detectedCount), true);
        }
        else if (details != null)
        {
            LogLine(string.Format("Template created. Live view: {0} face(s) detected.", details.Length), true);
        }
    }

    // Report any non-empty status description as an error message.
    String statusText = getStatusDescription(status);
    if (statusText != String.Empty)
    {
        ShowError(statusText);
    }
}
private void SetImageToView(NLView nlView, Bitmap image, NleDetectionDetails[] details, NleExtractionStatus status, int extractionPercentDone, TimeSpan timeStamp)
{
    // Updates the view with the latest frame and detection details, drives the
    // progress bar, writes enroll-mode log lines, and shows any error status.
    // When invoked off the UI thread, re-dispatches via BeginInvoke unless the
    // program is in enroll mode (enroll-mode cross-thread calls are ignored).
    if (InvokeRequired)
    {
        if (_mode != ProgramMode.Enroll)
        {
            BeginInvoke(new SetImageToViewDelegate(SetImageToView), nlView, image, details, status, extractionPercentDone, timeStamp);
        }
        return;
    }

    // Replace the current bitmap; dispose the old one unless it is reused.
    Bitmap oldBitmap = nlView.Image;
    nlView.Image = image;
    if (oldBitmap != null && oldBitmap != image)
    {
        oldBitmap.Dispose();
    }
    nlView.DetectionDetails = details;

    if (extractionPercentDone > 0 && extractionPercentDone < 100)
    {
        toolStripProgressBar.Value = extractionPercentDone;
    }
    else
    {
        // Outside the active range the bar is reset rather than hidden.
        toolStripProgressBar.Value = 0;
    }

    if (_mode == ProgramMode.Enroll)
    {
        bool templateReady = _newRecord != null;
        if (!templateReady)
        {
            int faceCount = (details != null) ? details.Length : 0;
            LogLine(string.Format("Live view: {0} face(s) detected.", faceCount), true);
        }
        else if (details != null)
        {
            LogLine(string.Format("Template created. Live view: {0} face(s) detected.", details.Length), true);
        }
    }

    // A non-empty description means the status represents a failure.
    String message = getStatusDescription(status);
    if (message != String.Empty)
    {
        ShowError(message);
    }
}
private String getStatusDescription(NleExtractionStatus status)
{
    // Converts an extraction status into a human-readable message.
    // Success (TemplateCreated) and idle (None) yield an empty string so that
    // callers skip the error popup; unrecognized values fall back to the raw
    // enum name. Single-exit variant: the message is accumulated and returned once.
    String description;
    switch (status)
    {
        case NleExtractionStatus.EyesNotDetected:
            description = "Eyes not detected";
            break;
        case NleExtractionStatus.FaceNotDetected:
            description = "Face not detected";
            break;
        case NleExtractionStatus.FaceTooCloseToImageBorder:
            description = "Face too close to image border";
            break;
        case NleExtractionStatus.GeneralizationFailed:
            description = "Generalization failed";
            break;
        case NleExtractionStatus.LivenessCheckFailed:
            description = "Liveness check failed";
            break;
        case NleExtractionStatus.QualityCheckExposureFailed:
            description = "Quality check failed";
            break;
        case NleExtractionStatus.QualityCheckGrayscaleDensityFailed:
            description = "Quality check: grayscale density failed";
            break;
        case NleExtractionStatus.QualityCheckSharpnessFailed:
            description = "Quality check: sharpness failed";
            break;
        case NleExtractionStatus.TemplateCreated:
        case NleExtractionStatus.None:
            // Nothing to report for success or idle.
            description = String.Empty;
            break;
        default:
            description = status.ToString();
            break;
    }
    return description;
}
private void SetImageToView(NLView nlView, Bitmap image, NleDetectionDetails[] details, NleExtractionStatus status, int extractionPercentDone, TimeSpan timeStamp)
{
    // Pushes a frame plus detection details to the view, updates the progress
    // bar, logs detection counts in enroll mode, and surfaces any error status.
    // Marshals onto the UI thread when needed; enroll-mode cross-thread calls
    // are intentionally dropped rather than re-dispatched.
    if (InvokeRequired)
    {
        if (_mode != ProgramMode.Enroll)
            BeginInvoke(new SetImageToViewDelegate(SetImageToView), nlView, image, details, status, extractionPercentDone, timeStamp);
        return;
    }

    // Swap the displayed bitmap, disposing the previous one unless reused.
    Bitmap bmp = nlView.Image;
    nlView.Image = image;
    if (bmp != null && bmp != image)
        bmp.Dispose();
    nlView.DetectionDetails = details;

    if (extractionPercentDone > 0 && extractionPercentDone < 100)
    {
        toolStripProgressBar.Value = extractionPercentDone;
    }
    else
    {
        toolStripProgressBar.Value = 0;
    }

    if (_mode == ProgramMode.Enroll)
    {
        if (_newRecord == null)
        {
            int count = 0;
            if (details != null)
                count = details.Length;
            LogLine(string.Format("Live view: {0} face(s) detected.", count), true);
        }
        else
        {
            if (details != null)
                // BUG FIX: this format string was split across a physical line
                // break, which is invalid in a regular C# string literal.
                // Rejoined to match the sibling copies of this method.
                LogLine(string.Format("Template created. Live view: {0} face(s) detected.", details.Length), true);
        }
    }

    // A non-empty description means the status represents a failure.
    String descr = getStatusDescription(status);
    if (descr != String.Empty)
        ShowError(descr);

    // Removed dead code from the original: a large commented-out status switch
    // (superseded by getStatusDescription), commented-out trackbar updates,
    // and an empty conditional on the failure statuses — none had any effect.
}