public byte[] ExtractTemplate(DataSetMain.ImageDataTable table)
        {
            // no extractor available, nothing to extract
            if (extractor == null)
            {
                return new byte[0];
            }

            NLTemplate template = null;

            try
            {
                // Start extraction of face template from a stream (sequence of images)
                extractor.ExtractStart(table.Rows.Count);

                NleExtractionStatus extractionStatus = NleExtractionStatus.None;
                // process stream of images
                foreach (DataSetMain.ImageRow row in table)
                {
                    // read the stored image bytes and convert the frame to grayscale,
                    // which the extractor expects; both images are disposed afterwards
                    using (NImage image = NImage.FromStream(new MemoryStream(row.Image)))
                    using (NGrayscaleImage grayscaleImage = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, image))
                    {
                        // use the image as another frame of the stream
                        NleDetectionDetails details;
                        int baseFrameIndex;
                        extractionStatus = extractor.ExtractNext(grayscaleImage, out details, out baseFrameIndex, out template);
                    }
                }
                if (extractionStatus == NleExtractionStatus.TemplateCreated)
                {
                    // return the serialized template
                    return template.Save().ToByteArray();
                }
            }
            catch (Exception e)
            {
                string msg = e.InnerException != null ? e.InnerException.Message : e.Message;
                throw new VerilookToolException(errormsg + msg);
            }
            finally
            {
                if (template != null)
                {
                    template.Dispose();
                }
            }

            // the image sequence ended without producing a template
            return new byte[0];
        }
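
A minimal calling sketch for the method above, for context. The DataSetMain typed dataset comes from the surrounding project; the generated Image table accessors (NewImageRow, AddImageRow) and the file paths are assumptions for illustration, not part of the original example.

        // Hypothetical caller (requires System.IO): gather a short burst of
        // frames into the typed dataset, then run the streamed extraction.
        var dataSet = new DataSetMain();
        foreach (string path in new[] { "frame0.jpg", "frame1.jpg", "frame2.jpg" })
        {
            DataSetMain.ImageRow row = dataSet.Image.NewImageRow();
            row.Image = File.ReadAllBytes(path); // raw bytes, decoded later by NImage.FromStream
            dataSet.Image.AddImageRow(row);
        }

        byte[] packedTemplate = ExtractTemplate(dataSet.Image);
        if (packedTemplate.Length == 0)
        {
            // no extractor available, or the sequence never yielded a template
        }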
Example #2
        private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            bool extractionStarted = false;

            try
            {
                NImage frame       = null;
                int    frameNumber = 0;
                int    bestFrame;
                int    frameCount  = Tools.LiveEnrolFrameCount;
                _extractor.DetectAllFeaturePoints = false;

                while (backgroundWorker.CancellationPending == false)
                {
                    if (_pendingFormat != null && _fromCamera)
                    {
                        _camera.SetCurrentFormat(_pendingFormat);
                        _pendingFormat = null;
                    }

                    if (!_fromCamera && _pause)
                    {
                        System.Threading.Thread.Sleep(500);
                        continue;
                    }

                    try
                    {
                        TimeSpan duration  = TimeSpan.Zero;
                        TimeSpan timeStamp = TimeSpan.Zero;

                        if (_fromCamera)
                        {
                            frame = _camera.GetFrame();
                        }
                        else
                        {
                            lock (_readerLock)
                            {
                                frame = _videoReader.ReadVideoSample(out timeStamp, out duration);
                            }
                        }

                        if (frame == null) //camera unplugged or end of file
                        {
                            createFaceRecord = false;
                            SetImageToView(null, null, null, NleExtractionStatus.None, -1, timeStamp);
                            return;
                        }

                        // detection and extraction run on grayscale frames
                        using (NGrayscaleImage grayscaleImage = frame.ToGrayscale())
                        {
                            if (createFaceRecord)
                            {
                                NleDetectionDetails details;
                                NLTemplate template = null;
                                if (!extractionStarted)
                                {
                                    // first frame of a new enrollment: reset extractor state and counters
                                    UpdateExtractorTemplateSize();
                                    frameCount = Tools.LiveEnrolFrameCount;
                                    _extractor.ExtractStart();
                                    extractionStarted = true;
                                    frameNumber       = 0;
                                    ClearCapturedImages();
                                }
                                frameNumber++;
                                NleExtractionStatus status = _extractor.ExtractNext(grayscaleImage, out details);
                                capturedImages.Add((NImage)frame.Clone());

                                // finish the stream once the extractor reports a terminal
                                // status or the live-enrollment frame budget is spent
                                if (status != NleExtractionStatus.None || frameNumber >= frameCount)
                                {
                                    template = _extractor.ExtractEnd(out bestFrame, out status);
                                    if (status == NleExtractionStatus.TemplateCreated)
                                    {
                                        NTemplate nTemplate = new NTemplate();
                                        NImage    bestImage = frame;
                                        if (bestFrame < capturedImages.Count && bestFrame >= 0)
                                        {
                                            bestImage = capturedImages[bestFrame];
                                        }
                                        _newRecord = new FaceRecord(template, bestImage, details);
                                        _newRecord.AddToTemplate(nTemplate);
                                        template.Dispose();
                                        // _newRecord now owns bestImage, so keep it out of the
                                        // captured-image cleanup below
                                        capturedImages.Remove(bestImage);
                                        _capturedTemplateList = new List<byte[]>();
                                        _capturedTemplateList.Add(nTemplate.Save().ToByteArray());

                                        score = Identify(_capturedTemplateList, _enrolledTemplateList);
                                        LogLine(string.Format("Face match details: score {0}.", score), true);

                                        backgroundWorker.CancelAsync();
                                    }
                                    else
                                    {
                                        _newRecord = null;
                                    }
                                    extractionStarted = false;
                                    createFaceRecord  = false;
                                }

                                if (!createFaceRecord)
                                {
                                    ClearCapturedImages();
                                }

                                SetImageToView(nlView2, frame.ToBitmap(), new NleDetectionDetails[] { details }, status, (int)(frameNumber * 100.0 / frameCount), timeStamp);

                                // any other status here means extraction failed; stop the worker
                                if (status != NleExtractionStatus.None && status != NleExtractionStatus.TemplateCreated)
                                {
                                    backgroundWorker.CancelAsync();

                                    score = 0;
                                }
                            }
                            else
                            {
                                NleDetectionDetails[] details = null;
                                try
                                {
                                    NleFace[] faces = _extractor.DetectFaces(grayscaleImage);
                                    if (faces != null)
                                    {
                                        details = new NleDetectionDetails[faces.Length];
                                        for (int i = 0; i < faces.Length; i++)
                                        {
                                            details[i] = _extractor.DetectFacialFeatures(grayscaleImage, faces[i]);
                                        }
                                    }
                                }
                                finally
                                {
                                    SetImageToView(nlView2, frame.ToBitmap(), details, NleExtractionStatus.None, -1, timeStamp);
                                }
                            }
                        } //using
                    }     // try
                    finally
                    {
                        if (frame != null)
                        {
                            frame.Dispose();
                        }
                    }
                }// while
            }
            catch (Exception ex)
            {
                foreach (NImage img in capturedImages)
                {
                    img.Dispose();
                }
                capturedImages.Clear();

                ShowError(ex.Message);
            }
            finally
            {
                try
                {
                    int baseFrameIndex;
                    NleExtractionStatus status;
                    if (extractionStarted)
                    {
                        // close the interrupted extraction and release any template it returns
                        NLTemplate leftover = _extractor.ExtractEnd(out baseFrameIndex, out status);
                        if (leftover != null)
                        {
                            leftover.Dispose();
                        }
                    }
                    if (_fromCamera && _camera != null)
                    {
                        _camera.StopCapturing();
                    }
                    if (!_fromCamera && _videoReader != null)
                    {
                        _videoReader.Stop();
                    }
                }
                catch { } // ignore cleanup failures during shutdown
            }
        }
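
The ClearCapturedImages helper called above is not shown on this page. A plausible sketch, mirroring the disposal loop in the catch block (each captured NImage clone wraps unmanaged memory, so it must be disposed explicitly rather than left to the garbage collector):

        // Plausible sketch of the ClearCapturedImages helper; the real
        // implementation is not part of this example.
        private void ClearCapturedImages()
        {
            foreach (NImage img in capturedImages)
            {
                img.Dispose();
            }
            capturedImages.Clear();
        }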