Example #1
        private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            bool extractionStarted = false;

            try
            {
                NImage          frame          = null;
                int             frameNumber    = 0;
                int             bestFrame;
                int             frameCount = Tools.LiveEnrolFrameCount;
                _extractor.DetectAllFeaturePoints = false;

                while (!backgroundWorker.CancellationPending)
                {
                    if (_pendingFormat != null && _fromCamera)
                    {
                        _camera.SetCurrentFormat(_pendingFormat);
                        _pendingFormat = null;
                    }

                    if (!_fromCamera && _pause)
                    {
                        System.Threading.Thread.Sleep(500);
                        continue;
                    }

                    try
                    {
                        TimeSpan duration  = TimeSpan.Zero;
                        TimeSpan timeStamp = TimeSpan.Zero;

                        if (_fromCamera)
                        {
                            frame = _camera.GetFrame();
                        }
                        else
                        {
                            lock (_readerLock)
                            {
                                frame = _videoReader.ReadVideoSample(out timeStamp, out duration);
                            }
                        }

                        if (frame == null) // camera unplugged or end of file
                        {
                            createFaceRecord = false;
                            SetImageToView(null, null, null, NleExtractionStatus.None, -1, timeStamp);
                            return;
                        }

                        using (NGrayscaleImage grayscaleImage = frame.ToGrayscale())
                        {
                            if (createFaceRecord)
                            {
                                NleDetectionDetails details;
                                NLTemplate          template = null;
                                if (!extractionStarted)
                                {
                                    UpdateExtractorTemplateSize();
                                    frameCount = Tools.LiveEnrolFrameCount;
                                    _extractor.ExtractStart();
                                    extractionStarted = true;
                                    frameNumber       = 0;
                                    ClearCapturedImages();
                                }
                                frameNumber++;
                                NleExtractionStatus status = _extractor.ExtractNext(grayscaleImage, out details);
                                capturedImages.Add((NImage)frame.Clone());

                                if (status != NleExtractionStatus.None || frameNumber >= frameCount)
                                {
                                    template = _extractor.ExtractEnd(out bestFrame, out status);
                                    if (status == NleExtractionStatus.TemplateCreated)
                                    {
                                        NTemplate nTemplate = new NTemplate();
                                        NImage    bestImage = frame;
                                        if (bestFrame < capturedImages.Count && bestFrame >= 0)
                                        {
                                            bestImage = capturedImages[bestFrame];
                                        }
                                        _newRecord = new FaceRecord(template, bestImage, details);
                                        _newRecord.AddToTemplate(nTemplate);
                                        template.Dispose();
                                        capturedImages.Remove(bestImage);
                                        _capturedTemplateList = new List<byte[]>();
                                        _capturedTemplateList.Add(nTemplate.Save().ToByteArray());

                                        score = Identify(_capturedTemplateList, _enrolledTemplateList);
                                        LogLine(string.Format("Face match details: score {0}.", score), true);

                                        backgroundWorker.CancelAsync();
                                    }
                                    else
                                    {
                                        _newRecord = null;
                                    }
                                    extractionStarted = false;
                                    createFaceRecord  = false;
                                }

                                if (!createFaceRecord)
                                {
                                    ClearCapturedImages();
                                }

                                SetImageToView(nlView2, frame.ToBitmap(), new NleDetectionDetails[] { details }, status, (int)(frameNumber * 100.0 / frameCount), timeStamp);

                                if (status != NleExtractionStatus.None && status != NleExtractionStatus.TemplateCreated)
                                {
                                    backgroundWorker.CancelAsync();

                                    score = 0;
                                }
                            }
                            else
                            {
                                NleDetectionDetails[] details = null;
                                try
                                {
                                    NleFace[] faces = _extractor.DetectFaces(grayscaleImage);
                                    if (faces != null)
                                    {
                                        details = new NleDetectionDetails[faces.Length];
                                        for (int i = 0; i < faces.Length; i++)
                                        {
                                            details[i] = _extractor.DetectFacialFeatures(grayscaleImage, faces[i]);
                                        }
                                    }
                                }
                                finally
                                {
                                    SetImageToView(nlView2, frame.ToBitmap(), details, NleExtractionStatus.None, -1, timeStamp);
                                }
                            }
                        } // using
                    } // try
                    finally
                    {
                        if (frame != null)
                        {
                            frame.Dispose();
                        }
                    }
                } // while
            }
            catch (Exception ex)
            {
                foreach (NImage img in capturedImages)
                {
                    img.Dispose();
                }
                capturedImages.Clear();

                ShowError(ex.Message);
            }
            finally
            {
                try
                {
                    int baseFrameIndex;
                    NleExtractionStatus status;
                    if (extractionStarted)
                    {
                        _extractor.ExtractEnd(out baseFrameIndex, out status);
                    }
                    if (_fromCamera && _camera != null)
                    {
                        _camera.StopCapturing();
                    }
                    if (!_fromCamera && _videoReader != null)
                    {
                        _videoReader.Stop();
                    }
                }
                catch { } // best-effort cleanup; ignore errors during shutdown
            }
        }
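
The handler above relies on a BackgroundWorker wired up elsewhere in the form. A minimal sketch of that wiring, assuming the worker is created in code rather than by the designer (the StartLiveEnroll method name is hypothetical, not from the original):

        private void StartLiveEnroll()
        {
            // The DoWork handler polls CancellationPending and calls CancelAsync()
            // on itself, so the worker must be created with cancellation enabled.
            backgroundWorker = new System.ComponentModel.BackgroundWorker
            {
                WorkerSupportsCancellation = true
            };
            backgroundWorker.DoWork += backgroundWorker_DoWork;
            backgroundWorker.RunWorkerAsync();
        }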
Example #2
        private void StopVideoCapture()
        {
            try
            {
                backgroundWorker.CancelAsync();
                timer.Stop();


                if (facesView.Face.Image != null && facedetectdetails.Length > 0)
                {
                    HorizontalPst = (facedetectdetails[0].Eyes.First.X + facedetectdetails[0].Eyes.Second.X) / 2;
                    VerticalPst   = (facedetectdetails[0].Eyes.First.Y + facedetectdetails[0].Eyes.Second.Y) / 2;
                    BottomPst     = facedetectdetails[0].Face.Rectangle.Bottom;

                    var image = activeCamera.GetFrame();
                    var ntfi  = new Ntfi();
                    var token = ntfi.CreateTokenFaceImage(image, facedetectdetails[0].Eyes.First, facedetectdetails[0].Eyes.Second);
                    _croppedImage = token.ToBitmap();
                    faceInImage = true;
                    buttonUseImage.Enabled = true;
                    activeCamera.StopCapturing();

                    /* commented by Ayo
                     * if (VerticalPst < 320)
                     * {
                     *  if (HorizontalPst > 230)
                     *  {
                     *      if (BottomPst < 335)
                     *      {
                     *          NImage image = activeCamera.GetCurrentFrame();
                     *          Ntfi ntfi = new Ntfi();
                     *          NImage token = ntfi.CreateTokenFaceImage(image, facedetectdetails[0].Eyes.First, facedetectdetails[0].Eyes.Second);
                     *          croppedImage = token.ToBitmap();
                     *          faceInImage = true; buttonUseImage.Enabled = true;
                     *          activeCamera.StopCapturing();
                     *      }
                     *      else { MessageBox.Show("Image Not Position Properly!", "Bottom Post"); }
                     *
                     *  }
                     *  else
                     *  {
                     *      MessageBox.Show("Image Not position Properly!", "Horzontal Post");
                     *  }
                     * }
                     * else
                     * {
                     *  MessageBox.Show("Image too close to camera!", "Vertical Post");
                     * }
                     */
                    //end ayo comments
                }
                else
                {
                    faceInImage = false;
                    MessageBox.Show(this, @"No eyes and face detected", @"Face Biometrics", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
            }
            catch (Exception exp)
            {
                _errorLog.Append(exp.ToString() + Environment.NewLine);
            }

            finally
            {
                btCaptureDevicePhoto.Enabled = true;
                buttonStopCapture.Enabled    = false;
            }
        }
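
Note that the token-image step above never disposes the frame returned by GetFrame() or the Ntfi instance. A minimal sketch of the same step with deterministic disposal, assuming these Neurotec types are IDisposable (NImage is disposed this way elsewhere in these examples):

        using (var image = activeCamera.GetFrame())
        using (var ntfi = new Ntfi())
        using (var token = ntfi.CreateTokenFaceImage(image, facedetectdetails[0].Eyes.First, facedetectdetails[0].Eyes.Second))
        {
            _croppedImage = token.ToBitmap(); // the Bitmap copy outlives the NImage
        }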
Example #3
        private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            try
            {
                activeCamera = (NCamera)e.Argument;
                activeCamera.StartCapturing();
                while (activeCamera.IsCapturing)
                {
                    if (backgroundWorker.CancellationPending)
                    {
                        activeCamera.StopCapturing();
                    }

                    if (activeCamera != null && activeCamera.IsCapturing)
                    {
                        using (NImage image = activeCamera.GetFrame())
                        {
                            video = image.ToBitmap();

                            using (NLExtractor extractor = new NLExtractor())
                            {
                                // convert image to grayscale
                                NGrayscaleImage grayscale = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, image);
                                extractor.MaxRecordsPerTemplate = 1;
                                // detect all faces that are suitable for face recognition in the image
                                NleFace[] faces = extractor.DetectFaces(grayscale);
                                facedetectdetails = new NleDetectionDetails[faces.Length];
                                for (int i = 0; i < facedetectdetails.Length; i++)
                                {
                                    facedetectdetails[i] = extractor.DetectFacialFeatures(grayscale, faces[i]);
                                }
                                facesView.DrawConfidenceForEyes = true;
                                facesView.DrawFaceConfidence    = true;
                                facesView.DetectionDetails      = facedetectdetails;

                                for (int i = 0; i < facedetectdetails.Length; i++)
                                {
                                    faceAvailabilityStatus = facedetectdetails[i].FaceAvailable;
                                }
                                faceAvailabilityStatus = facesView.DrawConfidenceForEyes && facesView.DrawFaceConfidence;
                            }
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Swallow capture errors (e.g. the camera being unplugged mid-capture)
                // and cancel the worker rather than crashing the UI thread.
                e.Cancel = true;
            }
        }
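
This handler takes its camera from DoWorkEventArgs.Argument, so the caller must pass the NCamera when starting the worker. A sketch of the call site (the button handler name is borrowed from Example #2; the exact wiring is an assumption):

        private void btCaptureDevicePhoto_Click(object sender, System.EventArgs e)
        {
            // Example #3 casts e.Argument to NCamera, so the camera is supplied here;
            // selection of activeCamera is assumed to happen elsewhere.
            backgroundWorker.RunWorkerAsync(activeCamera);
        }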
Example #4
        private void getLiveVideo()
        {
            // what if the camera is disconnected during feed?
            verifyLicense();
            NLExtractor liveExtractor = new NLExtractor();

            NleFace[] theseFaces = null;
            List<NleDetectionDetails> faceDetails = new List<NleDetectionDetails>();

            liveExtractor.DetectAllFeaturePoints = false;             // false: only eye locations are detected
            NGrayscaleImage liveGrayImage;
            Bitmap          displayBmp;
            Draw            drawfeatures = new Draw();

            myPen = new System.Drawing.Pen(System.Drawing.Color.Blue, 2);
            System.Drawing.Point faceConfLoc = new System.Drawing.Point();
            System.Drawing.Point rEye        = new System.Drawing.Point();
            System.Drawing.Point lEye        = new System.Drawing.Point();
            var timer = new System.Diagnostics.Stopwatch();
            int timeSpan;
            int elapsed           = 0;
            int frameDelay        = 1000 / frameRate;
            int autoDetectDelay   = 0;
            int largestFaceNumber = 0;

            camera.StartCapturing();

            while (isLive == true)
            {
                // this loop only draws on the live display box. Largest face is detected elsewhere.
                try {
                    currentImage = camera.GetFrame();
                    if (currentImage != null)
                    {
                        currentImage.FlipHorizontally();
                        // create grayscale image for extractor operations
                        liveGrayImage = currentImage.ToGrayscale();
                        displayBmp    = currentImage.ToBitmap();
                        theseFaces    = liveExtractor.DetectFaces(liveGrayImage);
                        int largestFaceWidth = 0;
                        int liveFaceCount    = theseFaces.Length;
                        if (liveFaceCount > 0)
                        {
                            faceDetails.Clear(); // drop details from the previous frame

                            for (int i = 0; i < theseFaces.Length; i++)
                            {
                                faceDetails.Add(liveExtractor.DetectFacialFeatures(liveGrayImage, theseFaces[i]));
                                faceConfLoc.X = faceDetails[i].Face.Rectangle.Left;
                                faceConfLoc.Y = faceDetails[i].Face.Rectangle.Bottom;
                                rEye.X        = faceDetails[i].RightEyeCenter.X;
                                rEye.Y        = faceDetails[i].RightEyeCenter.Y;
                                lEye.X        = faceDetails[i].LeftEyeCenter.X;
                                lEye.Y        = faceDetails[i].LeftEyeCenter.Y;

                                if (boundingBoxOn)
                                {
                                    displayBmp = drawfeatures.drawFaceRectangle(faceDetails[i], displayBmp, myPen);
                                }
                                if (faceConfCheckBox.Checked)
                                {
                                    displayBmp = drawfeatures.faceConfidence(displayBmp, (int)faceDetails[i].Face.Confidence, faceConfLoc, myPen);
                                }
                                if (drawEyesCheckBox.Checked)
                                {
                                    displayBmp = drawfeatures.connect(displayBmp, rEye, lEye, myPen);
                                }
                                if (showEyeCheckBox.Checked)
                                {
                                    displayBmp = drawfeatures.confidence(displayBmp, faceDetails[i].LeftEyeCenter.Confidence, lEye, myPen);
                                    displayBmp = drawfeatures.confidence(displayBmp, faceDetails[i].RightEyeCenter.Confidence, rEye, myPen);
                                }

                                if (faceDetails[i].Face.Rectangle.Width > largestFaceWidth)
                                {
                                    largestFaceWidth  = faceDetails[i].Face.Rectangle.Width; // track the widest face seen so far
                                    largestFaceNumber = i;
                                }
                            }
                            // make the face clipping from the largest detected face
                            globalInsetFaceBmp = drawfeatures.snipFace(currentImage.ToBitmap(), faceDetails[largestFaceNumber]);
                            if (autoDetect)
                            {
                                autoDetectDelay++;
                            }
                        }
                        liveGrayImage.Dispose();
                        currentImage.Dispose();

                        if (matchNow || autoDetectDelay == 50)
                        {
                            autoDetectDelay = 0;
                            attemptMatch();
                        }
                        // display image on pictureBox
                        if (mainFeedPictureBox.InvokeRequired)
                        {
                            mainFeedPictureBox.Invoke(new Action(() => mainFeedPictureBox.Image = displayBmp));
                        }
                        else
                        {
                            mainFeedPictureBox.Image = displayBmp;
                        }
                        timer.Stop();
                        elapsed  = (int)timer.ElapsedMilliseconds;
                        timeSpan = frameDelay - elapsed;
                        if (timeSpan < 0)
                        {
                            timeSpan = 0;
                        }
                        Thread.Sleep(timeSpan);
                        timer.Reset();
                        theseFaces = null;
                    }
                } catch (Exception) {
                    // ignore transient frame-grab errors and keep the live loop running
                }
            }
            camera.StopCapturing();
        }
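
Draw is a custom helper class whose source is not shown. A plausible sketch of its drawFaceRectangle method, assuming Face.Rectangle exposes X/Y/Width/Height and that plain GDI+ drawing is all that is required:

        // Plausible sketch of the custom Draw helper used above; the real class
        // is not shown in the original, so this implementation is an assumption.
        public class Draw
        {
            public System.Drawing.Bitmap drawFaceRectangle(NleDetectionDetails details, System.Drawing.Bitmap target, System.Drawing.Pen pen)
            {
                // Draw the detected face rectangle directly onto the frame bitmap.
                using (var g = System.Drawing.Graphics.FromImage(target))
                {
                    var r = details.Face.Rectangle; // assumed to expose X/Y/Width/Height
                    g.DrawRectangle(pen, r.X, r.Y, r.Width, r.Height);
                }
                return target;
            }
        }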