public void Initialize()
        {
            bool b = true;

            try
            {
                // check license
                if (!NLicense.ObtainComponents("/local", 5000, components))
                {
                    b = false;
                }

                // Create an extractor
                extractor = new NLExtractor();

                // Set the extractor template size (optional; Large is recommended when enrolling to a database)
                extractor.TemplateSize = NleTemplateSize.Large;
            }
            catch (Exception e)
            {
                string msg = e.InnerException != null ? e.InnerException.Message : e.Message;
                throw new VerilookToolException(errormsg + msg);
            }
            if (!b)
            {
                throw new VerilookToolException("Obtaining Verilook License failed");
            }
        }
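
The license components obtained here should be released when the tool shuts down. A minimal sketch of such a counterpart, assuming `components` is the same component string passed to ObtainComponents above (the Terminate method itself is hypothetical, not part of the original snippet):

        public void Terminate()
        {
            try
            {
                // Release the license components obtained in Initialize();
                // ReleaseComponents takes the same component string.
                NLicense.ReleaseComponents(components);
            }
            finally
            {
                // Dispose the extractor created in Initialize()
                if (extractor != null)
                {
                    extractor.Dispose();
                    extractor = null;
                }
            }
        }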
Example #2
        public FaceForm()
        {
            setCulture();
            InitializeComponent();

            _extractor          = Tools.Extractor;
            _deviceManager      = Devices.Instance.Cameras;
            chbLiveView.Checked = false;

            _deviceManager.DeviceAdded   += new EventHandler <NDeviceManagerDeviceEventArgs>(devMan_DeviceAdded);
            _deviceManager.DeviceRemoved += new EventHandler <NDeviceManagerDeviceEventArgs>(devMan_DeviceRemoved);
            ListAllDevices();

            backgroundWorker = new System.ComponentModel.BackgroundWorker();
            backgroundWorker.WorkerSupportsCancellation = true;
            backgroundWorker.DoWork             += new DoWorkEventHandler(backgroundWorker_DoWork);
            backgroundWorker.RunWorkerCompleted += new RunWorkerCompletedEventHandler(backgroundWorker_RunWorkerCompleted);
        }
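
A minimal sketch of the two hot-plug handlers wired up in this constructor, assuming they only need to refresh the device list; the bodies are illustrative, only the signatures are fixed by the EventHandler<NDeviceManagerDeviceEventArgs> subscriptions above:

        private void devMan_DeviceAdded(object sender, NDeviceManagerDeviceEventArgs e)
        {
            // Refresh the camera list on the UI thread when a device is plugged in
            if (InvokeRequired)
            {
                Invoke(new Action(ListAllDevices));
            }
            else
            {
                ListAllDevices();
            }
        }

        private void devMan_DeviceRemoved(object sender, NDeviceManagerDeviceEventArgs e)
        {
            // Same refresh when a device is unplugged
            if (InvokeRequired)
            {
                Invoke(new Action(ListAllDevices));
            }
            else
            {
                ListAllDevices();
            }
        }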
Example #3
        private bool createTemplate(Bitmap enrollmentBmp, bool largeTemplate, out NleDetectionDetails detDetails)
        {
            NLExtractor templateExtractor = new NLExtractor();

            if (largeTemplate)
            {
                templateExtractor.TemplateSize = NleTemplateSize.Large;
            }
            else
            {
                templateExtractor.TemplateSize = NleTemplateSize.Medium;
            }
            NImage              enrollmentImage     = NImage.FromBitmap(enrollmentBmp);
            NGrayscaleImage     enrollmentGrayscale = enrollmentImage.ToGrayscale();
            NleDetectionDetails _detDetails         = null;

            try {
                verifyLicense();
                NleExtractionStatus extractionStatus;
                facialTemplate = templateExtractor.Extract(enrollmentGrayscale, out _detDetails, out extractionStatus);

                if (extractionStatus != NleExtractionStatus.TemplateCreated)
                {
                    MessageBox.Show("Face Template Extraction Failed!\nPlease try again.\n" + extractionStatus.ToString());
                    detDetails = _detDetails;
                    return false;
                }
                else
                {
                    detDetails = _detDetails;
                    return true;
                }
            } catch (Exception ex) {
                MessageBox.Show(ex.ToString());
                detDetails = null;
                return false;
            } finally {
                NLicense.ReleaseComponents(Components);
                if (templateExtractor != null)
                {
                    templateExtractor.Dispose();
                }
            }
        }
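
A short usage sketch for createTemplate, assuming `enrollBmp` is a Bitmap already captured elsewhere (the variable name is hypothetical):

        // Hypothetical caller: extract a large template from a captured bitmap
        // and inspect the detection details on success.
        NleDetectionDetails details;
        if (createTemplate(enrollBmp, true, out details))
        {
            // facialTemplate now holds the extracted template;
            // details.Face.Confidence reports how reliable the detection was
            MessageBox.Show("Enrolled. Face confidence: " + details.Face.Confidence);
        }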
Example #4
        private NleDetectionDetails detectDetails()
        {
            NGrayscaleImage grayImage = NImage.FromBitmap(globalInsetFaceBmp).ToGrayscale();

            using (NLExtractor myExtractor = new NLExtractor())
            {
                // detect all facial feature points, not just the eyes
                if (!myExtractor.DetectAllFeaturePoints)
                {
                    myExtractor.DetectAllFeaturePoints = true;
                }

                NleFace faceInImage;
                if (grayImage == null || !myExtractor.DetectFace(grayImage, out faceInImage))
                {
                    return null;
                }
                return myExtractor.DetectFacialFeatures(grayImage, faceInImage);
            }
        }
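
A brief usage sketch, assuming `globalInsetFaceBmp` has already been set by the live-video loop; a null return means no face was found:

        // Hypothetical caller: handle the "no face detected" case explicitly.
        NleDetectionDetails details = detectDetails();
        if (details == null)
        {
            MessageBox.Show("No face detected in the current image.");
        }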
Example #5
        private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            try
            {
                activeCamera = (NCamera)e.Argument;
                activeCamera.StartCapturing();
                while (activeCamera.IsCapturing)
                {
                    if (backgroundWorker.CancellationPending)
                    {
                        activeCamera.StopCapturing();
                    }

                    if (activeCamera != null && activeCamera.IsCapturing)
                    {
                        using (NImage image = activeCamera.GetFrame())
                        {
                            video = image.ToBitmap();

                            using (NLExtractor extractor = new NLExtractor())
                            {
                                // convert image to grayscale
                                NGrayscaleImage grayscale = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, image);
                                extractor.MaxRecordsPerTemplate = 1;
                                // detect all faces that are suitable for face recognition in the image
                                NleFace[] faces = extractor.DetectFaces(grayscale);
                                //NleDetectionDetails[] detectionDetails
                                facedetectdetails = new NleDetectionDetails[faces.Length];
                                for (int i = 0; i < facedetectdetails.Length; i++)
                                {
                                    facedetectdetails[i] = extractor.DetectFacialFeatures(grayscale, faces[i]);
                                }
                                facesView.DrawConfidenceForEyes = true;
                                facesView.DrawFaceConfidence    = true;
                                facesView.DetectionDetails      = facedetectdetails;

                                for (int i = 0; i < facedetectdetails.Length; i++)
                                {
                                    faceAvailabilityStatus = facedetectdetails[i].FaceAvailable;
                                }
                                if (facesView.DrawConfidenceForEyes && facesView.DrawFaceConfidence)
                                {
                                    faceAvailabilityStatus = true;
                                }
                                else
                                {
                                    faceAvailabilityStatus = false;
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception exp)
            {
                //   MessageBox.Show("Error Capturing Image - Close and re-open browser window");
                // Logger.LogError(exp.ToString());
                //errorLog.Append(exp.ToString() + Environment.NewLine);
                e.Cancel = true;
            }
        }
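
A minimal sketch of how this worker might be started and stopped from the UI; the button handlers and the `cmbDevices` combo box are hypothetical, while RunWorkerAsync and CancelAsync are the standard BackgroundWorker calls that feed e.Argument and CancellationPending above:

        private void btnStart_Click(object sender, EventArgs e)
        {
            // Assumed: the device combo box holds NCamera instances listed by ListAllDevices()
            NCamera camera = cmbDevices.SelectedItem as NCamera;
            if (camera != null && !backgroundWorker.IsBusy)
            {
                backgroundWorker.RunWorkerAsync(camera);   // arrives as e.Argument in backgroundWorker_DoWork
            }
        }

        private void btnStop_Click(object sender, EventArgs e)
        {
            backgroundWorker.CancelAsync();                // sets CancellationPending checked in the capture loop
        }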
Example #6
        private void getLiveVideo()
        {
            // what if the camera is disconnected during feed?
            verifyLicense();
            NLExtractor liveExtractor = new NLExtractor();

            NleFace [] theseFaces = null;
            List <NleDetectionDetails> faceDetails = new List <NleDetectionDetails> ();

            liveExtractor.DetectAllFeaturePoints = false;             // False, will only detect eyes.
            NGrayscaleImage liveGrayImage;
            Bitmap          displayBmp;
            Draw            drawfeatures = new Draw();

            myPen = new System.Drawing.Pen(System.Drawing.Color.Blue, 2);
            System.Drawing.Point faceConfLoc = new System.Drawing.Point();
            System.Drawing.Point rEye        = new System.Drawing.Point();
            System.Drawing.Point lEye        = new System.Drawing.Point();
            var timer = new System.Diagnostics.Stopwatch();
            int timeSpan;
            int elapsed           = 0;
            int frameDelay        = 1000 / frameRate;
            int autoDetectDelay   = 0;
            int largestFaceNumber = 0;

            camera.StartCapturing();

            while (isLive)
            {
                // this loop only draws on the live display box. Largest face is detected elsewhere.
                try {
                    timer.Start();                     // time this frame so the sleep below can hold the target frame rate
                    currentImage = camera.GetFrame();
                    if (currentImage != null)
                    {
                        currentImage.FlipHorizontally();
                        // create grayscale image for extractor operations
                        liveGrayImage = currentImage.ToGrayscale();
                        displayBmp    = currentImage.ToBitmap();
                        theseFaces    = liveExtractor.DetectFaces(liveGrayImage);
                        int largestFaceWidth = 0;
                        int liveFaceCount    = theseFaces.Length;
                        if (liveFaceCount > 0)
                        {
                            faceDetails.Clear();

                            for (int i = 0; i < theseFaces.Length; i++)
                            {
                                faceDetails.Add(liveExtractor.DetectFacialFeatures(liveGrayImage, theseFaces [i]));
                                faceConfLoc.X = faceDetails [i].Face.Rectangle.Left;
                                faceConfLoc.Y = faceDetails [i].Face.Rectangle.Bottom;
                                rEye.X        = faceDetails [i].RightEyeCenter.X;
                                rEye.Y        = faceDetails [i].RightEyeCenter.Y;
                                lEye.X        = faceDetails [i].LeftEyeCenter.X;
                                lEye.Y        = faceDetails [i].LeftEyeCenter.Y;

                                if (boundingBoxOn)
                                {
                                    displayBmp = drawfeatures.drawFaceRectangle(faceDetails [i], displayBmp, myPen);
                                }
                                if (faceConfCheckBox.Checked)
                                {
                                    displayBmp = drawfeatures.faceConfidence(displayBmp, (int)faceDetails [i].Face.Confidence, faceConfLoc, myPen);
                                }
                                if (drawEyesCheckBox.Checked)
                                {
                                    displayBmp = drawfeatures.connect(displayBmp, rEye, lEye, myPen);
                                }
                                if (showEyeCheckBox.Checked)
                                {
                                    displayBmp = drawfeatures.confidence(displayBmp, faceDetails [i].LeftEyeCenter.Confidence, lEye, myPen);
                                    displayBmp = drawfeatures.confidence(displayBmp, faceDetails [i].RightEyeCenter.Confidence, rEye, myPen);
                                }

                                if (faceDetails [i].Face.Rectangle.Width > largestFaceWidth)
                                {
                                    largestFaceWidth  = faceDetails [i].Face.Rectangle.Width;
                                    largestFaceNumber = i;
                                }
                                globalInsetFaceBmp = drawfeatures.snipFace(currentImage.ToBitmap(), faceDetails [largestFaceNumber]);                                  // clip the largest face found so far
                            }
                            if (autoDetect)
                            {
                                autoDetectDelay++;
                            }
                        }
                        liveGrayImage.Dispose();
                        currentImage.Dispose();

                        if (matchNow || autoDetectDelay == 50)
                        {
                            autoDetectDelay = 0;
                            attemptMatch();
                        }
                        // display image on pictureBox
                        if (mainFeedPictureBox.InvokeRequired)
                        {
                            mainFeedPictureBox.Invoke(new Action(() => mainFeedPictureBox.Image = displayBmp));
                        }
                        else
                        {
                            mainFeedPictureBox.Image = displayBmp;
                        }
                        timer.Stop();
                        elapsed  = (Int32)timer.ElapsedMilliseconds;
                        timeSpan = frameDelay - elapsed;
                        if (timeSpan < 0)
                        {
                            timeSpan = 0;
                        }
                        Thread.Sleep(timeSpan);
                        timer.Reset();
                        theseFaces = null;
                    }
                } catch (Exception) {
                    // ignore per-frame errors so the live loop keeps running
                }
            }
            camera.StopCapturing();
        }
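
Because getLiveVideo blocks in its while loop, it has to run off the UI thread. A minimal sketch of starting and stopping it, assuming the `liveThread` field and the two helper methods are added for illustration:

        private Thread liveThread;

        private void startLiveFeed()
        {
            // Run the blocking live loop on its own background thread
            isLive = true;
            liveThread = new Thread(getLiveVideo);
            liveThread.IsBackground = true;    // don't keep the process alive on exit
            liveThread.Start();
        }

        private void stopLiveFeed()
        {
            // getLiveVideo polls isLive, exits its loop and calls camera.StopCapturing()
            isLive = false;
        }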