/// <summary>
/// Extracts a face template from a stream of images stored in the given table.
/// </summary>
/// <param name="table">Table whose rows each carry one encoded image (row.Image bytes).</param>
/// <returns>The serialized template bytes, or an empty array when no extractor is
/// configured or no template could be created.</returns>
/// <exception cref="VerilookToolException">Wraps any SDK failure during extraction.</exception>
public byte[] ExtractTemplate(DataSetMain.ImageDataTable table)
        {
            // No extractor configured -> nothing to do.
            if (extractor == null)
            {
                return(new byte[0]);
            }

            NLTemplate template = null;

            try
            {
                // Start extraction of face template from a stream (sequence of images)
                extractor.ExtractStart(table.Rows.Count);

                NleExtractionStatus extractionStatus = NleExtractionStatus.None;
                // Feed each stored image to the extractor as the next frame of the stream.
                foreach (DataSetMain.ImageRow row in table)
                {
                    // Decode the stored bytes; both the decoded image and its grayscale
                    // conversion are disposed deterministically (the original leaked the
                    // source image if the grayscale conversion threw).
                    using (NImage image = NImage.FromStream(new MemoryStream(row.Image)))
                    using (NGrayscaleImage grayscaleImage = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, image))
                    {
                        NleDetectionDetails details;
                        int baseFrameIndex;
                        extractionStatus = extractor.ExtractNext(grayscaleImage, out details, out baseFrameIndex, out template);
                    }
                }
                if (extractionStatus == NleExtractionStatus.TemplateCreated)
                {
                    // return compressed template
                    return(template.Save().ToByteArray());
                }
            }
            catch (Exception e)
            {
                // Prefer the inner exception's message when present; surface as a tool error.
                string msg = e.InnerException != null ? e.InnerException.Message : e.Message;
                throw new VerilookToolException(errormsg + msg);
            }
            finally
            {
                if (template != null)
                {
                    template.Dispose();
                }
            }

            // Extraction finished without producing a template.
            return(new byte[0]);
        }
Beispiel #2
0
        /// <summary>
        /// Extracts a fingerprint template from a clone of <paramref name="image"/> and
        /// reports the template quality (0-100) on <paramref name="lb"/>, color-coded:
        /// green above 80, orange above 50, red otherwise.
        /// </summary>
        /// <param name="image">Source grayscale fingerprint image (not modified; a clone is used).</param>
        /// <param name="lb">Label updated with "Q: {percent}" and a quality color.</param>
        /// <returns>Quality percentage (0-100); 0 when extraction fails.</returns>
        private int GetImageQuality(NGrayscaleImage image, Label lb)
        {
            NGrayscaleImage resultImage = (NGrayscaleImage)image.Clone();

            try
            {
                NfeExtractionStatus extractionStatus;
                template = Data.NFExtractor.Extract(resultImage, NFPosition.Unknown, NFImpressionType.LiveScanPlain, out extractionStatus);
                if (extractionStatus != NfeExtractionStatus.TemplateCreated)
                {
                    ShowZeroQuality(lb);
                    return(0);
                }
            }
            catch (Exception)
            {
                ShowZeroQuality(lb);
                return(0);
            }
            finally
            {
                // Extraction worked on a private clone; release it deterministically
                // (the original leaked it).
                resultImage.Dispose();
            }

            // Guard BEFORE cloning: the original only checked for null after calling
            // Clone(), which would have thrown NullReferenceException on a null record.
            if (template == null)
            {
                ShowZeroQuality(lb);
                return(0);
            }

            // Keep a private clone of the record and dispose the extractor-returned
            // original so it does not leak.
            NFRecord extracted = template;
            this.template = (NFRecord)extracted.Clone();
            extracted.Dispose();

            int i = Helpers.QualityToPercent(template.Quality);
            lb.Text = string.Format("Q: {0:P0}", i / 100.0);
            if (i > 80)
            {
                lb.ForeColor = Color.GreenYellow;
            }
            else if (i > 50)
            {
                lb.ForeColor = Color.Orange;
            }
            else
            {
                lb.ForeColor = Color.Red;
            }

            return(i);
        }

        // Shows a zero-quality reading ("Q: 0 %") in red on the given label.
        private void ShowZeroQuality(Label lb)
        {
            lb.Text      = string.Format("Q: {0:P0}", 0);
            lb.ForeColor = Color.Red;
        }
Beispiel #3
0
        /// <summary>
        /// Loads a WSQ-compressed fingerprint image, converts it to grayscale at a
        /// plausible resolution, and hands it to <c>OnImage</c> for enrollment.
        /// </summary>
        /// <param name="wsqImage">WSQ image retrieved from the database.</param>
        /// <returns>true when the image was decoded and handed off; false on any failure.</returns>
        private bool enrollFromWSQ(WsqImage wsqImage)
        {
            if (!isUserIdValid())
            {
                return(false);
            }

            NImage nImage;

            try
            {
                using (MemoryStream ms = new MemoryStream(wsqImage.Content))
                {
                    nImage = NImageFormat.Wsq.LoadImage(ms);
                }
            }
            catch (Exception ex)
            {
                string text = string.Format("Error creating image retrieved from database {0}", ex.Message);
                ShowErrorMessage(text);

                return(false);
            }

            try
            {
                // WSQ files often carry a bogus or missing resolution; fingerprint
                // extraction expects roughly 500 dpi, so substitute that when the
                // stored value is implausibly low.
                float horzResolution = nImage.HorzResolution;
                float vertResolution = nImage.VertResolution;

                if (horzResolution < 250)
                {
                    horzResolution = 500;
                }
                if (vertResolution < 250)
                {
                    vertResolution = 500;
                }

                NGrayscaleImage grayImage = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, horzResolution, vertResolution, nImage);

                // Ownership of grayImage passes to OnImage (it is not disposed here).
                OnImage(grayImage);

                return(true);
            }
            finally
            {
                // The grayscale conversion copies the pixels, so the decoded source
                // image can be released here (the original leaked it).
                nImage.Dispose();
            }
        }
Beispiel #4
0
        /// <summary>
        /// Extracts a face template from the given bitmap into <c>facialTemplate</c>.
        /// </summary>
        /// <param name="enrollmentBmp">Bitmap to extract the face from.</param>
        /// <param name="largeTemplate">true for a large template; false for medium.</param>
        /// <param name="detDetails">Receives the detection details (null when an exception occurred).</param>
        /// <returns>true when a template was created; false otherwise.</returns>
        private bool createTemplate(Bitmap enrollmentBmp, bool largeTemplate, out NleDetectionDetails detDetails)
        {
            NLExtractor templateExtractor = new NLExtractor();

            templateExtractor.TemplateSize = largeTemplate ? NleTemplateSize.Large : NleTemplateSize.Medium;

            try {
                verifyLicense();

                // Dispose the intermediate images deterministically instead of leaking
                // them to the finalizer (the original never disposed either one).
                using (NImage enrollmentImage = NImage.FromBitmap(enrollmentBmp))
                using (NGrayscaleImage enrollmentGrayscale = enrollmentImage.ToGrayscale())
                {
                    NleExtractionStatus extractionStatus;
                    NleDetectionDetails _detDetails;
                    facialTemplate = templateExtractor.Extract(enrollmentGrayscale, out _detDetails, out extractionStatus);

                    detDetails = _detDetails;
                    if (extractionStatus != NleExtractionStatus.TemplateCreated)
                    {
                        MessageBox.Show("Face Template Extraction Failed!\nPlease try again.\n" + extractionStatus.ToString());
                        return(false);
                    }
                    return(true);
                }
            } catch (Exception ex) {
                MessageBox.Show("" + ex);
                detDetails = null;
                return(false);
            } finally {
                // Release licensed components and the extractor regardless of outcome.
                NLicense.ReleaseComponents(Components);
                templateExtractor.Dispose();
            }
        }
        // Routes the freshly acquired grayscale image to the handler that matches
        // the current program mode; unknown modes are ignored.
        private void OnImage(NGrayscaleImage grayscaleImage)
        {
            if (_mode == ProgramMode.Enroll)
            {
                doEnroll();
            }
            else if (_mode == ProgramMode.Verify)
            {
                doVerify();
            }
        }
Beispiel #6
0
        /// <summary>
        /// Detects a face in <c>globalInsetFaceBmp</c> and returns its facial-feature
        /// detection details, or null when no face is found.
        /// </summary>
        /// <returns>Detection details for the first detected face, or null.</returns>
        private NleDetectionDetails detectDetails()
        {
            // Dispose all intermediates deterministically: the original leaked the
            // NImage returned by FromBitmap, the grayscale image, and the extractor.
            using (NImage sourceImage = NImage.FromBitmap(globalInsetFaceBmp))
            using (NGrayscaleImage grayImage = sourceImage.ToGrayscale())
            using (NLExtractor myExtractor = new NLExtractor())
            {
                // Always request the full feature-point set.
                myExtractor.DetectAllFeaturePoints = true;

                NleFace _faceInImage;
                if (grayImage == null || !myExtractor.DetectFace(grayImage, out _faceInImage))
                {
                    return(null);
                }
                return(myExtractor.DetectFacialFeatures(grayImage, _faceInImage));
            }
        }
        /// <summary>
        /// Background capture/extraction loop. Pulls frames from the camera or a video
        /// reader and either (a) when <c>createFaceRecord</c> is set, accumulates frames
        /// into a face template, matches it against the enrolled templates and stores
        /// the result in <c>_newRecord</c>/<c>score</c>, or (b) otherwise just detects
        /// faces/features and paints them on the view. Runs until cancellation, end of
        /// source, or a hard extraction failure.
        /// </summary>
        private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            // True between ExtractStart and ExtractEnd; the outer finally uses it to
            // close a half-finished extraction session.
            bool extractionStarted = false;

            try
            {
                NImage          frame          = null;
                NGrayscaleImage grayscaleImage = null;
                int             frameNumber    = 0;
                int             bestFrame;
                int             frameCount = Tools.LiveEnrolFrameCount;
                _extractor.DetectAllFeaturePoints = false;

                while (backgroundWorker.CancellationPending == false)
                {
                    // Apply a camera format change requested from the UI thread.
                    if (_pendingFormat != null && _fromCamera)
                    {
                        _camera.SetCurrentFormat(_pendingFormat);
                        _pendingFormat = null;
                    }

                    // Paused video playback: idle without consuming samples.
                    if (!_fromCamera && _pause)
                    {
                        System.Threading.Thread.Sleep(500);
                        continue;
                    }

                    try
                    {
                        TimeSpan duration  = TimeSpan.Zero;
                        TimeSpan timeStamp = TimeSpan.Zero;

                        if (_fromCamera)
                        {
                            frame = _camera.GetFrame();
                        }
                        else
                        {
                            // Reader is shared with other threads; serialize access.
                            lock (_readerLock)
                            {
                                frame = _videoReader.ReadVideoSample(out timeStamp, out duration);
                            }
                        }

                        if (frame == null) //camera unplugged or end of file
                        {
                            createFaceRecord = false;
                            SetImageToView(null, null, null, NleExtractionStatus.None, -1, timeStamp);
                            return;
                        }

                        using (grayscaleImage = frame.ToGrayscale())
                        {
                            if (createFaceRecord)
                            {
                                NleDetectionDetails details;
                                NLTemplate          template = null;
                                //NBuffer template = null;
                                // Lazily begin a multi-frame extraction session.
                                if (!extractionStarted)
                                {
                                    UpdateExtractorTemplateSize();
                                    frameCount = Tools.LiveEnrolFrameCount;
                                    _extractor.ExtractStart();
                                    extractionStarted = true;
                                    frameNumber       = 0;
                                    ClearCapturedImages();
                                }
                                frameNumber++;
                                NleExtractionStatus status = _extractor.ExtractNext(grayscaleImage, out details);
                                // Keep a copy of every frame so the best one can be kept later.
                                capturedImages.Add((NImage)frame.Clone());

                                // The session ends when the extractor reports a status or
                                // the frame budget is exhausted.
                                if (status != NleExtractionStatus.None || frameNumber >= frameCount)
                                {
                                    template = _extractor.ExtractEnd(out bestFrame, out status);
                                    if (status == NleExtractionStatus.TemplateCreated)
                                    {
                                        NTemplate nTemplate = new NTemplate();
                                        NImage    bestImage = frame;
                                        if (bestFrame < capturedImages.Count && bestFrame >= 0)
                                        {
                                            bestImage = capturedImages[bestFrame];
                                        }
                                        _newRecord = new FaceRecord(template, bestImage, details);
                                        _newRecord.AddToTemplate(nTemplate);
                                        template.Dispose();
                                        // bestImage is now owned by _newRecord; keep it out
                                        // of the bulk cleanup below.
                                        capturedImages.Remove(bestImage);
                                        _capturedTemplateList = new List <byte[]>();
                                        _capturedTemplateList.Add(nTemplate.Save().ToByteArray());

                                        // Match the captured template against the enrolled ones.
                                        score = Identify(_capturedTemplateList, _enrolledTemplateList);
                                        LogLine(string.Format("Face match details: score {0}.", score), true);

                                        backgroundWorker.CancelAsync();
                                    }
                                    else
                                    {
                                        _newRecord = null;
                                    }
                                    extractionStarted = false;
                                    createFaceRecord  = false;
                                }

                                if (!createFaceRecord)
                                {
                                    ClearCapturedImages();
                                }

                                SetImageToView(nlView2, frame.ToBitmap(), new NleDetectionDetails[] { details }, status, (int)(frameNumber * 100.0 / frameCount), timeStamp);

                                // Any terminal status other than success aborts the capture.
                                if (status != NleExtractionStatus.None && status != NleExtractionStatus.TemplateCreated)
                                {
                                    backgroundWorker.CancelAsync();

                                    score = 0;
                                }
                            }
                            else
                            {
                                // Preview mode: detect faces/features only and paint them.
                                NleDetectionDetails[] details = null;
                                try
                                {
                                    NleFace[] faces = _extractor.DetectFaces(grayscaleImage);
                                    if (faces != null)
                                    {
                                        details = new NleDetectionDetails[faces.Length];
                                        for (int i = 0; i < faces.Length; i++)
                                        {
                                            details[i] = _extractor.DetectFacialFeatures(grayscaleImage, faces[i]);
                                        }
                                    }
                                }
                                finally
                                {
                                    // Update the view even if feature detection threw.
                                    SetImageToView(nlView2, frame.ToBitmap(), details, NleExtractionStatus.None, -1, timeStamp);
                                }
                            }
                        } //using
                    }     // try
                    finally
                    {
                        // Each loop iteration owns its frame; release it.
                        if (frame != null)
                        {
                            frame.Dispose();
                        }
                    }
                }// while
            }
            catch (Exception ex)
            {
                // On failure, release the retained frame copies before surfacing the error.
                foreach (NImage img in capturedImages)
                {
                    img.Dispose();
                }
                capturedImages.Clear();

                ShowError(ex.Message);
            }
            finally
            {
                // Best-effort shutdown: close any open extraction session and stop the
                // active source; errors here are deliberately ignored.
                try
                {
                    int baseFrameIndex;
                    NleExtractionStatus status;
                    if (extractionStarted)
                    {
                        _extractor.ExtractEnd(out baseFrameIndex, out status);
                    }
                    if (_fromCamera && _camera != null)
                    {
                        _camera.StopCapturing();
                    }
                    if (!_fromCamera && _videoReader != null)
                    {
                        _videoReader.Stop();
                    }
                }
                catch { }
            }
        }
        /// <summary>
        /// Enrolls a face from a still image. Depending on the "Enroll" app setting the
        /// image comes from a web service, the database, or a bundled sample file; it is
        /// shown in the selected view and a face template is extracted into
        /// <c>_newRecord</c> and the matching template list.
        /// </summary>
        /// <param name="toView2">false: use nlView1 / _enrolledTemplateList;
        /// true: use nlView2 / _capturedTemplateList.</param>
        private void enrollFromImage(bool toView2)
        {
            NImage nImage = null;

            if (System.Configuration.ConfigurationManager.AppSettings["Enroll"] != "file")
            {
                // Fetch the raw picture bytes from the web service or the database.
                byte[]          buffer = null;
                DBHelper.DBUtil db     = new DBHelper.DBUtil();
                if (System.Configuration.ConfigurationManager.AppSettings["Enroll"] == "service")
                {
                    buffer = db.GetImageFromWebService(IMAGE_TYPE.picture, this.userId);
                }
                else
                {
                    buffer = db.GetImage(IMAGE_TYPE.picture, this.userId);
                }

                MemoryStream ms = null;
                try
                {
                    if (buffer != null)
                    {
                        ms = new MemoryStream(buffer);
                    }

                    // NOTE(review): when no image bytes were found, nImage stays null and
                    // the code below will throw NullReferenceException (caught by the
                    // outer try) — confirm upstream guarantees an image exists.
                    if (ms != null)
                    {
                        nImage = NImage.FromStream(ms);
                    }
                }
                catch (Exception ex)
                {
                    ShowError(string.Format("Error creating image retrieved from database {0}", ex.Message));
                    return;
                }
                finally
                {
                    if (ms != null)
                    {
                        ms.Dispose();
                    }
                }
            }
            else
            {
                // File mode: load one of the two bundled sample images.
                if (!toView2)
                {
                    nImage = NImage.FromFile(Path.GetDirectoryName(Application.ExecutablePath) + "\\Roman.jpg");
                }
                else
                {
                    nImage = NImage.FromFile(Path.GetDirectoryName(Application.ExecutablePath) + "\\Roman2.jpg");
                }
            }

            try
            {
                if (!toView2)
                {
                    nlView1.Image = nImage.ToBitmap();
                }
                else
                {
                    nlView2.Image = nImage.ToBitmap();
                }

                using (NGrayscaleImage gray = nImage.ToGrayscale())
                {
                    NleDetectionDetails details;
                    NleExtractionStatus status;
                    // Best-effort: apply the configured feature-point setting; ignore
                    // a missing/invalid setting.
                    try { _extractor.DetectAllFeaturePoints = Settings.Default.ExtractorDetectAllFeaturesNonLive; }
                    catch { }
                    NLTemplate template = _extractor.Extract(gray, out details, out status);
                    if (status != NleExtractionStatus.TemplateCreated)
                    {
                        _newRecord = null;
//                        ShowError(string.Format("Template extraction failed: {0}", status));
                        // Prefer a human-readable description of the failure when one exists.
                        String descr = getStatusDescription(status);
                        if (descr != String.Empty)
                        {
                            ShowError(string.Format("Template extraction failed: {0}", descr));
                        }
                        else
                        {
                            ShowError(string.Format("Template extraction failed: {0}", status));
                        }
                        return;
                    }
                    else
                    {
                        // Keep the record and serialize the template into the list that
                        // matches the target view.
                        // NOTE(review): template is disposed right after FaceRecord takes
                        // it — presumably FaceRecord copies what it needs; confirm.
                        _newRecord = new FaceRecord(template, nImage, details);
                        if (!toView2)
                        {
                            _enrolledTemplateList = new List <byte[]>();
                            _enrolledTemplateList.Add(template.Save().ToByteArray());
                        }
                        else
                        {
                            _capturedTemplateList = new List <byte[]>();
                            _capturedTemplateList.Add(template.Save().ToByteArray());
                        }
                        template.Dispose();
                    }

                    if (!toView2)
                    {
                        SetImageToView(nlView1, nImage.ToBitmap(), new NleDetectionDetails[] { details }, status, 100, TimeSpan.Zero);
                    }
                    else
                    {
                        SetImageToView(nlView2, nImage.ToBitmap(), new NleDetectionDetails[] { details }, status, 100, TimeSpan.Zero);
                    }
                }
            }
            catch (Exception ex)
            {
                ShowError(ex.Message);
            }
            return;
        }
Beispiel #9
0
        /// <summary>
        /// Camera preview loop: grabs frames from the camera passed in
        /// <c>e.Argument</c>, detects faces and facial features in each frame, and
        /// publishes the detection details to <c>facesView</c> until capture stops or
        /// the worker is cancelled. On error, sets <c>e.Cancel</c>.
        /// </summary>
        private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            try
            {
                activeCamera = (NCamera)e.Argument;
                activeCamera.StartCapturing();
                while (activeCamera.IsCapturing)
                {
                    if (backgroundWorker.CancellationPending)
                    {
                        activeCamera.StopCapturing();
                    }

                    if (activeCamera != null && activeCamera.IsCapturing)
                    {
                        using (NImage image = activeCamera.GetFrame())
                        {
                            video = image.ToBitmap();

                            // The grayscale conversion is now disposed too — the original
                            // leaked one NGrayscaleImage per captured frame.
                            using (NLExtractor extractor = new NLExtractor())
                            using (NGrayscaleImage grayscale = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, image))
                            {
                                extractor.MaxRecordsPerTemplate = 1;
                                // Detect all faces suitable for face recognition in the frame.
                                NleFace[] faces = extractor.DetectFaces(grayscale);
                                facedetectdetails = new NleDetectionDetails[faces.Length];
                                for (int i = 0; i < facedetectdetails.Length; i++)
                                {
                                    facedetectdetails[i] = extractor.DetectFacialFeatures(grayscale, faces[i]);
                                }
                                facesView.DrawConfidenceForEyes = true;
                                facesView.DrawFaceConfidence    = true;
                                facesView.DetectionDetails      = facedetectdetails;

                                // NOTE(review): the loop result is immediately overridden
                                // below — both Draw* flags were just set to true, so the
                                // status always ends up true. Preserved as-is; confirm the
                                // intended availability logic.
                                for (int i = 0; i < facedetectdetails.Length; i++)
                                {
                                    faceAvailabilityStatus = facedetectdetails[i].FaceAvailable;
                                }
                                if (facesView.DrawConfidenceForEyes && facesView.DrawFaceConfidence)
                                {
                                    faceAvailabilityStatus = true;
                                }
                                else
                                {
                                    faceAvailabilityStatus = false;
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Swallow capture errors but cancel the worker so the UI can recover.
                //   MessageBox.Show("Error Capturing Image - Close and re-open browser window");
                // Logger.LogError(exp.ToString());
                e.Cancel = true;
            }
        }
Beispiel #10
0
        /// <summary>
        /// Deserializes an enrolled person's fingerprint set (an ArrayList of
        /// WsqImage) and populates the per-finger picture boxes, quality labels and
        /// radio buttons, finally selecting the finger with the best quality.
        /// </summary>
        /// <param name="serializedArrayOfWSQ">BinaryFormatter-serialized ArrayList of
        /// WsqImage, or null when the person has not been enrolled.</param>
        private void processEnrolledData(byte[] serializedArrayOfWSQ)
        {
            PictureBox pb;

            ResourceManager rm = new ResourceManager("PSCBioVerification.Form1", this.GetType().Assembly);

            if (serializedArrayOfWSQ == null)
            {
                clearFingerBoxes();

                string text = rm.GetString("msgThePersonHasNotYetBeenEnrolled"); // "The person has not yet been enrolled"

                LogLine(text, true);
                ShowErrorMessage(text);
                return;
            }

            MemoryStream ms = new MemoryStream(serializedArrayOfWSQ);

            //Assembly.Load(string assemblyString)
            // Construct a BinaryFormatter and use it to deserialize the data to the stream.
            // SECURITY NOTE(review): BinaryFormatter is unsafe on untrusted input (and
            // removed in .NET 9). GenericBinder restricts the payload type, but
            // migrating to a safer serializer is advisable.
            BinaryFormatter formatter = new BinaryFormatter();

            try
            {
                formatter.Binder   = new GenericBinder <WsqImage>();
                _fingersCollection = formatter.Deserialize(ms) as ArrayList;
            }
            catch (SerializationException ex)
            {
                LogLine(ex.Message, true);
                ShowErrorMessage(ex.Message);
                return;
            }
            finally
            {
                ms.Close();
            }

            int         bestQuality = 0;
            int         bestQualityRadioButton = 0;
            RadioButton rb = null; Label lab = null; WsqImage wsqImage = null;

            // One iteration per stored finger slot; controls are located by their
            // numbered names (radioButtonN, labelN, fpPictureBoxN, lbFingerN).
            for (int i = 0; i < _fingersCollection.Count; i++)
            {
                Control[] control    = this.Controls.Find("radioButton" + (i + 1).ToString(), true);
                Control[] controlLab = this.Controls.Find("label" + (i + 1).ToString(), true);
                if (control.Length == 0)
                {
                    continue;
                }

                // NOTE(review): controlLab is indexed without a length check — this
                // assumes labelN always exists whenever radioButtonN does; confirm.
                rb  = control[0] as RadioButton;
                lab = controlLab[0] as Label;

                wsqImage = _fingersCollection[i] as WsqImage;
                if (wsqImage == null)
                {
                    rb.Enabled  = false;
                    lab.Enabled = false;
                }
                else
                {
                    rb.Enabled  = true;
                    lab.Enabled = true;
                }

                pb = this.Controls.Find("fpPictureBox" + (i + 1).ToString(), true)[0] as PictureBox;

                if (_fingersCollection[i] != null)
                {
                    try
                    {
                        ms = new MemoryStream(wsqImage.Content);

                        NImage nImage = NImageFormat.Wsq.LoadImage(ms);

                        // Substitute a plausible 500 dpi when the stored resolution is
                        // implausibly low (WSQ headers often lack it).
                        float horzResolution = nImage.HorzResolution;
                        float vertResolution = nImage.VertResolution;
                        if (horzResolution < 250)
                        {
                            horzResolution = 500;
                        }
                        if (vertResolution < 250)
                        {
                            vertResolution = 500;
                        }

                        NGrayscaleImage grayImage = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, horzResolution, vertResolution, nImage);
                        int             q         = GetImageQuality(grayImage, this.Controls.Find("lbFinger" + (i + 1).ToString(), true)[0] as Label);

                        // Track the best-quality finger for the final selection.
                        if (bestQuality < q)
                        {
                            bestQuality            = q;
                            bestQualityRadioButton = i;
                        }

                        pb.Image    = nImage.ToBitmap();
                        pb.SizeMode = PictureBoxSizeMode.Zoom;
                    }
                    catch (Exception)
                    {
                        // Skip fingers whose image cannot be decoded.
                        continue;
                    }
                    finally
                    {
                        ms.Close();
                    }
                }
                else
                {
                    pb.Image = null;
                    this.Controls.Find("lbFinger" + (i + 1).ToString(), true)[0].Text = "";
                }
            }

            stopProgressBar();

            // Select the radio button of the best-quality finger on the UI thread.
            rb = this.Controls.Find("radioButton" + (bestQualityRadioButton + 1).ToString(), true)[0] as RadioButton;
            this.BeginInvoke(new MethodInvoker(delegate() { checkRadioButton(rb.Name); }));
        }
Beispiel #11
0
        /// <summary>
        /// Handles a freshly scanned fingerprint image: shows it, extracts a template,
        /// displays the colorized result and template in nfView2, logs the outcome and
        /// dispatches to enroll or verify depending on the current mode.
        /// </summary>
        /// <param name="image">Scanned grayscale fingerprint image.</param>
        private void OnImage(NGrayscaleImage image)
        {
            clearView();
            if (nfView1.Image == null)
            {
                nfView1.Image = image.ToBitmap();
            }

            // Extraction and the color wrapper below both work on this clone.
            NGrayscaleImage resultImage = (NGrayscaleImage)image.Clone();

            try
            {
                NfeExtractionStatus extractionStatus;
                template = Data.NFExtractor.Extract(resultImage, NFPosition.Unknown, NFImpressionType.LiveScanPlain, out extractionStatus);
                if (extractionStatus != NfeExtractionStatus.TemplateCreated)
                {
                    string text = string.Format("Extraction failed: {0}", extractionStatus.ToString());
                    ShowErrorMessage(text);

                    LogLine(text, true);

                    pictureBox2.Image = Properties.Resources.redcross;

                    return;
                }
            }
            catch (Exception e)
            {
                string text = string.Format("Extraction error: {0}", e.Message);
                ShowErrorMessage(text);

                LogLine(text, true);

                pictureBox2.Image = Properties.Resources.redcross;

                return;
            }

            // Guard BEFORE cloning: the original only checked for null after calling
            // Clone(), which would have thrown NullReferenceException on a null record.
            if (template == null)
            {
                ResourceManager rm   = new ResourceManager("PSCBioVerification.Form1", this.GetType().Assembly);
                string          text = rm.GetString("msgFingerprintImageIsOfLowQuality"); // "Fingerprint image is of low quality"
                ShowErrorMessage(text);
                LogLine(text, true);

                pictureBox2.Image = Properties.Resources.redcross;

                return;
            }

            Bitmap bitmap;

            using (NImage ri = NImages.GetGrayscaleColorWrapper(resultImage, resultImageMinColor, resultImageMaxColor))
            {
                bitmap = ri.ToBitmap();
            }
            // The wrapper and bitmap are done with the clone; release it (it leaked in
            // the original).
            resultImage.Dispose();

            // Keep a private clone of the record for the view and dispose the
            // extractor-returned original so it does not leak.
            NFRecord extracted = template;
            this.template = (NFRecord)extracted.Clone();
            extracted.Dispose();

            nfView2.ResultImage = bitmap;
            if (nfView2.Template != null)
            {
                nfView2.Template.Dispose();
            }
            nfView2.Template = this.template;

            LogLine("Template extracted{0}. G: {1}. Size: {2}", true,
                    Data.NFExtractor.UseQuality ? string.Format(". Quality: {0:P0}", Helpers.QualityToPercent(template.Quality) / 100.0) : null,
                    template.G, Data.SizeToString(template.Save().Length));

            // Fixed: the original also passed LogLine's 'true' flag as the first format
            // argument here, producing "Template extractedTrue. G: ..." and shifting
            // every remaining argument by one.
            ShowStatusMessage(String.Format("Template extracted{0}. G: {1}. Size: {2}",
                                            Data.NFExtractor.UseQuality ? string.Format(". Quality: {0:P0}", Helpers.QualityToPercent(template.Quality) / 100.0) : null,
                                            template.G, Data.SizeToString(template.Save().Length)));

            switch (mode)
            {
            case ProgramMode.Enroll:
                doEnroll();
                nfView2.Zoom = 1F;
                break;

            case ProgramMode.Verify:
                doVerify();
                nfView2.Zoom = 0.5F;
                break;
            }

            WaitingForImageToScan();
        }
        // Dispatches the newly acquired grayscale frame to the flow selected by the
        // active program mode: enrollment or verification. Other modes are no-ops.
        private void OnImage(NGrayscaleImage grayscaleImage)
        {
            if (_mode == ProgramMode.Enroll)
            {
                doEnroll();
                return;
            }

            if (_mode == ProgramMode.Verify)
            {
                doVerify();
            }
        }