public JsonResult RecognizeByteArray(byte[] photo)
        {
            DataPath = Server.MapPath("~/App_Data/Faces");
            FaceRecognizer.Load(DataPath);

            // NOTE: photo.Rank is always 1 for a one-dimensional byte[], so this builds a
            // 1-pixel-tall image; the dimensions should instead match the images the
            // recognizer was trained on (see the sketch after this method).
            var depthImage = new Image <Gray, byte>(photo.Rank, (photo.Length / photo.Rank))
            {
                Bytes = photo
            };
            var predictedLabel = FaceRecognizer.Predict(depthImage).Label;
            var person         = PhotoContext.People.FirstOrDefault(x => x.Id == predictedLabel);

            return(Json(person, JsonRequestBehavior.AllowGet));
        }
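
A minimal sketch (not part of the original controller) of the same action when the caller also supplies the frame dimensions, which avoids the photo.Rank issue noted above; the width and height parameters are hypothetical additions and must describe the buffer the client actually sent.

        public JsonResult RecognizeByteArrayWithSize(byte[] photo, int width, int height)
        {
            DataPath = Server.MapPath("~/App_Data/Faces");
            FaceRecognizer.Load(DataPath);

            // Rebuild the grayscale image from the raw bytes; the buffer size must match
            // width * height for a single-channel image with no row padding.
            var faceImage = new Image<Gray, byte>(width, height) { Bytes = photo };

            var label  = FaceRecognizer.Predict(faceImage).Label;
            var person = PhotoContext.People.FirstOrDefault(x => x.Id == label);

            return Json(person, JsonRequestBehavior.AllowGet);
        }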
        public async Task <int?> RecognizeAsync(string imagePath)
        {
            await Task.CompletedTask; // no awaited work yet; keeps the async signature (see the sketch after this method)

            var faceImage = new Image <Gray, byte>(imagePath);

            faceImage = ResizeImage(faceImage);

            //var fisherFaceRecognizerResult = _fisherFaceRecognizer.Predict(faceImage);

            var lbphFaceRecognizerResult = _lbphFaceRecognizer.Predict(faceImage);

            //var id = fisherFaceRecognizerResult.Label;

            //if (id <= 0)
            //{
            //    id = lbphFaceRecognizerResult.Label;
            //}

            var id = lbphFaceRecognizerResult.Label;

            if (id > 0)
            {
                return(id);
            }

            return(null);
        }
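
A minimal sketch of the same lookup with the CPU-bound Predict call moved onto a worker thread instead of awaiting Task.CompletedTask, assuming _lbphFaceRecognizer is safe to use from a background thread; RecognizeOffThreadAsync is a hypothetical variant, not the original method.

        public async Task<int?> RecognizeOffThreadAsync(string imagePath)
        {
            var faceImage = ResizeImage(new Image<Gray, byte>(imagePath));

            // Predict is synchronous and CPU-bound, so run it off the calling thread.
            var result = await Task.Run(() => _lbphFaceRecognizer.Predict(faceImage));

            return result.Label > 0 ? result.Label : (int?)null;
        }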
Example #3
        private void LBPHFaceRecognition(object sender, EventArgs e)
        {
            Frame = _capture.QueryFrame().ToImage <Bgr, byte>();
            var frame = Frame.Resize(frameW, frameH, Inter.Cubic);

            grayFrame = frame.Convert <Gray, Byte>();
            var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

            foreach (var f in faces)
            {
                // NOTE: the recognizer is re-created and re-trained for every detected face in
                // every frame; see the sketch after this method for training it once and reusing it.
                lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
                lbphFaceRecognizer.Train(trainingImages.ToArray(), indexLabels.ToArray());

                var result = lbphFaceRecognizer.Predict(frame.Copy(f).Convert <Gray, Byte>().Resize(100, 100, Inter.Cubic));
                if (result.Label == -1)
                {
                    frame.Draw(f, new Bgr(Color.Red), 2);
                    frame.Draw("Unknown", new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
                }
                else
                {
                    frame.Draw(f, new Bgr(Color.Green), 2);
                    frame.Draw(nameLabels[result.Label], new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
                }
                alertMessage.Text = (alert + "Face recognition with the " + RecognitionType.Text + " method has started \r\n" + "Distance " + result.Distance + "\r\n Faces " + faces.Length.ToString());
            }
            imgFrame.Image = frame.Resize(imgBoxW, imgBoxH, Inter.Cubic);
        }
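
A minimal sketch of training the recognizer once (for example when the form loads) and reusing it from the handler above; the field names match the snippet, while InitRecognizer itself is a hypothetical helper.

        private void InitRecognizer()
        {
            // Train once over the whole training set instead of once per detected face per frame.
            lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
            lbphFaceRecognizer.Train(trainingImages.ToArray(), indexLabels.ToArray());
        }

        // Inside the foreach over faces, the per-face work then reduces to:
        // var result = lbphFaceRecognizer.Predict(
        //     frame.Copy(f).Convert<Gray, byte>().Resize(100, 100, Inter.Cubic));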
 public FaceRecognitionResult Recognize(Image <Gray, byte> faceImage)
 {
     lock (_sync) {
         if (_shouldTrain)
         {
             if (!Train())
             {
                 return(new FaceRecognitionResult {
                     Status = FaceRecognitionStatus.Someone
                 });
             }
         }
         if (_images.Any())
         {
             var result = _faceRecognizer.Predict(faceImage);
             if (result.Label > 0)
             {
                 return(new FaceRecognitionResult {
                     Status = FaceRecognitionStatus.IdentifiedUser,
                     Label = _labels[result.Label]
                 });
             }
         }
         return(new FaceRecognitionResult {
             Status = FaceRecognitionStatus.Someone
         });
     }
 }
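
The check above accepts any positive label no matter how weak the match was. A minimal sketch of also gating on the prediction distance, meant to replace the body inside the _images.Any() check; maxDistance is a hypothetical tuning constant, not a value from the original code.

     const double maxDistance = 80.0; // hypothetical threshold; tune per recognizer and data set

     var result = _faceRecognizer.Predict(faceImage);
     if (result.Label > 0 && result.Distance < maxDistance)
     {
         return new FaceRecognitionResult {
             Status = FaceRecognitionStatus.IdentifiedUser,
             Label  = _labels[result.Label]
         };
     }
     return new FaceRecognitionResult { Status = FaceRecognitionStatus.Someone };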
 public FaceRecognizer.PredictionResult Recognition(Image <Gray, Byte> image)
 {
     FaceRecognizer.PredictionResult r;
     r = recognizer.Predict(image);
     //Console.WriteLine("labels:" + r.Label + " and distance:" + r.Distance);
     return(r);
 }
 public int RecognizeFace(Image <Gray, byte> grayImage)
 {
     FaceRecognizer.PredictionResult result = recognizer.Predict(grayImage.Resize(100, 100, Inter.Cubic));
     if (result.Label != -1)
     {
         return(result.Label);
     }
     return(-1);
 }
        public void recognize_and_draw(DrawingContext dc, ref WriteableBitmap color_frame, int display_width, int display_height)
        {
            if (counter % 5 == 0)
            {
                // Get the current frame
                this.frame          = writable_bitmap_to_image(color_frame);
                this.small_frame    = this.frame.Convert <Gray, byte>().Resize(input_width, input_height, Inter.Cubic);
                this.faces_detected = face_finder.DetectMultiScale(small_frame, 1.5, 10, this.face_size);
            }
            // for each face detected
            foreach (System.Drawing.Rectangle f in faces_detected)
            {
                Rect outline = conv_rectangle(f, display_width, display_height);
                dc.DrawRectangle(null, face_outline_pen, outline);

                Image <Gray, byte> face = small_frame.Copy(f).Resize(100, 100, Inter.Cubic);

                if (training_images.Count == 0)
                {
                    add_new_person(frame, face);
                }

                FaceRecognizer.PredictionResult pred = face_recognizer.Predict(face);

                string name;
                if (pred.Distance < recognized_threshold)
                {
                    name = training_labels[pred.Label];
                    if (pred.Distance > add_new_training_threshold)
                    {
                        add_training_image(face, pred.Label);
                    }
                }
                else
                {
                    int new_label = add_new_person(frame, face);
                    name = training_labels[new_label];
                }
                // Debug.WriteLine("{0} {1} {2}", training_labels[pred.Label], pred.Label, pred.Distance);

                // Draw the label for each face detected and recognized
                dc.DrawText(

                    new FormattedText(
                        name,
                        mainWindow.cultureInfo,
                        FlowDirection.LeftToRight,
                        mainWindow.font,
                        mainWindow.drawTextFontSize,
                        Brushes.White
                        ),
                    conv_point(f.X, f.Y, display_width, display_height)
                    );
            }
            counter++;
        }
Example #8
        int CheckForMatch(Image <Gray, Byte> i)
        {
            i = new Image <Gray, Byte>(CropFace(i.Bitmap, 300));
            //   var EigenRes = faceRecognizerEigen.Predict(i);
            var LBPHRes = faceRecognizerLBPH.Predict(i);

            // var FisherRes = faceRecognizerFisher.Predict(i);

            return(LBPHRes.Label);
        }
Example #9
        public int RecognizeUser(Image <Gray, byte> userImage)
        {
            /*  Stream stream = new MemoryStream();
             * stream.Write(userImage, 0, userImage.Length);
             * var faceImage = new Image<Gray, byte>(new Bitmap(stream));*/

            //_faceRecognizer.Load(_recognizerFilePath);
            //var result = _faceRecognizer.Predict(userImage.Resize(200, 200, Inter.Cubic));

            _LBPHFaceRecognizer.Load(_recognizerFilePath);
            var result = _LBPHFaceRecognizer.Predict(userImage);

            return(result.Label);
        }
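
Loading the trained model from disk on every call is the slowest part of the method above. A minimal sketch of the same lookup with the model read only once; _recognizerLoaded and RecognizeUserCached are hypothetical names.

        private bool _recognizerLoaded;

        public int RecognizeUserCached(Image<Gray, byte> userImage)
        {
            if (!_recognizerLoaded)
            {
                _LBPHFaceRecognizer.Load(_recognizerFilePath); // Read(...) in newer Emgu CV versions
                _recognizerLoaded = true;
            }

            return _LBPHFaceRecognizer.Predict(userImage).Label;
        }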
Example #10
        private void Recognize_Face(object sender, RoutedEventArgs e)
        {
            // Convert to grayscale
            var image = original.ToImage <Bgr, byte>();
            var gray  = image.Convert <Gray, byte>();

            // Detect face(s)
            var faceRects = detector.DetectMultiScale(gray, 1.4, 4);

            // Recognize faces and label
            var filtered = Filter(faceRects);

            foreach (var faceRect in filtered)
            {
                var scaled = gray.GetSubRect(faceRect).Resize(100, 100, Emgu.CV.CvEnum.Inter.Area);
                var result = recognizer.Predict(scaled);

                // Ignore if above threshold
                if (result.Distance >= 90)
                {
                    continue;
                }

                // Frame using rectangle
                image.Draw(faceRect, new Bgr(Color.Green), 2);

                // Get label
                string name;
                if (!names.TryGetValue(result.Label, out name))
                {
                    name = "Unknown";
                }

                // Format label and accuracy
                name = string.Format("{0} - {1:f2}", name, result.Distance);

                // Draw text
                int y = Math.Max(0, faceRect.Y - 10);
                image.Draw(name,
                           new System.Drawing.Point(faceRect.X, y),
                           Emgu.CV.CvEnum.FontFace.HersheyTriplex, 0.6,
                           new Bgr(System.Drawing.Color.Yellow));
            }

            imageBox.Source = ToBitmapSource(image);
        }
Example #11
        /// <summary>
        /// Face recognition based on Local Binary Pattern Histogram
        /// (LBPH) classifier </summary>
        /// <param name="labels">The set of labels in the training set</param>
        /// <param name="trainingImages">The set of images (faces) in the
        /// training set</param>
        /// <param name="face">The detected face, in grayscale,
        /// to be recognized. Its dimensions must be
        /// equal to the dimensions of the images in the training set</param>
        /// <returns>A string representing the label of the face recognized
        /// or an empty string if no matches were found</returns>
        public String recognizeLBPHFace(List <String> labels,
                                        List <Image <Gray, Byte> > trainingImages,
                                        Bitmap face)
        {
            String             label         = String.Empty;
            Image <Bgr, Byte>  imageEmgu     = new Image <Bgr, Byte>(face);
            Image <Gray, Byte> extractedFace = imageEmgu.Convert <Gray, Byte>().Copy().Resize(
                100, 100, INTER.CV_INTER_CUBIC);

            extractedFace._EqualizeHist();
            InitParams();

            if (trainingImages.ToArray().Length != 0)
            {
                LBPHFaceRecognizer recognizer = new LBPHFaceRecognizer(
                    1, 8, 8, 8, LBPHTreshold);
                // Each training image is given its own integer label (0, 1, 2, ...), so every
                // image is treated as a separate identity; see the sketch after this method
                // for sharing one label per person.
                int[] labelsInt = new int[labels.Count];
                for (int i = 0; i < labels.Count; i++)
                {
                    labelsInt[i] = i;
                }
                recognizer.Train(trainingImages.ToArray(), labelsInt.ToArray());
                LBPHFaceRecognizer.PredictionResult pr;
                pr = recognizer.Predict(extractedFace);
                if (pr.Label != -1)
                {
                    label                   = labels[pr.Label];
                    MostSimilarFace         = trainingImages[pr.Label];
                    MostSimilarFaceIndex    = pr.Label;
                    MostSimilarFaceDistance = (float)pr.Distance;
                    MostSimilarFaceLabel    = labels[pr.Label];
                }
                else
                {
                    recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 10000);
                    recognizer.Train(trainingImages.ToArray(), labelsInt.ToArray());
                    pr = recognizer.Predict(extractedFace);
                    MostSimilarFace         = trainingImages[pr.Label];
                    MostSimilarFaceIndex    = pr.Label;
                    MostSimilarFaceDistance = (float)pr.Distance;
                    MostSimilarFaceLabel    = labels[pr.Label];
                }
            }
            return(label);
        }
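
The labelsInt array above gives every training image its own label, so each image is treated as a separate identity. A minimal sketch, assuming the labels list can contain the same person name more than once, of mapping names to shared integer labels instead.

        // Build one integer id per distinct name so all of a person's images share a label.
        var nameToId  = new Dictionary<string, int>();
        var labelsInt = new int[labels.Count];
        for (int i = 0; i < labels.Count; i++)
        {
            if (!nameToId.TryGetValue(labels[i], out int id))
            {
                id = nameToId.Count;
                nameToId[labels[i]] = id;
            }
            labelsInt[i] = id;
        }
        // recognizer.Predict(...).Label then returns one of these ids, which maps back
        // to a name through nameToId.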
Example #12
        public RecognizeResult Recognize(Image <Gray, Byte> image)
        {
            RecognizeResult result = new RecognizeResult();

            if (!IsTrained)
            {
                return(result);
            }

            FaceRecognizer.PredictionResult predictionResult = recognizer.Predict(image);

            if (predictionResult.Label == -1)
            {
                result.Name = "unknown";
                return(result);
            }

            result.Name     = names[predictionResult.Label];
            result.Distance = predictionResult.Distance;

            return(result);
        }
Example #13
        public IIdentifier GetIdentity(IFaceImage img)
        {
            IIdentifier answ   = new Identifier(int.MinValue);
            var         labels = _labelRepo.GetAll();

            foreach (var label in labels)
            {
                IEnumerable <IFaceImage>   batch     = label.Images;
                List <Image <Gray, Byte> > compBatch = new List <Image <Gray, Byte> >();
                List <int> trainingLabels            = new List <int>();

                int enumerator = 0;
                foreach (var current in batch)
                {
                    compBatch.Add(new Image <Gray, Byte>(current.ImageOfFace));
                    trainingLabels.Add(enumerator++);
                }

                // NOTE: a new recognizer is built and trained for every stored label on every
                // call; see the sketch after this method for one recognizer trained across all labels.
                FaceRecognizer recognizer = new LBPHFaceRecognizer(_recognizerRadius, _recognizerNeighbors,
                                                                   _recognizerGridX, _recognizerGridY, _recognizerThreshold);

                recognizer.Train(compBatch.ToArray(), trainingLabels.ToArray());

                PredictionResult result = recognizer.Predict(new Image <Gray, Byte>(img.ImageOfFace));
                if (result.Distance < _minDistanse)   // NOTE: _minDistanse is a field, so its value persists between calls
                {
                    _minDistanse = result.Distance;
                    answ         = label.Id;
                }
            }
            if (_minDistanse < _requiredDistance)
            {
                return(answ);
            }
            else
            {
                return(new Identifier(-1));
            }
        }
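
An alternative to the per-label loop above, offered only as a sketch: train a single LBPH recognizer across every stored label and use the person's numeric id as the training label. ToNumericId() is a hypothetical conversion from the repository's identifier type; the other names reuse the fields from the method above.

        public IIdentifier GetIdentitySingleRecognizer(IFaceImage img)
        {
            var images = new List <Image <Gray, Byte> >();
            var ids    = new List <int>();

            foreach (var label in _labelRepo.GetAll())
            {
                foreach (var faceImage in label.Images)
                {
                    images.Add(new Image <Gray, Byte>(faceImage.ImageOfFace));
                    ids.Add(label.Id.ToNumericId()); // hypothetical: one int per person
                }
            }

            FaceRecognizer recognizer = new LBPHFaceRecognizer(_recognizerRadius, _recognizerNeighbors,
                                                               _recognizerGridX, _recognizerGridY, _recognizerThreshold);
            recognizer.Train(images.ToArray(), ids.ToArray());

            var result = recognizer.Predict(new Image <Gray, Byte>(img.ImageOfFace));
            return result.Distance < _requiredDistance
                ? new Identifier(result.Label)
                : new Identifier(-1);
        }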
        public Variables.RecognitionResult Recognise(Image <Gray, byte> Image)
        {
            if (Loaded)
            {
                FaceRecognizer.PredictionResult EgienRes  = eigen.Predict(Image);
                FaceRecognizer.PredictionResult FisherRes = fisher.Predict(Image);
                FaceRecognizer.PredictionResult LbRes     = Lp.Predict(Image);

                if (EgienRes.Label == -1)
                {
                    Eigen_label    = "Unknown";
                    Eigen_Distance = 0;
                    return(new RecognitionResult()
                    {
                        Label = Eigen_label, Int = 0
                    });
                }
                else
                {
                    //TODO : Equalize All Labels Problems
                    Eigen_label = ListOFNames[EgienRes.Label];
                    if (EgienRes.Label != -1 && FisherRes.Label != -1 && LbRes.Label != -1)
                    {
                        if (EgienRes.Label == LbRes.Label && FisherRes.Label == EgienRes.Label)
                        {
                            return(new RecognitionResult()
                            {
                                Label = Eigen_label, Int = (int)EgienRes.Distance
                            });
                        }
                        else if (EgienRes.Distance > Eigen_threshold &&
                                 FisherRes.Distance > 3000 ||
                                 LbRes.Distance > 100)  // NOTE: && binds tighter than ||, so a high LBPH distance alone takes this branch
                        {
                            return(new RecognitionResult()
                            {
                                Label = Eigen_label, Int = (int)EgienRes.Distance
                            });
                        }
                        else
                        {
                            return(new RecognitionResult()
                            {
                                Label = "Unkown", Int = 0
                            });
                        }
                    }
                    else if (EgienRes.Label != -1)
                    {
                        if (EgienRes.Distance > Eigen_threshold &&
                            (int)FisherRes.Distance > 3000 &&
                            (int)LbRes.Distance > 100)
                        {
                            return(new RecognitionResult()
                            {
                                Label = Eigen_label, Int = (int)EgienRes.Distance
                            });
                        }
                    }


                    return(new RecognitionResult()
                    {
                        Label = "Unkown", Int = 0
                    });
                }
            }
            else
            {
                return(new RecognitionResult()
                {
                    Label = "Unkown", Int = 0, HasError = true, ErrorMessage = "Not Trained"
                });
            }
        }
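
The branching above mixes agreement checks with per-recognizer distance thresholds in a way that is hard to follow. A minimal sketch, assuming a lower Predict distance means a better match for each recognizer, of a simpler rule: accept a label only when at least two recognizers agree and pass their own thresholds. The three threshold parameters are hypothetical tuning constants, and the helper uses System.Linq.

        private static int CombineByVote(FaceRecognizer.PredictionResult eigen,
                                         FaceRecognizer.PredictionResult fisher,
                                         FaceRecognizer.PredictionResult lbph,
                                         double eigenThreshold, double fisherThreshold, double lbphThreshold)
        {
            var votes = new List <int>();
            if (eigen.Label != -1 && eigen.Distance < eigenThreshold)    { votes.Add(eigen.Label); }
            if (fisher.Label != -1 && fisher.Distance < fisherThreshold) { votes.Add(fisher.Label); }
            if (lbph.Label != -1 && lbph.Distance < lbphThreshold)       { votes.Add(lbph.Label); }

            // Return the label backed by at least two recognizers, otherwise -1 (unknown).
            return votes.GroupBy(v => v)
                        .Where(g => g.Count() >= 2)
                        .Select(g => g.Key)
                        .DefaultIfEmpty(-1)
                        .First();
        }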
Example #15
        private void ProcessFrame2(object sender, EventArgs arg)
        {
            if (comboBoxCapture.Text == "Camera")
            {
                image = _capture.RetrieveBgrFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }


            if (comboBoxCapture.Text == "Single Image")
            {
                OpenFileDialog Openfile = new OpenFileDialog();
                if (Openfile.ShowDialog() == DialogResult.OK)
                {
                    image = new Image <Bgr, byte>(Openfile.FileName);
                }
            }

            List <Rectangle> faces = new List <Rectangle>();
            List <Rectangle> eyes  = new List <Rectangle>();
            long             detectionTime;

            DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
            foreach (Rectangle face in faces)
            {
                //Set the image ROI to each detected face
                if (workCorruptedImages.Checked == true)
                {
                    image.ROI = face;
                }
                if (faceRecog.Checked == true)
                {
                    //the program now applies the selected algorithm once recognition has started

                    //For SURF Algorithm
                    if (comboBoxAlgorithm.Text == "SURF Feature Extractor")
                    {
                        string   dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
                        string[] files         = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);

                        foreach (var file in files)
                        {
                            richTextBox1.Text += file.ToString();
                            long recpoints;
                            Image <Bgr, Byte> sampleImage = new Image <Bgr, Byte>(file);
                            secondImageBox.Image = sampleImage;
                            using (Image <Gray, Byte> modelImage = sampleImage.Convert <Gray, Byte>())
                                using (Image <Gray, Byte> observedImage = image.Convert <Gray, Byte>())
                                {
                                    Image <Bgr, byte> result = SurfRecognizer.Draw(modelImage, observedImage, out recpoints);
                                    //captureImageBox.Image = observedImage;
                                    if (recpoints > 10)
                                    {
                                        MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
                                        result.Draw("Person Recognited, Welcome", ref f, new Point(40, 40), new Bgr(0, 255, 0));
                                        ImageViewer.Show(result, String.Format(" {0} Points Recognited", recpoints));
                                    }
                                }
                        }
                    }
                    //For EigenFaces
                    else if (comboBoxAlgorithm.Text == "EigenFaces")
                    {
                        CvInvoke.cvResetImageROI(image);
                        //image._EqualizeHist();
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = eigenFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            image.Draw(eigenlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                        }
                    }
                    //For FisherFaces
                    else if (comboBoxAlgorithm.Text == "FisherFaces")
                    {
                        CvInvoke.cvResetImageROI(image);
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = fisherFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            image.Draw(fisherlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                        }
                    }

                    //For LBPH
                    else if (comboBoxAlgorithm.Text == "LBPHFaces")
                    {
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = lbphFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            CvInvoke.cvResetImageROI(image);
                            image.Draw(lbphlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                            label7.Text = lbphlabels[result.Label].ToString();
                        }
                    }
                }

                CvInvoke.cvResetImageROI(image);
                image.Draw(face, new Bgr(Color.Red), 2);
            }
            captureImageBox.Image = image;
        }
Example #16
        private void ProcessFrame(object sender, EventArgs e)
        {
            //1: Grab the frame for the picture box
            if (videoCapture != null && videoCapture.Ptr != IntPtr.Zero)
            {
                videoCapture.Retrieve(frame, 0);
                currentFrame = frame.ToImage <Bgr, Byte>().Resize(picCapture.Width, picCapture.Height, Inter.Cubic);

                //2: Detect faces
                if (facesDetectionEnabled)
                {
                    //Convert from Bgr (color) to Gray (black and white)
                    Mat grayImage = new Mat();
                    CvInvoke.CvtColor(currentFrame, grayImage, ColorConversion.Bgr2Gray);
                    //Improve the image quality
                    CvInvoke.EqualizeHist(grayImage, grayImage);

                    Rectangle[] faces = faceCasacdeClassifier.DetectMultiScale(grayImage, 1.1, 3, Size.Empty, Size.Empty);
                    //If a face was detected
                    if (faces.Length > 0)
                    {
                        foreach (var face in faces)
                        {
                            //Draw a rectangle around the face
                            CvInvoke.Rectangle(currentFrame, face, new Bgr(Color.Red).MCvScalar, 2);

                            //3: Add images for training
                            //Add the image to the comparison picture box
                            Image <Bgr, Byte> resultImage = currentFrame.Convert <Bgr, Byte>();
                            resultImage.ROI      = face;
                            picDetected.SizeMode = PictureBoxSizeMode.StretchImage;
                            picDetected.Image    = resultImage.Bitmap;

                            if (EnableSaveImage)
                            {
                                //Create the training-image folder if it does not exist yet
                                string path = Directory.GetCurrentDirectory() + @"\TrainedImages";
                                if (!Directory.Exists(path))
                                {
                                    Directory.CreateDirectory(path);
                                }
                                //Save 10 images for face training
                                //Start a new task so the GUI does not freeze
                                Task.Factory.StartNew(() => {
                                    for (int i = 0; i < 10; i++)
                                    {
                                        //resize the image when saving it; ten shots, one per second
                                        resultImage.Resize(200, 200, Inter.Cubic).Save(path + @"\" + txtPersonName.Text + "_" + DateTime.Now.ToString("dd-MM-yyyy-HH-mm-ss") + ".jpg");
                                        Thread.Sleep(1000);
                                        if (i == 9)
                                        {
                                            MessageBox.Show("Training complete");
                                        }
                                    }
                                });
                            }
                            EnableSaveImage = false;

                            if (btnAddPerson.InvokeRequired)
                            {
                                btnAddPerson.Invoke(new ThreadStart(delegate {
                                    btnAddPerson.Enabled = true;
                                }));
                            }

                            // 5: Recognize the face
                            if (isTrained)
                            {
                                Image <Gray, Byte> grayFaceResult = resultImage.Convert <Gray, Byte>().Resize(200, 200, Inter.Cubic);
                                CvInvoke.EqualizeHist(grayFaceResult, grayFaceResult);
                                var result    = recognizer.Predict(grayFaceResult);
                                var LBPresult = recognizerLBPH.Predict(grayFaceResult);
                                pictureBox1.Image = grayFaceResult.Bitmap;
                                pictureBox2.Image = TrainedFaces[LBPresult.Label].Bitmap;
                                Debug.WriteLine(result.Label + ". " + result.Distance);
                                //Result of the face recognition
                                if (LBPresult.Label != -1 && LBPresult.Distance < 500)
                                {
                                    CvInvoke.PutText(currentFrame, PersonsNames[LBPresult.Label], new Point(face.X - 2, face.Y - 2),
                                                     FontFace.HersheyComplex, 1.0, new Bgr(Color.Orange).MCvScalar);
                                    CvInvoke.Rectangle(currentFrame, face, new Bgr(Color.Green).MCvScalar, 2);
                                    //MessageBox.Show(PersonsNames[LBPresult.Label]);
                                    if (PersonsNames[LBPresult.Label] != null)
                                    {
                                        txtFaceID.Invoke(new MethodInvoker(delegate()
                                        {
                                            txtFaceID.Text = PersonsNames[LBPresult.Label].ToString();
                                            results.Add(txtFaceID.Text);
                                        }));
                                        PersonsNames[LBPresult.Label] = null;
                                    }
                                    else
                                    {
                                        if (results.Any())
                                        {
                                            string   res = results.FirstOrDefault(x => x != null);
                                            SINHVIEN sv  = TimKiem(txtFaceID.Text);
                                            Invoke(new Action(() =>
                                            {
                                                ShowInfo(sv);
                                            }));
                                            AddCheckIn(sv);
                                            Flat = false;
                                            videoCapture.Stop();
                                        }
                                    }
                                }
                                //If the face is not in the data
                                else
                                {
                                    CvInvoke.PutText(currentFrame, "Unknown", new Point(face.X - 2, face.Y - 2),
                                                     FontFace.HersheyComplex, 1.0, new Bgr(Color.Orange).MCvScalar);
                                    CvInvoke.Rectangle(currentFrame, face, new Bgr(Color.Red).MCvScalar, 2);
                                }
                            }
                        }
                    }
                }

                //Show the captured video on the picCapture picture box
                picCapture.Image = currentFrame.Bitmap;
            }

            //Dispose of the current frame after processing
            if (currentFrame != null)
            {
                currentFrame.Dispose();
            }
        }
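
A minimal sketch (not taken from the original project) of the kind of training step that would populate TrainedFaces, PersonsNames, recognizerLBPH and isTrained used above, assuming the images were saved to TrainedImages as <PersonName>_<timestamp>.jpg by the saving loop; the 500 threshold only mirrors the distance check above.

        private void TrainRecognizer()
        {
            TrainedFaces.Clear();
            PersonsNames.Clear();
            var labels = new List <int>();

            string path = Directory.GetCurrentDirectory() + @"\TrainedImages";
            int    id   = 0;
            foreach (string file in Directory.GetFiles(path, "*.jpg"))
            {
                // One label per saved image; PersonsNames[label] recovers the name at prediction time.
                TrainedFaces.Add(new Image <Gray, byte>(file).Resize(200, 200, Inter.Cubic));
                PersonsNames.Add(Path.GetFileNameWithoutExtension(file).Split('_')[0]);
                labels.Add(id++);
            }

            if (TrainedFaces.Count > 0)
            {
                recognizerLBPH = new LBPHFaceRecognizer(1, 8, 8, 8, 500);
                recognizerLBPH.Train(TrainedFaces.ToArray(), labels.ToArray());
                isTrained = true;
            }
        }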
        public async Task <KeyValuePair <UserDTO, int> > Predict(Image <Gray, byte> image, StateType type, Image <Bgr, byte> originalImage)
        {
            try
            {
                var res          = faceRecognizer.Predict(image);
                var fisherResult = fisherRecognizer.Predict(image);   // NOTE: computed but not used below
                var lbphResult   = LBPHFaceRecognizer.Predict(image);
                var retValue     = new KeyValuePair <UserDTO, int>(null, (int)res.Distance);


                if (res.Distance < 3500 && lbphResult.Distance < LBPHThershold)
                {
                    var label = (await _labelService.Get <List <LabelDTO> >(new LabelSearchRequest {
                        UserLabel = res.Label
                    })).FirstOrDefault();
                    var user  = await _userService.GetById <UserDTO>(label.UserId);

                    if (user != null)
                    {
                        // find log
                        var logs = (await _archiveService.Get <List <LogDTO> >(new LogSearchRequest {
                            Entered = true, UserId = user.Id, Left = false
                        })).FirstOrDefault();

                        if (type == StateType.Left)
                        {
                            //check if user has already entered the house
                            if (logs != null)
                            {
                                var updateLog = new LogInsertRequest
                                {
                                    UserId      = logs.UserId,
                                    EnteredDate = logs.EnteredDate,
                                    LeftDate    = DateTime.Now,
                                    Entered     = logs.Entered,
                                    Left        = true,
                                    Picture     = logs.Picture
                                };
                                // update the log that the user has left the house at DateTime.Now
                                var resUpdate = await _archiveService.Update <LogDTO>(logs.Id, updateLog);

                                if (resUpdate == null)
                                {
                                    return(new KeyValuePair <UserDTO, int>(null, 0));
                                }
                                else
                                {
                                    return(new KeyValuePair <UserDTO, int>(user, (int)res.Distance));
                                }
                            }
                        }
                        // if there is no log yet, the user has not entered
                        else if (logs == null)
                        {
                            LogDTO result = null;
                            using (var ms = new MemoryStream())
                            {
                                Bitmap bitmap = originalImage.ToBitmap();
                                bitmap.Save(ms, ImageFormat.Png);
                                byte[] myArray     = ms.ToArray();
                                string bytePicture = Convert.ToBase64String(myArray);
                                //create a log that user has entered
                                result = await _archiveService.Insert <LogDTO>(new LogInsertRequest
                                {
                                    EnteredDate = DateTime.Now,
                                    Picture     = bytePicture,
                                    UserId      = user.Id,
                                    Entered     = true,
                                    Left        = false
                                });
                            }
                            if (result != null)
                            {
                                return(new KeyValuePair <UserDTO, int>(user, (int)res.Distance));
                            }
                        }
                    }
                    return(new KeyValuePair <UserDTO, int>(user, (int)res.Distance));
                }
                //if the user was recognized by one of the algorithms but not both, they are in the DB and the picture cannot be treated as a visitor
                else if (res.Distance > threshold && lbphResult.Distance < LBPHThershold)
                {
                    return(new KeyValuePair <UserDTO, int>(null, int.MaxValue));
                }
                //return retValue;
                // return a key pair telling the caller that the face was not recognized, so the
                // program can continue processing the given picture
                return(new KeyValuePair <UserDTO, int>(null, (int)res.Distance));
            }
            catch (Exception e)
            {
                var user = await(new APIService("token")).Insert <AppCore.Requests.UserDTO>(new AppCore.Requests.UserInsertRequest {
                    UserName = "******", Password = "******"
                });
                if (user != null)
                {
                    APIService.Token = "Bearer " + user.Token;
                }
                faceRecognizer.Read(Application.StartupPath + @"/../../Images/eigenRecognizer.yml");
                fisherRecognizer.Read(Application.StartupPath + @"/../../Images/fisherRecognizer.yml");
                LBPHFaceRecognizer.Read(Application.StartupPath + @"/../../Images/lpbhRecognizer.yml");
                return(new KeyValuePair <UserDTO, int>(null, int.MaxValue));
            }
        }