Code example #1
        private void FisherFaceRecognition(object sender, EventArgs e)
        {
            // Grab the current camera frame, scale it to the working size and
            // convert it to grayscale before running the Haar cascade detector.
            Frame = _capture.QueryFrame().ToImage<Bgr, byte>();
            var frame = Frame.Resize(frameW, frameH, Inter.Cubic);

            grayFrame = frame.Convert<Gray, Byte>();
            var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

            foreach (var f in faces)
            {
                // Note: the recognizer is re-created and re-trained for every detected face
                // on every frame; training it once after the training set is loaded is cheaper.
                fisherFaceRecognizer = new FisherFaceRecognizer(Count, double.PositiveInfinity);
                fisherFaceRecognizer.Train(trainingImages.ToArray(), indexLabels.ToArray());

                var result = fisherFaceRecognizer.Predict(frame.Copy(f).Convert<Gray, Byte>().Resize(100, 100, Inter.Cubic));
                if (result.Label == -1)
                {
                    frame.Draw(f, new Bgr(Color.Red), 2);
                    frame.Draw("Unknown", new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, LineType.EightConnected, false);
                }
                else
                {
                    frame.Draw(f, new Bgr(Color.Green), 2);
                    frame.Draw(nameLabels[result.Label], new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, LineType.EightConnected, false);
                }
                // Status text (Thai): "Face Recognition with the <RecognitionType> method has started"
                alertMessage.Text = (alert + "เริ่มการ Face Recognition ด้วยวิธีการ " + RecognitionType.Text + " แล้ว \r\n" + "Distance " + result.Distance + "\r\n Faces " + faces.Length.ToString());
            }


            imgFrame.Image = frame.Resize(imgBoxW, imgBoxH, Inter.Cubic);
        }
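Example #1 assumes that trainingImages, indexLabels, nameLabels and Count are populated elsewhere in the form. A minimal sketch of how such a training set might be loaded; the LoadTrainingSet helper and the folder-per-person layout are assumptions, not part of the original project:

        // Hypothetical helper illustrating the fields example #1 relies on.
        // Assumes one subfolder per person under 'root', each holding .jpg face images.
        private void LoadTrainingSet(string root)
        {
            trainingImages.Clear();   // List<Image<Gray, byte>>
            indexLabels.Clear();      // List<int>
            nameLabels.Clear();       // List<string>

            string[] personDirs = Directory.GetDirectories(root);
            for (int label = 0; label < personDirs.Length; label++)
            {
                nameLabels.Add(Path.GetFileName(personDirs[label]));
                foreach (string file in Directory.GetFiles(personDirs[label], "*.jpg"))
                {
                    // same 100x100 grayscale format that Predict receives in example #1
                    trainingImages.Add(new Image<Gray, byte>(file).Resize(100, 100, Inter.Cubic));
                    indexLabels.Add(label);
                }
            }

            // 'Count' is passed to the FisherFaceRecognizer constructor above,
            // presumably as the number of components to keep.
            Count = nameLabels.Count;
        }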
Code example #2
File: Form1.cs  Project: FinchYang/study
 private void buttoncompare_Click(object sender, EventArgs e)
 {
     try
     {
         UpdateStatus(string.Format("check :{0}", 111));
         // Save the current picture to a temporary .jpg file so it can be
         // reloaded as a grayscale Emgu CV image for prediction.
         var filename = Path.GetTempFileName() + ".jpg";
         pictureBoxcurrentimage.Image.Save(filename);
         UpdateStatus(string.Format("check :{0}", 222));
         var res = recognizer.Predict(new Image<Gray, Byte>(filename));
         UpdateStatus(string.Format("{0},Distance={1},Label={2},{3}", Environment.NewLine, res.Distance, res.Label, res));
     }
     catch (Exception ex)
     {
         UpdateStatus(string.Format("exception :{0}", ex.Message));
     }
 }
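As a sketch only, the temp-file round trip in example #2 could be avoided by feeding the PictureBox bitmap straight to the recognizer, assuming the same recognizer field and the Image<TColor, TDepth>(Bitmap) constructor used in example #3 below:

     // Sketch: predict straight from the current PictureBox image, no temp file.
     using (var bmp = new Bitmap(pictureBoxcurrentimage.Image))
     using (var gray = new Image<Gray, Byte>(bmp))
     {
         var res = recognizer.Predict(gray);
         UpdateStatus(string.Format("Distance={0}, Label={1}", res.Distance, res.Label));
     }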
Code example #3
        /// <summary>
        /// Face recognition based on the Fisherfaces classifier.</summary>
        /// <param name="labels">The set of labels in the training set</param>
        /// <param name="trainingImages">The set of images (faces) in the
        /// training set</param>
        /// <param name="face">The detected face to be recognized; it is converted
        /// to grayscale, and its dimensions must be equal to the dimensions of
        /// the images in the training set</param>
        /// <returns>A string representing the label of the recognized face,
        /// or an empty string if no match was found</returns>
        public String recognizeFisherFace(List <String> labels,
                                          List <Image <Gray, Byte> > trainingImages,
                                          Bitmap face)
        {
            String label = String.Empty;

            InitParams();
            Image <Bgr, Byte>  imageEmgu     = new Image <Bgr, Byte>(face);
            Image <Gray, Byte> extractedFace = imageEmgu.Convert <Gray, Byte>().Copy().Resize(
                100, 100, INTER.CV_INTER_CUBIC);

            extractedFace._EqualizeHist();

            if (trainingImages.ToArray().Length != 0)
            {
                FisherFaceRecognizer recognizer = new FisherFaceRecognizer(0, Treshold);
                // Each training image gets its own label (its index), so Predict
                // returns the index of the most similar training image.
                int[] labelsInt = new int[labels.Count];
                for (int i = 0; i < labels.Count; i++)
                {
                    labelsInt[i] = i;
                }
                recognizer.Train(trainingImages.ToArray(), labelsInt);
                FisherFaceRecognizer.PredictionResult pr;
                pr = recognizer.Predict(extractedFace);
                if (pr.Label != -1)
                {
                    label                   = labels[pr.Label];
                    MostSimilarFace         = trainingImages[pr.Label];
                    MostSimilarFaceIndex    = pr.Label;
                    MostSimilarFaceDistance = (float)pr.Distance;
                    MostSimilarFaceLabel    = labels[pr.Label];
                }
                else
                {
                    // No match within Treshold: retrain with a very large threshold so the
                    // closest training face is still reported, but keep the returned label empty.
                    recognizer = new FisherFaceRecognizer(0, 10000);
                    recognizer.Train(trainingImages.ToArray(), labelsInt);
                    pr = recognizer.Predict(extractedFace);
                    MostSimilarFace         = trainingImages[pr.Label];
                    MostSimilarFaceIndex    = pr.Label;
                    MostSimilarFaceDistance = (float)pr.Distance;
                    MostSimilarFaceLabel    = labels[pr.Label];
                }
            }
            return(label);
        }
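A hypothetical call to recognizeFisherFace; the label list and training images are assumed to be parallel collections of 100x100 grayscale faces, and LoadTrainingFaces / CaptureFace are placeholder helpers, not part of the original class:

        // Hypothetical usage; LoadTrainingFaces and CaptureFace are placeholders.
        List<String> labels = new List<String> { "Alice", "Bob" };
        List<Image<Gray, Byte>> trainingImages = LoadTrainingFaces();   // one 100x100 grayscale face per label
        Bitmap detectedFace = CaptureFace();                            // face cropped by a detector

        String name = recognizeFisherFace(labels, trainingImages, detectedFace);
        if (name == String.Empty)
            Console.WriteLine("No match within the threshold");
        else
            Console.WriteLine("Recognized as: " + name);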
Code example #4
        public int FisherRecognize(IImage testimage)
        {
            try
            {
                if (_IsTrained)
                {
                    FaceRecognizer.PredictionResult FR = f_recognize.Predict(testimage);
                    Console.WriteLine(FR.Label);

                    return(FR.Label);
                }
                else
                {
                    // 0 is also returned when the recognizer is not trained (and on
                    // exceptions below), so it cannot be distinguished from a real label 0.
                    return(0);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                return(0);
            }
        }
Code example #5
        public Variables.RecognitionResult Recognise(Image <Gray, byte> Image)
        {
            if (Loaded)
            {
                FaceRecognizer.PredictionResult EgienRes  = eigen.Predict(Image);
                FaceRecognizer.PredictionResult FisherRes = fisher.Predict(Image);
                FaceRecognizer.PredictionResult LbRes     = Lp.Predict(Image);

                if (EgienRes.Label == -1)
                {
                    Eigen_label    = "Unknown";
                    Eigen_Distance = 0;
                    return(new RecognitionResult()
                    {
                        Label = Eigen_label, Int = 0
                    });
                }
                else
                {
                    //TODO : Equalize All Labels Problems
                    Eigen_label = ListOFNames[EgienRes.Label];
                    if (EgienRes.Label != -1 && FisherRes.Label != -1 && LbRes.Label != -1)
                    {
                        if (EgienRes.Label == LbRes.Label && FisherRes.Label == EgienRes.Label)
                        {
                            return(new RecognitionResult()
                            {
                                Label = Eigen_label, Int = (int)EgienRes.Distance
                            });
                        }
                        // note: && binds tighter than ||, so this reads as
                        // (Eigen > threshold && Fisher > 3000) || LBPH > 100
                        else if (EgienRes.Distance > Eigen_threshold &&
                                 FisherRes.Distance > 3000 ||
                                 LbRes.Distance > 100)
                        {
                            return(new RecognitionResult()
                            {
                                Label = Eigen_label, Int = (int)EgienRes.Distance
                            });
                        }
                        else
                        {
                            return(new RecognitionResult()
                            {
                                Label = "Unkown", Int = 0
                            });
                        }
                    }
                    else if (EgienRes.Label != -1)
                    {
                        if (EgienRes.Distance > Eigen_threshold &&
                            (int)FisherRes.Distance > 3000 &&
                            (int)LbRes.Distance > 100)
                        {
                            return(new RecognitionResult()
                            {
                                Label = Eigen_label, Int = (int)EgienRes.Distance
                            });
                        }
                    }


                    return(new RecognitionResult()
                    {
                        Label = "Unkown", Int = 0
                    });
                }
            }
            else
            {
                return(new RecognitionResult()
                {
                    Label = "Unkown", Int = 0, HasError = true, ErrorMessage = "Not Trained"
                });
            }
        }
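Variables.RecognitionResult is not defined in this excerpt; judging from how it is constructed above, it is presumably a small DTO along these lines (a reconstruction, not the original type, and the Variables container is assumed):

        // Reconstructed from usage in Recognise(); property types are inferred.
        public class RecognitionResult
        {
            public string Label        { get; set; }   // recognized name, or "Unknown"
            public int    Int          { get; set; }   // distance reported by the eigen recognizer
            public bool   HasError     { get; set; }
            public string ErrorMessage { get; set; }
        }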
Code example #6
        public async Task <KeyValuePair <UserDTO, int> > Predict(Image <Gray, byte> image, StateType type, Image <Bgr, byte> originalImage)
        {
            try
            {
                var res          = faceRecognizer.Predict(image);
                var fisherResult = fisherRecognizer.Predict(image);   // computed but not used in the checks below
                var lbphResult   = LBPHFaceRecognizer.Predict(image);
                var retValue     = new KeyValuePair <UserDTO, int>(null, (int)res.Distance);

                // accept the match only when both the eigenface and LBPH distances are under their thresholds
                if (res.Distance < 3500 && lbphResult.Distance < LBPHThershold)
                {
                    var label = (await _labelService.Get <List <LabelDTO> >(new LabelSearchRequest {
                        UserLabel = res.Label
                    })).FirstOrDefault();
                    var user  = await _userService.GetById <UserDTO>(label.UserId);

                    if (user != null)
                    {
                        // find log
                        var logs = (await _archiveService.Get <List <LogDTO> >(new LogSearchRequest {
                            Entered = true, UserId = user.Id, Left = false
                        })).FirstOrDefault();

                        if (type == StateType.Left)
                        {
                            //check if user has already entered the house
                            if (logs != null)
                            {
                                var updateLog = new LogInsertRequest
                                {
                                    UserId      = logs.UserId,
                                    EnteredDate = logs.EnteredDate,
                                    LeftDate    = DateTime.Now,
                                    Entered     = logs.Entered,
                                    Left        = true,
                                    Picture     = logs.Picture
                                };
                                // update the log that the user has left the house at DateTime.Now
                                var resUpdate = await _archiveService.Update <LogDTO>(logs.Id, updateLog);

                                if (resUpdate == null)
                                {
                                    return(new KeyValuePair <UserDTO, int>(null, 0));
                                }
                                else
                                {
                                    return(new KeyValuePair <UserDTO, int>(user, (int)res.Distance));
                                }
                            }
                        }
                        // if there are no logs, the user has not entered yet
                        else if (logs == null)
                        {
                            LogDTO result = null;
                            using (var ms = new MemoryStream())
                            {
                                Bitmap bitmap = originalImage.ToBitmap();
                                bitmap.Save(ms, ImageFormat.Png);
                                byte[] myArray     = ms.ToArray();
                                string bytePicture = Convert.ToBase64String(myArray);
                                //create a log that user has entered
                                result = await _archiveService.Insert <LogDTO>(new LogInsertRequest
                                {
                                    EnteredDate = DateTime.Now,
                                    Picture     = bytePicture,
                                    UserId      = user.Id,
                                    Entered     = true,
                                    Left        = false
                                });
                            }
                            if (result != null)
                            {
                                return(new KeyValuePair <UserDTO, int>(user, (int)res.Distance));
                            }
                        }
                    }
                    return(new KeyValuePair <UserDTO, int>(user, (int)res.Distance));
                }
                //the user was recognized by only one of the algorithms; they may still be in the Db, so the picture cannot be treated as a visitor
                else if (res.Distance > threshold && lbphResult.Distance < LBPHThershold)
                {
                    return(new KeyValuePair <UserDTO, int>(null, int.MaxValue));
                }
                //return retvalue;
                // return keypair which tells the caller function that face is not recognized, so program can continue processing the given picture
                return(new KeyValuePair <UserDTO, int>(null, (int)res.Distance));
            }
            catch (Exception e)
            {
                // On failure: refresh the API token, reload the trained recognizer models
                // from disk and report the face as not recognized.
                var user = await (new APIService("token")).Insert <AppCore.Requests.UserDTO>(new AppCore.Requests.UserInsertRequest {
                    UserName = "******", Password = "******"
                });
                if (user != null)
                {
                    APIService.Token = "Bearer " + user.Token;
                }
                faceRecognizer.Read(Application.StartupPath + @"/../../Images/eigenRecognizer.yml");
                fisherRecognizer.Read(Application.StartupPath + @"/../../Images/fisherRecognizer.yml");
                LBPHFaceRecognizer.Read(Application.StartupPath + @"/../../Images/lpbhRecognizer.yml");
                return(new KeyValuePair <UserDTO, int>(null, int.MaxValue));
            }
        }
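Example #6 assumes faceRecognizer, fisherRecognizer and LBPHFaceRecognizer are already constructed and trained, or read back from the .yml files referenced in the catch block. A minimal initialization sketch using Emgu.CV.Face; the constructor parameters are illustrative, not the project's actual settings:

        // Sketch only: fields as they might be declared and initialized in the class.
        private EigenFaceRecognizer  faceRecognizer     = new EigenFaceRecognizer(80, double.PositiveInfinity);
        private FisherFaceRecognizer fisherRecognizer   = new FisherFaceRecognizer(0, double.PositiveInfinity);
        private LBPHFaceRecognizer   LBPHFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, double.PositiveInfinity);

        // Reload previously trained models, mirroring the paths used in the catch block.
        private void LoadRecognizers()
        {
            faceRecognizer.Read(Application.StartupPath + @"/../../Images/eigenRecognizer.yml");
            fisherRecognizer.Read(Application.StartupPath + @"/../../Images/fisherRecognizer.yml");
            LBPHFaceRecognizer.Read(Application.StartupPath + @"/../../Images/lpbhRecognizer.yml");
        }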
Code example #7
        private void ProcessFrame2(object sender, EventArgs arg)
        {
            if (comboBoxCapture.Text == "Camera")
            {
                image = _capture.RetrieveBgrFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }


            if (comboBoxCapture.Text == "Single Image")
            {
                OpenFileDialog Openfile = new OpenFileDialog();
                if (Openfile.ShowDialog() == DialogResult.OK)
                {
                    image = new Image <Bgr, byte>(Openfile.FileName);
                }
            }

            List <Rectangle> faces = new List <Rectangle>();
            List <Rectangle> eyes  = new List <Rectangle>();
            long             detectionTime;

            DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
            foreach (Rectangle face in faces)
            {
                //restrict the image ROI to the current face
                if (workCorruptedImages.Checked == true)
                {
                    image.ROI = face;
                }
                if (faceRecog.Checked == true)
                {
                    //apply the selected algorithm once recognition has been started

                    //For SURF Algorithm
                    if (comboBoxAlgorithm.Text == "SURF Feature Extractor")
                    {
                        string   dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
                        string[] files         = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);

                        foreach (var file in files)
                        {
                            richTextBox1.Text += file.ToString();
                            long recpoints;
                            Image <Bgr, Byte> sampleImage = new Image <Bgr, Byte>(file);
                            secondImageBox.Image = sampleImage;
                            using (Image <Gray, Byte> modelImage = sampleImage.Convert <Gray, Byte>())
                                using (Image <Gray, Byte> observedImage = image.Convert <Gray, Byte>())
                                {
                                    Image <Bgr, byte> result = SurfRecognizer.Draw(modelImage, observedImage, out recpoints);
                                    //captureImageBox.Image = observedImage;
                                    if (recpoints > 10)
                                    {
                                        MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
                                        result.Draw("Person Recognited, Welcome", ref f, new Point(40, 40), new Bgr(0, 255, 0));
                                        ImageViewer.Show(result, String.Format(" {0} Points Recognited", recpoints));
                                    }
                                }
                        }
                    }
                    //For EigenFaces
                    else if (comboBoxAlgorithm.Text == "EigenFaces")
                    {
                        CvInvoke.cvResetImageROI(image);
                        //image._EqualizeHist();
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = eigenFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            image.Draw(eigenlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                        }
                    }
                    //For FisherFaces
                    else if (comboBoxAlgorithm.Text == "FisherFaces")
                    {
                        CvInvoke.cvResetImageROI(image);
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = fisherFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            image.Draw(fisherlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                        }
                    }

                    //For LBPH
                    else if (comboBoxAlgorithm.Text == "LBPHFaces")
                    {
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = lbphFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            CvInvoke.cvResetImageROI(image);
                            image.Draw(lbphlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                            label7.Text = lbphlabels[result.Label].ToString();
                        }
                    }
                }

                CvInvoke.cvResetImageROI(image);
                image.Draw(face, new Bgr(Color.Red), 2);
            }
            captureImageBox.Image = image;
        }
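The DetectFace.Detect helper called above is not included in this excerpt. A minimal sketch with the same call shape, built on Emgu CV's CascadeClassifier; the cascade parameters are illustrative:

        // Sketch of a detector matching the DetectFace.Detect call used above.
        // Requires System.Diagnostics, Emgu.CV and Emgu.CV.Structure.
        public static class DetectFaceSketch
        {
            public static void Detect(Image<Bgr, byte> image, string faceFileName, string eyeFileName,
                                      List<Rectangle> faces, List<Rectangle> eyes, out long detectionTime)
            {
                Stopwatch watch = Stopwatch.StartNew();
                using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
                using (CascadeClassifier eyeCascade = new CascadeClassifier(eyeFileName))
                using (Image<Gray, byte> gray = image.Convert<Gray, byte>())
                {
                    gray._EqualizeHist();
                    // detect faces, then search for eyes inside each face rectangle
                    faces.AddRange(faceCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20), Size.Empty));
                    foreach (Rectangle f in faces)
                    {
                        gray.ROI = f;
                        foreach (Rectangle e in eyeCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20), Size.Empty))
                        {
                            Rectangle eyeRect = e;
                            eyeRect.Offset(f.X, f.Y);   // ROI coordinates -> full-image coordinates
                            eyes.Add(eyeRect);
                        }
                        gray.ROI = Rectangle.Empty;
                    }
                }
                watch.Stop();
                detectionTime = watch.ElapsedMilliseconds;
            }
        }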