Eigen face recognizer
Inherits: FaceRecognizer
Example #1
2
        /// <summary>
        /// Minimal EigenFaceRecognizer demo: trains on two labeled face images and
        /// predicts the label of a test image (here, the second training image).
        /// </summary>
        static void Main(string[] args)
        {
            // Gallery: one image per person, loaded as 8-bit grayscale.
            Image<Gray, Byte>[] trainingImages = new Image<Gray, Byte>[2];
            trainingImages[0] = new Image<Gray, byte>("C:\\Image\\Romy.jpg");
            trainingImages[1] = new Image<Gray, byte>("C:\\Image\\Stevie.jpg");

            // One numeric label per training image (0 = Romy, 1 = Stevie).
            int[] labels = new int[] { 0, 1 };

            // 0 => keep all eigen components; 0.2 => distance threshold for a match.
            EigenFaceRecognizer recognizer = new EigenFaceRecognizer(0, 0.2);
            Image<Gray, Byte> testImage = new Image<Gray, Byte>("C:\\Image\\Stevie.jpg");

            recognizer.Train(trainingImages, labels);
            EigenFaceRecognizer.PredictionResult result = recognizer.Predict(testImage);

            // BUGFIX: the original printed the label twice; show the distance score
            // alongside the predicted label instead. (An unused MCvTermCriteria
            // local was also removed — EigenFaceRecognizer does not take one.)
            Console.WriteLine(result.Label);
            Console.WriteLine(result.Distance);
            Console.ReadKey();
        }
Example #2
2
 /// <summary>
 /// Trains recognizer on fetched face-label pairs and saves the trained data to recognition variables
 /// </summary>
 /// <remarks>
 /// Assumes the fields <c>imgs</c> (grayscale face images) and <c>ints</c>
 /// (matching integer labels, one per image) were populated elsewhere before this
 /// runs — TODO confirm against the caller. The MessageBox strings are informal
 /// progress markers left by the original author ("tuwid na daan o" is Filipino).
 /// </remarks>
 public void TrainRecognizer()
 {
     // Default constructor: 0 components => keep all principal components.
     recog = new EigenFaceRecognizer();
     recog.Train<Gray, byte>(imgs.ToArray(),ints);
     MessageBox.Show("aww yes");
      // Persist the trained model so it can be reloaded without retraining.
      recog.Save("trainingset/test.frl");
     MessageBox.Show("tuwid na daan o");
 }
Example #3
0
 /// <summary>
 /// Wires up the recognition engine: builds the Eigen face recognizer, opens the
 /// database, and remembers where the trained recognizer state is stored.
 /// </summary>
 /// <param name="databasePath">Path handed to the DBAccess layer.</param>
 /// <param name="recognizerFilePath">Path of the saved recognizer state file.</param>
 public RecognizerEngine(String databasePath, String recognizerFilePath)
 {
     // 80 eigen components; PositiveInfinity threshold => always report the
     // closest match rather than rejecting distant ones.
     faceRecognizer = new EigenFaceRecognizer(80, double.PositiveInfinity);
     dbAccess = new DBAccess(databasePath);
     this.recognizerFilePath = recognizerFilePath;
 }
Example #4
0
        /// <summary>
        /// Enrolls an employee photo: creates the Employee row when the email is
        /// new, requires exactly one detected face, saves the equalized face crop
        /// to disk under a fresh GUID, records a DistanceResult row, and retrains
        /// and saves the selected recognizer over all stored faces.
        /// </summary>
        /// <param name="newImage">Source photo; must contain exactly one face.</param>
        /// <param name="name">Employee first name.</param>
        /// <param name="middleName">Employee middle name.</param>
        /// <param name="lastName">Employee last name.</param>
        /// <param name="email">Unique key used to find an existing employee.</param>
        /// <param name="faceRecognizerMethode">Which recognizer variant to train.</param>
        /// <returns>
        /// Result.NoDetected / Result.MultipleFacesDetected / Result.Saved as a
        /// string, or null when the recognizer method is not recognized.
        /// </returns>
        public string saveEmployee(Image newImage, string name, string middleName, string lastName, string email, FaceRecognizerMethode faceRecognizerMethode)
        {
            unitOfWork = new GenericUnitOfWork();
            GenericRepository<Employee> employeeRepo = unitOfWork.GetRepoInstance<Employee>();

            // Email is the unique key: reuse the existing employee when present.
            // (FirstOrDefault replaces the original First() + catch-everything,
            // which silently swallowed real database errors as "new user".)
            Employee employee = employeeRepo.GetAllRecords().Where<Employee>(e => e.email == email).FirstOrDefault<Employee>();

            if (employee == null)
            {
                Debug.WriteLine("Nuevo usuario");
                employee = new Employee { name = name, middleName = middleName, lastName = lastName, email = email };
                employeeRepo.Add(employee);
                unitOfWork.SaveChanges();
            }

            GenericRepository<DistanceResult> distanceResultRepo = unitOfWork.GetRepoInstance<DistanceResult>();
            // The saved image is named by a GUID, which DistanceResult also records.
            Guid guid = Guid.NewGuid();

            var inputImage = new Image<Bgr, Byte>(new Bitmap(newImage));
            Rectangle[] rectangleFace = detection(inputImage, pathXMLHaarcascade);

            // detection(..) can extract N faces; enrollment needs exactly one.
            if (rectangleFace.Length <= 0)
            {
                return Result.NoDetected.ToString();
            }
            if (rectangleFace.Length > 1)
            {
                return Result.MultipleFacesDetected.ToString();
            }

            Image<Gray, byte> grayFrame = toGrayEqualizeFrame(inputImage);
            Image<Gray, Byte> faceEMGUCV = formatRectangleFaces(grayFrame.ToBitmap(), rectangleFace[0]);
            faceEMGUCV._EqualizeHist();
            faceEMGUCV.Save(pathImg + @"\" + guid.ToString() + ".Jpeg");

            // Switch directly on the enum instead of its ToString() form.
            FaceRecognizer faceRecognition;
            switch (faceRecognizerMethode)
            {
                case FaceRecognizerMethode.EigenFaceRecognizerMethode:
                    faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
                    break;
                case FaceRecognizerMethode.FisherFaceRecognizerMethode:
                    faceRecognition = new FisherFaceRecognizer(numComponentsFisher, thresholdFisher);
                    break;
                case FaceRecognizerMethode.LBPHFaceRecognizerMethode:
                    faceRecognition = new LBPHFaceRecognizer(radiusLBPH, neighborsLBPH, gridXLBPH, gridYLBPH, thresholdLBPH);
                    break;
                default:
                    return null;
            }

            // Placeholder distance recorded at enrollment time (no comparison has
            // actually been run against this photo yet).
            double distance = 2;

            DistanceResult dist = new DistanceResult();
            // BUGFIX: record the recognizer actually used; the original always
            // wrote FisherFaceRecognizerMethode regardless of the chosen method.
            dist.algorithm = faceRecognizerMethode.ToString();
            dist.employeeId = employee.employeeId;
            dist.photoName = guid.ToString();
            dist.distance = distance;
            distanceResultRepo.Add(dist);

            unitOfWork.SaveChanges();

            // Rebuild the full training set from every stored face on disk.
            int lengthArrays = distanceResultRepo.GetAllRecords().Count();
            imagesDB = new Image<Gray, Byte>[lengthArrays];
            labels = new int[lengthArrays];
            int i = 0;
            foreach (DistanceResult di in distanceResultRepo.GetAllRecords())
            {
                imagesDB[i] = new Image<Gray, Byte>(pathImg + @"\" + di.photoName + ".Jpeg");
                labels[i] = di.employeeId;
                i++;
            }

            // Eigen/Fisher recognizers need at least two classes to train.
            if (employeeRepo.GetAllRecords().Count() > 1)
            {
                faceRecognition.Train(imagesDB, labels);
                faceRecognition.Save(pathImg + @"\" + "TrainingSet");
            }
            return Result.Saved.ToString();
        }
Example #5
0
        /// <summary>
        /// Detects every face in <paramref name="newImage"/>, runs the previously
        /// saved recognizer over each one, and returns one EmployeeStructure per
        /// detected face.
        /// </summary>
        /// <param name="newImage">Photo that may contain any number of faces.</param>
        /// <param name="faceRecognizerMethode">Which trained recognizer variant to load.</param>
        /// <returns>
        /// One entry per detected face (Recognized with employee data, or Unknown);
        /// a single NoDetected entry when the detector finds nothing.
        /// </returns>
        public EmployeeStructure[] recognizeMultipleFaces(Image newImage, FaceRecognizerMethode faceRecognizerMethode)
        {
            var inputImage = new Image<Bgr, Byte>(new Bitmap(newImage));
            Rectangle[] rectangleFace = detection(inputImage, this.pathXMLHaarcascade);

            if (rectangleFace.Length <= 0)
            {
                // BUGFIX: the original allocated a zero-length array and then wrote
                // to element 0, which always threw IndexOutOfRangeException.
                // (Assumes EmployeeStructure is a struct — the original assigned
                // .result on an unconstructed element the same way; TODO confirm.)
                EmployeeStructure[] noFace = new EmployeeStructure[1];
                noFace[0].result = Result.NoDetected.ToString();
                return noFace;
            }

            Image<Gray, byte> grayFrame = toGrayEqualizeFrame(inputImage);
            EmployeeStructure[] employeeStructure = new EmployeeStructure[rectangleFace.Length];

            // Switch directly on the enum; unknown values fall back to Eigen,
            // matching the original default branch.
            FaceRecognizer faceRecognition;
            switch (faceRecognizerMethode)
            {
                case FaceRecognizerMethode.FisherFaceRecognizerMethode:
                    faceRecognition = new FisherFaceRecognizer(numComponentsFisher, thresholdFisher);
                    break;
                case FaceRecognizerMethode.LBPHFaceRecognizerMethode:
                    faceRecognition = new LBPHFaceRecognizer(radiusLBPH, neighborsLBPH, gridXLBPH, gridYLBPH, thresholdLBPH);
                    break;
                default:
                    faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
                    break;
            }

            faceRecognition.Load(pathImg + @"\" + "TrainingSet");

            // NOTE(review): faceRecognition.Predict and unitOfWork are shared
            // across Parallel.For iterations — confirm they are thread-safe.
            Parallel.For(0, rectangleFace.Length, i =>
            {
                Image<Gray, byte> faceEMGUCV = formatRectangleFaces(grayFrame.ToBitmap(), rectangleFace[i]);

                FaceRecognizer.PredictionResult ER = faceRecognition.Predict(faceEMGUCV);

                if (ER.Label != -1 /*&& ER.Distance > thresholdEigen*/)
                {
                    GenericRepository<Employee> emplyeeRepo = unitOfWork.GetRepoInstance<Employee>();
                    Employee em = emplyeeRepo.GetFirstOrDefault(ER.Label);

                    // BUGFIX: use this face's rectangle (index i); the original
                    // reported rectangleFace[0]'s coordinates for every face.
                    employeeStructure[i] = new EmployeeStructure(Result.Recognized.ToString(), em.name, em.middleName, em.lastName, em.email, rectangleFace[i].X, rectangleFace[i].Y, rectangleFace[i].Width, rectangleFace[i].Height);
                }
                else
                {
                    // BUGFIX: the original set Unknown unconditionally, clobbering
                    // the Recognized result written just above.
                    employeeStructure[i].result = Result.Unknown.ToString();
                }
            });

            return employeeStructure;
        }
Example #6
0
        // accuracy: 0 = default, 1 = accurate, 2 = medium, 3 = imprecise, 4 = ambiguous
        //tmpNumComponentsFisher. If you leave this at the default (0), set it to a value less than 0, or greater than the number of your training inputs, it will be set to the correct number (your training inputs - 1) automatically
        /// <summary>
        /// Grid-searches recognizer parameters until <paramref name="imagesInput"/>
        /// is recognized, then stores the found pair loosened by an
        /// accuracy-dependent margin. NOTE(review): despite the "Fisher" naming,
        /// this constructs an EigenFaceRecognizer and writes the Eigen fields —
        /// confirm whether that is intentional.
        /// </summary>
        /// <param name="imagesInput">Probe face used to test each parameter pair.</param>
        /// <param name="accuracy">
        /// 1..4 widen the stored threshold by (accuracy-1)*300; values above 4
        /// force an infinite threshold and keep searching; other values return
        /// immediately on the first match without storing parameters.
        /// </param>
        public void estimateParametersFisher(Image<Gray, Byte> imagesInput, int accuracy)
        {
            // Load the training set once: the repository contents do not change
            // during the search (the original re-read every image on every inner
            // loop iteration — 450 full reloads).
            GenericRepository<DistanceResult> distanceResultRepo = unitOfWork.GetRepoInstance<DistanceResult>();
            int lengthArrays = distanceResultRepo.GetAllRecords().Count();
            imagesDB = new Image<Gray, Byte>[lengthArrays];
            labels = new int[lengthArrays];
            int i = 0;
            foreach (DistanceResult di in distanceResultRepo.GetAllRecords())
            {
                imagesDB[i] = new Image<Gray, Byte>(pathImg + @"\" + di.photoName + ".Jpeg");
                labels[i] = di.employeeId;
                i++;
            }

            for (double tmpThresholdFisher = 1000; tmpThresholdFisher < 10000; tmpThresholdFisher += 100)
            {
                for (int tmpNumComponentsFisher = 50; tmpNumComponentsFisher < 100; tmpNumComponentsFisher += 10)
                {
                    FaceRecognizer faceRecognition = new EigenFaceRecognizer(tmpNumComponentsFisher, tmpThresholdFisher);
                    try
                    {
                        faceRecognition.Train(imagesDB, labels);

                        FaceRecognizer.PredictionResult ER = faceRecognition.Predict(imagesInput);

                        if (ER.Label != -1)
                        {
                            if (accuracy >= 1 && accuracy <= 4)
                            {
                                // Accuracy 1..4 map to margins 0/300/600/900 over
                                // the first threshold that produced a match.
                                numComponentsEigen = tmpNumComponentsFisher;
                                thresholdEigen = tmpThresholdFisher + (accuracy - 1) * 300;
                                return;
                            }
                            if (accuracy > 4)
                            {
                                // Ambiguous: accept any distance and keep searching.
                                thresholdEigen = Double.PositiveInfinity;
                            }
                            else
                            {
                                // accuracy <= 0: stop without storing parameters.
                                return;
                            }
                        }
                    }
                    finally
                    {
                        // BUGFIX: the original leaked the recognizer on every early
                        // return path (Dispose was only reached when no match).
                        faceRecognition.Dispose();
                    }
                }
            }
        }
Example #7
0
        /// <summary>
        /// Demo handler: lets the user pick an image, draws detected faces (red)
        /// and eyes (blue), then runs two recognizers — the legacy
        /// EigenObjectRecognizer (string labels) and EigenFaceRecognizer (int
        /// labels) — against happy/sad reference images.
        /// </summary>
        private void button_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog openFileDialog = new OpenFileDialog();
            openFileDialog.ShowDialog();
            var filePath = openFileDialog.FileName;

            // BUGFIX: the original ignored the dialog result; cancelling left an
            // empty path and crashed in the Image constructor below.
            if (string.IsNullOrEmpty(filePath))
                return;

            Image<Bgr, Byte> image = new Image<Bgr, byte>(filePath); //Read the file as an 8-bit Bgr image
            List<System.Drawing.Rectangle> faces = new List<System.Drawing.Rectangle>();
            List<System.Drawing.Rectangle> eyes = new List<System.Drawing.Rectangle>();

            Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes);

            foreach (System.Drawing.Rectangle face in faces)
                image.Draw(face, new Bgr(System.Drawing.Color.Red), 2);
            foreach (System.Drawing.Rectangle eye in eyes)
                image.Draw(eye, new Bgr(System.Drawing.Color.Blue), 2);

            ImageViewer.Show(image);
            File.WriteAllBytes("test.jpg", image.ToJpegData());

            // Two-image training set: grayscale reference expressions.
            Image<Gray, Byte> smileImage = new Image<Gray, byte>("happy.jpg");
            Image<Gray, Byte> sadImage = new Image<Gray, byte>("sad.jpg");

            List<Image<Gray, Byte>> trainingList = new List<Image<Gray, byte>>();
            trainingList.Add(smileImage);
            trainingList.Add(sadImage);

            // EigenObjectRecognizer uses string labels, one per training image.
            List<string> labelList = new List<string>();
            labelList.Add("happy");
            labelList.Add("sad");

            MCvTermCriteria termCrit = new MCvTermCriteria(10, 0.001);

            // Legacy Eigen recognizer: 5000 is the eigen-distance threshold.
            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                trainingList.ToArray(),
                labelList.ToArray(),
                5000,
                ref termCrit);

            // The probe must match the training image dimensions before Predict.
            Image<Gray, Byte> inputImage = new Image<Gray, byte>(filePath);
            var resizedImage = inputImage.Resize(smileImage.Width, smileImage.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            var name = recognizer.Recognize(resizedImage).Label;
            Debug.WriteLine(name); // BUGFIX: result was computed but never observed

            // EigenFaceRecognizer uses int labels (1 = happy, 2 = sad here).
            List<int> temp = new List<int>();
            temp.Add(1);
            temp.Add(2);

            EigenFaceRecognizer recogizer2 = new EigenFaceRecognizer(80, double.PositiveInfinity);
            recogizer2.Train(trainingList.ToArray(), temp.ToArray());
            var dd = recogizer2.Predict(resizedImage);
            Debug.WriteLine(dd.Label); // BUGFIX: result was computed but never observed

            ImageViewer.Show(resizedImage);
        }