Example No. 1
        static void Main(string[] args)
        {
            // Load the two training faces as 8-bit grayscale images
            Image<Gray, Byte>[] trainingImages = new Image<Gray, Byte>[2];
            trainingImages[0] = new Image<Gray, byte>("C:\\Image\\Romy.jpg");
            trainingImages[1] = new Image<Gray, byte>("C:\\Image\\Stevie.jpg");

            // One integer label per training image
            int[] labels = new int[] { 0, 1 };

            // Not used by EigenFaceRecognizer; a termination criterion is only needed
            // by the legacy EigenObjectRecognizer (see Example No. 4)
            MCvTermCriteria termCrit = new MCvTermCriteria(16, 0.001);

            // numComponents = 0 keeps all eigenvectors; the very strict threshold (0.2)
            // only works here because the test image is identical to a training image
            EigenFaceRecognizer recognizer = new EigenFaceRecognizer(0, 0.2);
            Image<Gray, Byte> testImage = new Image<Gray, Byte>("C:\\Image\\Stevie.jpg");

            recognizer.Train(trainingImages, labels);
            FaceRecognizer.PredictionResult result = recognizer.Predict(testImage);

            Console.WriteLine(result.Label);    // predicted label (1 = Stevie)
            Console.WriteLine(result.Distance); // eigen-space distance to the best match
            Console.ReadKey();
        }
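
A distance-based rejection step is often layered on top of Predict so that unfamiliar faces are not force-matched to the closest training label. A minimal sketch against the same Emgu CV API used above; the PredictOrReject name and the maxDistance cutoff are illustrative, not part of the original sample:

        // Illustrative helper, not part of the original sample: wraps Predict with an
        // explicit distance check so unknown faces are reported instead of force-matched.
        static string PredictOrReject(EigenFaceRecognizer recognizer, Image<Gray, Byte> face, double maxDistance)
        {
            FaceRecognizer.PredictionResult result = recognizer.Predict(face);
            if (result.Label == -1 || result.Distance > maxDistance)
                return "unknown";
            return "label " + result.Label + " (distance " + result.Distance.ToString("F1") + ")";
        }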
Example No. 2
 /// <summary>
 /// Trains the recognizer on the fetched face/label pairs and saves the trained data to disk
 /// </summary>
 public void TrainRecognizer()
 {
     // imgs (list of Image<Gray, byte>) and ints (int[] labels) are fields populated elsewhere
     recog = new EigenFaceRecognizer();
     recog.Train<Gray, byte>(imgs.ToArray(), ints);
     MessageBox.Show("Training complete");

     // Persist the trained model so it can be reloaded later without retraining
     recog.Save("trainingset/test.frl");
     MessageBox.Show("Recognizer saved to trainingset/test.frl");
 }
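
The counterpart to Save in this API is Load (it also appears, commented out, in Example No. 3). A minimal sketch of reloading the stored model and predicting a single face, reusing the same recog field and file path; the LoadAndPredict name and the 3000 cutoff are illustrative, not from the original sample:

 /// <summary>
 /// Reloads the model written by TrainRecognizer() and predicts one face.
 /// </summary>
 public int LoadAndPredict(Image<Gray, byte> face)
 {
     recog = new EigenFaceRecognizer();
     recog.Load("trainingset/test.frl");                 // counterpart of recog.Save(...)
     FaceRecognizer.PredictionResult result = recog.Predict(face);
     // Reject weak matches; 3000 is an illustrative cutoff, tune it for your data
     return result.Distance < 3000 ? result.Label : -1;
 }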
Example No. 3
        // accuracy: 0 = default, 1 = accurate, 2 = medium, 3 = imprecise, 4 = ambiguous
        // tmpNumComponentsFisher: if it is left at the default (0), set below 0, or set above the
        // number of training inputs, it is clamped automatically to (number of training inputs - 1)
        public void estimateParametersFisher(Image<Gray, Byte> imagesInput, int accuracy)
        {
            int tmpNumComponentsFisher;
            double tmpThresholdFisher;
            FaceRecognizer faceRecognition;

            // Grid-search the threshold and component count until a prediction succeeds.
            // Note: despite the "Fisher" names, this sweep builds an EigenFaceRecognizer
            // and stores its result in numComponentsEigen / thresholdEigen.
            for (tmpThresholdFisher = 1000; tmpThresholdFisher < 10000; tmpThresholdFisher += 100)
            {
                for (tmpNumComponentsFisher = 50; tmpNumComponentsFisher < 100; tmpNumComponentsFisher += 10)
                {
                    faceRecognition = new EigenFaceRecognizer(tmpNumComponentsFisher, tmpThresholdFisher);

                    // The training set is rebuilt from the repository on every iteration;
                    // it could be loaded once outside the loops since it never changes.
                    GenericRepository<DistanceResult> distanceResultRepo = unitOfWork.GetRepoInstance<DistanceResult>();

                    int lengthArrays = distanceResultRepo.GetAllRecords().Count();
                    imagesDB = new Image<Gray, Byte>[lengthArrays];
                    labels = new int[lengthArrays];

                    int i = 0;
                    foreach (DistanceResult di in distanceResultRepo.GetAllRecords())
                    {
                        // Rebuild the training data used to recalculate faceRecognition;
                        // imagesDB and labels probably do not need to be class-level fields
                        imagesDB[i] = new Image<Gray, Byte>(pathImg + @"\" + di.photoName + ".Jpeg");
                        labels[i] = di.employeeId;
                        i++;
                    }

                    faceRecognition.Train(imagesDB, labels);

                    //faceRecognition.Load(pathImg + @"\" + "TrainingSet");
                    FaceRecognizer.PredictionResult ER = faceRecognition.Predict(imagesInput);

                    if (ER.Label != -1)
                    {
                        // accuracy 1-4 keeps the first successful component count and loosens
                        // the threshold by 300 per accuracy step; 0 (or negative) keeps the defaults
                        if (accuracy >= 1 && accuracy <= 4)
                        {
                            numComponentsEigen = tmpNumComponentsFisher;
                            thresholdEigen = tmpThresholdFisher + 300 * (accuracy - 1);
                            return;
                        }
                        else if (accuracy > 4)
                        {
                            thresholdEigen = Double.PositiveInfinity;
                        }
                        else return;
                    }
                    faceRecognition.Dispose();
                }
            }
        }
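
For context, a minimal sketch of how the estimated parameters might then be consumed, reusing the class-level fields the sweep already fills in (numComponentsEigen, thresholdEigen, imagesDB, labels); the buildTunedRecognizer name is illustrative, not part of the original sample:

        // Illustrative helper, not part of the original sample: builds the final
        // recognizer from the parameters chosen by estimateParametersFisher.
        public FaceRecognizer buildTunedRecognizer()
        {
            FaceRecognizer tuned = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
            tuned.Train(imagesDB, labels);   // imagesDB and labels were filled during the sweep
            return tuned;
        }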
Example No. 4
        private void button_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog openFileDialog = new OpenFileDialog();
            openFileDialog.ShowDialog();
            var filePath = openFileDialog.FileName;
            Image<Bgr, Byte> image = new Image<Bgr, byte>(filePath); // Read the selected file as an 8-bit Bgr image
            List<System.Drawing.Rectangle> faces = new List<System.Drawing.Rectangle>();
            List<System.Drawing.Rectangle> eyes = new List<System.Drawing.Rectangle>();

            Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes);

            foreach (System.Drawing.Rectangle face in faces)
                image.Draw(face, new Bgr(System.Drawing.Color.Red), 2);
            foreach (System.Drawing.Rectangle eye in eyes)
                image.Draw(eye, new Bgr(System.Drawing.Color.Blue), 2);

            ImageViewer.Show(image);
            File.WriteAllBytes("test.jpg", image.ToJpegData());

            Image<Gray, Byte> smileImage = new Image<Gray, byte>("happy.jpg"); // Read as an 8-bit grayscale image
            Image<Gray, Byte> sadImage = new Image<Gray, byte>("sad.jpg");     // Read as an 8-bit grayscale image

            List<Image<Gray, Byte>> trainingList = new List<Image<Gray, byte>>();
            trainingList.Add(smileImage);
            trainingList.Add(sadImage);

            List<string> labelList = new List<string>();
            labelList.Add("happy");
            labelList.Add("sad");

            MCvTermCriteria termCrit = new MCvTermCriteria(10, 0.001);

            // Legacy eigen recognizer: takes string labels, a distance threshold, and a termination criterion
            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                trainingList.ToArray(),
                labelList.ToArray(),
                5000,          // eigen-distance threshold
                ref termCrit);

            Image<Gray, Byte> inputImage = new Image<Gray, byte>(filePath); // Reload the selected file, this time as grayscale
            var resizedImage = inputImage.Resize(smileImage.Width, smileImage.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            var name = recognizer.Recognize(resizedImage).Label; // string label ("happy" or "sad")

            // The newer FaceRecognizer API uses integer labels: 1 = happy, 2 = sad
            List<int> temp = new List<int>();
            temp.Add(1);
            temp.Add(2);

            EigenFaceRecognizer recognizer2 = new EigenFaceRecognizer(80, double.PositiveInfinity);
            recognizer2.Train(trainingList.ToArray(), temp.ToArray());
            var dd = recognizer2.Predict(resizedImage); // dd.Label and dd.Distance hold the prediction

            ImageViewer.Show(resizedImage);
        }
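
Since EigenFaceRecognizer only accepts integer labels, getting back to the string names that EigenObjectRecognizer returns directly needs a small lookup. A minimal sketch under that assumption; the labelNames dictionary and PredictName helper are illustrative and not part of the original sample:

        // Illustrative helper, not part of the original sample: maps the integer labels
        // used to train recognizer2 back to human-readable names.
        private static readonly Dictionary<int, string> labelNames = new Dictionary<int, string>
        {
            { 1, "happy" },
            { 2, "sad" }
        };

        private string PredictName(EigenFaceRecognizer recognizer, Image<Gray, byte> face)
        {
            FaceRecognizer.PredictionResult result = recognizer.Predict(face);
            // Label is -1 when the distance exceeds the recognizer's threshold
            return labelNames.TryGetValue(result.Label, out string name) ? name : "unknown";
        }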