Exemplo n.º 1
0
        /// <summary>
        /// Smoke test: loads a still image, detects faces in its equalized
        /// grayscale version, then trains a one-image Eigen recognizer and runs
        /// a single prediction against the same image.
        /// </summary>
        public static void Test()
        {
            using (Mat image = new Mat("testGamePic.jpg"))
            {
                using (Mat uimg = new Mat())
                {
                    using (CascadeClassifier face = new CascadeClassifier("haarcascade_frontalface_default.xml"))
                    {
                        CvInvoke.CvtColor(image, uimg, ColorConversion.Bgr2Gray);
                        CvInvoke.EqualizeHist(uimg, uimg);

                        // Scale factor 1.1, min 10 neighbours, min face size 20x20.
                        Rectangle[] facesDetected = face.DetectMultiScale(uimg, 1.1, 10, new Size(20, 20));

                        // FIX: the recognizer and both training vectors are IDisposable
                        // and were previously leaked; wrap them in using blocks.
                        using (EigenFaceRecognizer efr = new EigenFaceRecognizer())
                        using (VectorOfMat trainImages = new VectorOfMat(uimg))
                        using (VectorOfInt trainLabels = new VectorOfInt(new int[] { 1 }))
                        {
                            efr.Train(trainImages, trainLabels);
                            var res = efr.Predict(uimg);
                        }

                        return;
                    }
                }
            }
        }
Exemplo n.º 2
0
        /// <summary>
        /// Initialises the main form: creates the quiz data source, populates
        /// the combo boxes from it and sets up the face-recognition objects.
        /// </summary>
        public FormMain()
        {
            InitializeComponent();

            _qz2018 = new Quiz2018();
            _qz2018.Init(this);

            // Fill the face pickers; cbTFFaces also gets an "ALL" wildcard entry.
            cbTRFFaces.Items.AddRange(_qz2018.GetNames());
            cbTFFaces.Items.AddRange(_qz2018.GetNames());
            cbTFFaces.Items.Add("ALL");

            cbTGGroups.Items.AddRange(_qz2018.GetGroups());

            // Question number and question group pickers.
            cbQNbr.Items.AddRange(_qz2018.GetQuestions());
            cbQGrp.Items.AddRange(_qz2018.GetGroups());



            // 80 Eigen components, no distance cut-off (every prediction is accepted).
            FaceRecognition = new EigenFaceRecognizer(80, double.PositiveInfinity);
            FaceDetection   = new CascadeClassifier(System.IO.Path.GetFullPath(@"../../Algo/haarcascade_frontalface_default.xml"));
            Frame           = new Mat();
            Faces           = new List <Image <Gray, byte> >();
            Ids             = new List <int>();
            Names           = new List <string>();

            //BeginWebcam();
        }
Exemplo n.º 3
0
        /// <summary>
        /// Trains an Eigen recognizer on every face stored in the data store and
        /// saves the trained model under the application start-up path.
        /// </summary>
        public void TrainRecognizer()
        {
            //  FaceRecognizer _faceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, double.PositiveInfinity);
            FaceRecognizer  _faceRecognizer  = new EigenFaceRecognizer(80, double.PositiveInfinity);
            DataStoreAccess _datastoreaccess = new DataStoreAccess();
            var             allFaces         = _datastoreaccess.callfaces();

            if (allFaces != null)
            {
                var faceImages = new Image <Gray, byte> [allFaces.Count];
                var faceLabels = new int[allFaces.Count];
                for (int i = 0; i < allFaces.Count; i++)
                {
                    // FIX: the old code wrote the blob into a MemoryStream and then
                    // constructed Bitmap(stream) with the position left at EOF, so
                    // GDI+ had nothing to read. Building the stream directly over
                    // the byte array starts at position 0, and the stream/bitmap/
                    // intermediate image are no longer leaked.
                    using (var stream = new MemoryStream(allFaces[i].Image))
                    using (var bitmap = new Bitmap(stream))
                    using (var faceImage = new Image <Gray, byte>(bitmap))
                    {
                        // Normalise every sample to 100x100 for training.
                        faceImages[i] = faceImage.Resize(100, 100, Inter.Cubic);
                    }
                    faceLabels[i] = Convert.ToInt32(allFaces[i].Label);
                }

                string filepath = Application.StartupPath + "/traineddata";
                _faceRecognizer.Train(faceImages, faceLabels);

                _faceRecognizer.Save(filepath);
            }
            // else: no stored faces — nothing to train on, return silently (as before).
        }
Exemplo n.º 4
0
        /// <summary>
        /// Static initialiser: creates the detector and recognizer, then loads
        /// any previously saved training data and the id-to-name subject map
        /// ("id:name" lines) from the application base directory.
        /// </summary>
        static OpenCvTrainer()
        {
            Subjects       = new Dictionary <int, string>();
            SubjectSamples = new List <Tuple <int, Image <Gray, byte> > >();
            var haarPath = @"haarcascade_frontalface_default.xml";

            DetectionClassifier = new CascadeClassifier(haarPath);
            FaceRecognizerData  = new EigenFaceRecognizer();

            // Hoist the repeated Path.Combine calls.
            string baseDir      = AppDomain.CurrentDomain.BaseDirectory;
            string trainingFile = Path.Combine(baseDir, "trainingFile.ocv");
            string subjectsFile = Path.Combine(baseDir, "trainingSubjects.txt");

            if (File.Exists(trainingFile))
            {
                FaceRecognizerData.Load(trainingFile);
            }
            if (File.Exists(subjectsFile))
            {
                foreach (var line in File.ReadAllLines(subjectsFile))
                {
                    if (string.IsNullOrEmpty(line) || !line.Contains(":"))
                    {
                        continue;
                    }
                    // FIX: split on the first ':' only, so subject names that
                    // themselves contain ':' keep their full text (the old
                    // Split(':')[1] truncated them).
                    var items = line.Split(new[] { ':' }, 2);
                    int id;
                    // FIX: skip malformed ids instead of letting int.Parse throw —
                    // an exception here would poison the type (static ctor).
                    if (int.TryParse(items[0], out id) && !Subjects.ContainsKey(id))
                    {
                        Subjects.Add(id, items[1]);
                    }
                }
            }
        }
Exemplo n.º 5
0
        /// <summary>
        /// Builds a face-recognition training file from a set of labelled images.
        /// </summary>
        /// <param name="labels">Map of label id to label text.</param>
        /// <param name="files">List of (image file path, label id) pairs.</param>
        /// <param name="labelsFileName">Path of the id/label correspondence file to write.</param>
        /// <param name="recognizerFileName">Path of the recognizer data file to write.</param>
        public void CreateTrainingFaceRecognizerFile(Dictionary <int, string> labels, List <Tuple <string, int> > files, string labelsFileName, string recognizerFileName)
        {
            List <Image <Gray, byte> > trainingImages = new List <Image <Gray, byte> >();
            List <int> trainingIds = new List <int>();

            try
            {
                // Load each file as grayscale and remember its label id.
                for (int i = 0; i < files.Count; i++)
                {
                    trainingImages.Add(new Image <Bgr, byte>(files[i].Item1).Convert <Gray, byte>());
                    trainingIds.Add(files[i].Item2);
                }

                // One Eigen component per image, distance threshold 3000.
                using (EigenFaceRecognizer recognizer = new EigenFaceRecognizer(trainingImages.Count, 3000))
                {
                    recognizer.Train(trainingImages.ToArray(), trainingIds.ToArray());
                    recognizer.Write(recognizerFileName);
                }
            }
            finally
            {
                // The grayscale copies are only needed for training; release them.
                foreach (var trainingImage in trainingImages)
                {
                    trainingImage.Dispose();
                }
                trainingImages.Clear();
            }

            // Persist the "id|label" correspondences, one per line.
            StringBuilder labelText = new StringBuilder();

            foreach (var pair in labels)
            {
                labelText.AppendLine($"{pair.Key}|{pair.Value}");
            }
            File.WriteAllText(labelsFileName, labelText.ToString());
        }
Exemplo n.º 6
0
        /// <summary>
        /// Per-frame handler: grabs a camera frame, detects faces, recognises
        /// each one with an Eigen recognizer and draws the result on the frame.
        /// </summary>
        private void EigenFaceRecognition(object sender, EventArgs e)
        {
            Frame = _capture.QueryFrame().ToImage <Bgr, byte>();
            var frame = Frame.Resize(frameW, frameH, Inter.Cubic);

            grayFrame = frame.Convert <Gray, Byte>();
            var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

            if (faces.Length > 0)
            {
                // FIX: the recognizer used to be re-created and fully re-trained
                // once per detected face; the training data does not change inside
                // this handler, so train at most once per frame.
                eigenFaceRecognizer = new EigenFaceRecognizer(Count, double.PositiveInfinity);
                eigenFaceRecognizer.Train(trainingImages.ToArray(), indexLabels.ToArray());
            }

            foreach (var f in faces)
            {
                var result = eigenFaceRecognizer.Predict(frame.Copy(f).Convert <Gray, Byte>().Resize(100, 100, Inter.Cubic));
                if (result.Label == -1)
                {
                    // No label passed the threshold — mark as unknown in red.
                    frame.Draw(f, new Bgr(Color.Red), 2);
                    frame.Draw("Unknown", new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
                }
                else
                {
                    frame.Draw(f, new Bgr(Color.Green), 2);
                    frame.Draw(nameLabels[result.Label], new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
                }
                alertMessage.Text = (alert + "เริ่มการ Face Recognition ด้วยวิธีการ " + RecognitionType.Text + " แล้ว \r\n" + "Distance " + result.Distance + "\r\n Faces " + faces.Length.ToString());
            }
            imgFrame.Image = frame.Resize(imgBoxW, imgBoxH, Inter.Cubic);
        }
Exemplo n.º 7
0
        /// <summary>
        /// Grabs one frame, detects faces and shows the recognised user names
        /// (or "Unknown") in the result label.
        /// </summary>
        private void BtnPredict_Click(object sender, EventArgs e)
        {
            Frame = Capture.QueryFrame();
            // FIX: QueryFrame() can return null when no frame is available; the
            // old code dereferenced it (ToImage) before any null check could run.
            if (Frame == null)
            {
                return;
            }

            var imageFrame = Frame.ToImage <Gray, byte>();

            if (imageFrame != null)
            {
                // Scale factor 1.3, min 5 neighbours.
                var faces        = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);
                var userDetected = new List <string>();

                foreach (var face in faces)
                {
                    var processImage = imageFrame.Copy(face).Resize(ProcessImageWidth, ProcessImageHeight, Inter.Cubic);
                    var resultEigen  = EigenFaceRecognizer.Predict(processImage);

                    // Label -1 means no match passed the recognizer's threshold.
                    if (resultEigen.Label != -1)
                    {
                        userDetected.Add(DataStoreAccess.GetUsername(resultEigen.Label));
                    }
                    else
                    {
                        userDetected.Add("Unknown");
                    }
                }

                lbResult.Text = string.Join(",", userDetected);
            }
        }
Exemplo n.º 8
0
        /// <summary>
        /// Grabs one frame from the default camera, detects the first face,
        /// stores it under the name typed into FaceName, and re-trains the
        /// Eigen recognizer on all collected faces.
        /// </summary>
        private void DetectAndTrain_Click(object sender, EventArgs e)
        {
            using (var capture = new VideoCapture()) {
                Thread.Sleep(PluginOptions.CameraDelayMs);
                // FIX: guard the raw frame before dereferencing it — QueryFrame()
                // can return null, and the old code called ToImage on it first.
                Mat rawFrame = capture.QueryFrame();
                if (rawFrame == null)
                {
                    return;
                }
                using (Image <Bgr, byte> imageFrame = rawFrame.ToImage <Bgr, byte>()) {
                    using (Image <Gray, byte> grayframe = imageFrame.Convert <Gray, byte>()) {
                        if (PluginOptions.UseImageCorrection)
                        {
                            grayframe._EqualizeHist();
                        }
                        Rectangle[] part1;
                        using (var classifier = new CascadeClassifier(PluginOptions.CascadesPath + "haarcascade_frontalface_default.xml")) {
                            part1 = classifier.DetectMultiScale(grayframe, 1.1, 10);
                        }
                        if (part1.Length > 0)
                        {
                            Rectangle face = part1[0]; // take the first detected face only
                            // FIX: this image must NOT be wrapped in a using block —
                            // it is kept in TrainedImages and used by Train() below,
                            // and the old code disposed it before training ran.
                            Image <Gray, byte> resultingImage = imageFrame.Copy(face).Convert <Gray, byte>().Resize(100, 100, Inter.Cubic);
                            imageFrame.Draw(face, new Bgr(Color.Blue), 2);
                            TestImage.Image        = imageFrame;
                            DetectedGrayFace.Image = resultingImage;

                            #region Adding the face

                            if (string.IsNullOrEmpty(FaceName.Text))
                            {
                                MessageBox.Show(
                                    "Сначала введите имя распознанного лица", "Имя лица не указано", MessageBoxButtons.OK, MessageBoxIcon.Warning
                                    );
                                resultingImage.Dispose(); // not stored — release it on the early exit
                                return;
                            }
                            TrainedImages.Add(resultingImage);
                            PluginOptions.PeopleFaces.Add(
                                PluginOptions.PeopleFaces.Count + 1,
                                FaceName.Text
                                );
                            TrainedImages.Last().Save($"{PluginOptions.PluginPath}Faces\\face{TrainedImages.Count}.bmp");

                            #endregion

                            PluginOptions.SaveOptionsToXml();

                            using (FaceRecognizer recognizer = new EigenFaceRecognizer()) {
                                recognizer.Train(TrainedImages.ToArray(), PluginOptions.PeopleFaces.Keys.ToArray());
                                recognizer.Write(PluginOptions.PluginPath + "SavedCascade.xml");
                            }
                            MessageBox.Show("Лицо успешно добавлено", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
                        }
                        else
                        {
                            MessageBox.Show("лиц не найдено", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                        }
                    }
                }
            }
        }
Exemplo n.º 9
0
        /// <summary>
        /// Loads every *.jpg under TrainedFaces and (re)trains the Eigen
        /// recognizer on them; returns true on success, false (after showing a
        /// message box) on failure.
        /// </summary>
        private bool TrainImage()
        {
            int    imageCount = 0;
            double threshold  = -1;  // NOTE(review): -1 as a distance threshold looks intentional — confirm.

            Trained.Clear();
            PersonLabel.Clear();
            try
            {
                string   path  = Directory.GetCurrentDirectory() + @"\TrainedFaces";
                string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);
                foreach (var file in files)
                {
                    Image <Gray, Byte> trainedImage = new Image <Gray, byte>(file);
                    Trained.Add(trainedImage);
                    // FIX: the label used to be added TWICE per image, making the
                    // label list twice as long as the image list and misaligning
                    // (or breaking) the training call below.
                    PersonLabel.Add(imageCount);

                    imageCount++;
                }
                EigenFaceRecognizer recognizer = new EigenFaceRecognizer(imageCount, threshold);
                recognizer.Train(Trained.ToArray(), PersonLabel.ToArray());

                isTrained = true;
                Debug.WriteLine(imageCount);
                Debug.WriteLine(isTrained);
                return(isTrained);
            }
            catch (Exception ex)
            {
                isTrained = false;
                MessageBox.Show("Erro" + ex.Message);
                return(false);
            }
        }
Exemplo n.º 10
0
        /// <summary>
        /// Trains an Eigen recognizer on every face stored in the database and
        /// saves the resulting model (timestamped) to the configured folder.
        /// </summary>
        public void TrainRecognizer()
        {
            var allFaces = new FRService().All();

            if (allFaces.Count > 0)
            {
                var faceImages = new Image <Gray, byte> [allFaces.Count];
                var faceLabels = new int[allFaces.Count];
                for (int i = 0; i < allFaces.Count; i++)
                {
                    // FIX: the old code wrote the blob into a MemoryStream and then
                    // constructed Bitmap(stream) with the position left at EOF, so
                    // GDI+ had nothing to read. Building the stream directly over
                    // the byte array starts at position 0, and the stream/bitmap/
                    // intermediate image are no longer leaked.
                    using (var stream = new MemoryStream(allFaces[i].Face))
                    using (var bitmap = new Bitmap(stream))
                    using (var faceImage = new Image <Gray, byte>(bitmap))
                    {
                        // Normalise every sample to 100x100 for training.
                        faceImages[i] = faceImage.Resize(100, 100, Inter.Cubic);
                    }
                    faceLabels[i] = (int)(allFaces[i].Id);
                }

                // can also try :LBPHFaceRecognizer
                var fr = new EigenFaceRecognizer();
                fr.Train(faceImages, faceLabels);

                var retPath   = ConfigurationManager.AppSettings["trainedPath"];
                var savedFile = retPath + $"{DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss")}_frModel";
                fr.Save(savedFile);

                MessageBox.Show($"Model trained successfully. saved into {savedFile}");
            }
            else
            {
                MessageBox.Show("No face found in db");
            }
        }
        // Start training from the collected faces.
        // The face recognition algorithm can be one of these and perhaps more, depending on your version of OpenCV, which must be atleast v2.4.1:
        //    "FaceRecognizer.Eigenfaces":  Eigenfaces, also referred to as PCA (Turk and Pentland, 1991).
        //    "FaceRecognizer.Fisherfaces": Fisherfaces, also referred to as LDA (Belhumeur et al, 1997).
        //    "FaceRecognizer.LBPH":        Local Binary Pattern Histograms (Ahonen et al, 2006).
        /// <summary>
        /// Creates and trains a face recognizer of the requested kind on the
        /// collected faces; returns null when the algorithm is unsupported.
        /// </summary>
        public static BasicFaceRecognizer LearnCollectedFaces(List <Mat> preprocessedFaces, List <int> faceLabels, string facerecAlgorithm = "FaceRecognizer.Eigenfaces")
        {
            BasicFaceRecognizer model = null;

            Debug.Log("Learning the collected faces using the [" + facerecAlgorithm + "] algorithm ...");

            if (facerecAlgorithm == "FaceRecognizer.Fisherfaces")
            {
                model = FisherFaceRecognizer.create();
            }
            else if (facerecAlgorithm == "FaceRecognizer.Eigenfaces")
            {
                model = EigenFaceRecognizer.create();
            }

            if (model == null)
            {
                Debug.LogError("ERROR: The FaceRecognizer algorithm [" + facerecAlgorithm + "] is not available in your version of OpenCV. Please update to OpenCV v2.4.1 or newer.");
                // FIX: the old code fell through and called train() on the null
                // model; bail out so the caller handles the error instead of
                // crashing with a NullReferenceException.
                return null;
            }

            // Do the actual training from the collected faces. Might take several seconds or minutes depending on input!
            MatOfInt labels = new MatOfInt();

            labels.fromList(faceLabels);
            model.train(preprocessedFaces, labels);

            return(model);
        }
Exemplo n.º 12
0
        //  private static readonly int threshold = 10000;
        // higher threshold - more chances to recognize a face (sometimes incorrectly);

        /// <summary>
        /// Creates a fresh Eigen recognizer (80 components, distance threshold
        /// 4000) and writes its initial state to the data folder.
        /// </summary>
        public static EigenFaceRecognizer NewEigen()
        {
            var recognizer = new EigenFaceRecognizer(80, 4000);

            // NOTE(review): this writes the recognizer's state before any
            // training — presumably to pre-create the yaml file; confirm.
            recognizer.Write(Application.StartupPath + "/data/recognizer.yaml");
            return recognizer;
        }
Exemplo n.º 13
0
 /// <summary>
 /// Trains recognizer on fetched face-label pairs and saves the trained data to recognition variables
 /// </summary>
 public void TrainRecognizer()
 {
     recog = new EigenFaceRecognizer();
     // Train on the previously fetched images (imgs) and their labels (ints).
     recog.Train <Gray, byte>(imgs.ToArray(), ints);
     // NOTE(review): these message boxes look like debug leftovers — consider removing.
     MessageBox.Show("aww yes");
     // Persist the trained model so it can be reloaded later.
     recog.Save("trainingset/test.frl");
     MessageBox.Show("tuwid na daan o");
 }
        /// <summary>
        /// Minimal EigenFaceRecognizer sample (OpenCVForUnity): trains on two
        /// grayscale images, predicts the label of a test sample, then renders
        /// the predicted training image next to the test sample (with the
        /// confidence overlaid) onto this GameObject's material texture.
        /// </summary>
        private void Run()
        {
            List <Mat> images     = new List <Mat> ();
            List <int> labelsList = new List <int> ();
            MatOfInt   labels     = new MatOfInt();

            // Training set: two images loaded with flag 0 (grayscale), labels 0 and 1.
            images.Add(Imgcodecs.imread(image_0_filepath, 0));
            images.Add(Imgcodecs.imread(image_1_filepath, 0));
            labelsList.Add(0);
            labelsList.Add(1);
            labels.fromList(labelsList);

            Mat testSampleMat   = Imgcodecs.imread(sample_image_filepath, 0);
            int testSampleLabel = 0;


            //                      foreach (Mat item in images) {
            //                              Debug.Log ("images.ToString " + item.ToString ());
            //                      }
            //                      foreach (int item in labelsList) {
            //                              Debug.Log ("labels.ToString " + item.ToString ());
            //                      }

            // predict() fills these single-element output arrays.
            int[]    predictedLabel      = new int[1];
            double[] predictedConfidence = new double[1];


            BasicFaceRecognizer faceRecognizer = EigenFaceRecognizer.create();

            faceRecognizer.train(images, labels);

            faceRecognizer.predict(testSampleMat, predictedLabel, predictedConfidence);


            Debug.Log("Predicted class: " + predictedLabel [0] + " / " + "Actual class: " + testSampleLabel);
            Debug.Log("Confidence: " + predictedConfidence [0]);


            // Compose the predicted training image and the test sample side by
            // side into one single-channel canvas.
            Mat predictedMat = images [predictedLabel [0]];

            Mat baseMat = new Mat(testSampleMat.rows(), predictedMat.cols() + testSampleMat.cols(), CvType.CV_8UC1);

            predictedMat.copyTo(baseMat.submat(new OpenCVForUnity.CoreModule.Rect(0, 0, predictedMat.cols(), predictedMat.rows())));
            testSampleMat.copyTo(baseMat.submat(new OpenCVForUnity.CoreModule.Rect(predictedMat.cols(), 0, testSampleMat.cols(), testSampleMat.rows())));

            // Caption each half and print the prediction confidence.
            Imgproc.putText(baseMat, "Predicted", new Point(10, 15), Imgproc.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255), 1, Imgproc.LINE_AA, false);
            Imgproc.putText(baseMat, "Confidence:", new Point(5, 25), Imgproc.FONT_HERSHEY_SIMPLEX, 0.2, new Scalar(255), 1, Imgproc.LINE_AA, false);
            Imgproc.putText(baseMat, "   " + predictedConfidence [0], new Point(5, 33), Imgproc.FONT_HERSHEY_SIMPLEX, 0.2, new Scalar(255), 1, Imgproc.LINE_AA, false);
            Imgproc.putText(baseMat, "TestSample", new Point(predictedMat.cols() + 10, 15), Imgproc.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255), 1, Imgproc.LINE_AA, false);


            // Convert the canvas to a Unity texture and show it on this object.
            Texture2D texture = new Texture2D(baseMat.cols(), baseMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(baseMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
Exemplo n.º 15
0
        /// <summary>
        /// Loads all training images from the Attendance database, trains the
        /// Eigen recognizer on the detected face regions and starts the
        /// 1 ms capture timer.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            capture           = new VideoCapture();
            Frontface_Cascade = new CascadeClassifier(@"haarcascades/haarcascade_frontalface_default.xml");
            var connectionstring = ConfigurationManager.ConnectionStrings["Test"].ConnectionString;

            using (SqlConnection connection = new SqlConnection(connectionstring))
            {
                connection.Open();
                string query = "SELECT * FROM Attendance.dbo.TrainingData Order by StudentID";
                using (SqlCommand command = new SqlCommand(query, connection))
                {
                    using (SqlDataReader reader = command.ExecuteReader())
                    {
                        // Row layout: 0 = StudentID (int), 1 = name, 2 = image blob.
                        while (reader.Read())
                        {
                            labels.Add(reader.GetInt32(0));
                            NamePersons.Add(reader.GetString(1));
                            byte[] blob = (byte[])reader.GetValue(2);
                            var image = ByteToImage(blob);
                            Image <Gray, Byte> newImage = new Image <Gray, Byte>(image);
                            trainingImages.Add(newImage);
                        }
                    }
                }
            }

            // NOTE(review): a single Eigen component (first argument) is unusual —
            // confirm this is intentional.
            recognizer = new EigenFaceRecognizer(1, 5000);

            Mat[] faceImages = new Mat[trainingImages.Count];

            for (int i = 0; i < trainingImages.Count; i++)
            {
                // NOTE(review): if several faces are detected in one image, only
                // the last rectangle is kept as the ROI.
                var face = Frontface_Cascade.DetectMultiScale(trainingImages[i]);
                foreach (var Tface in face)
                {
                    trainingImages[i].ROI = Tface;
                }
                gray          = trainingImages[i].Clone().Resize(200, 200, 0);
                faceImages[i] = gray.Mat;
            }

            // FIX: the label array used to be rebuilt labels.Count times inside a
            // pointless loop; a single ToArray() is equivalent.
            int[] faceLabels = labels.ToArray();

            recognizer.Train(faceImages, faceLabels);
            logger.Info("Trained Face Recognizer");
            timer          = new DispatcherTimer();
            timer.Tick    += new EventHandler(timer_Tick);
            timer.Interval = new TimeSpan(0, 0, 0, 0, 1);
            timer.Start();
        }
Exemplo n.º 16
0
        /// <summary>
        /// Builds a new Eigen recognizer with 7 components and the configured
        /// distance threshold.
        /// </summary>
        public static EigenFaceRecognizer NewEigen()
        {
            // The component count / threshold were once read from configuration:
            //      int.Parse(ConfigurationManager.AppSettings["RecognizerComponentsNum"]),
            //      int.Parse(ConfigurationManager.AppSettings["RecognizerThreshold"]));
            var recognizer = new EigenFaceRecognizer(7, tre);

            return recognizer;
        }
Exemplo n.º 17
0
 /// <summary>
 /// Recognises the faces in a single image.
 /// </summary>
 /// <param name="fileName">Image file to analyse.</param>
 /// <param name="labelsFileName">File mapping label ids to label text.</param>
 /// <param name="recognizerFileName">Trained recognizer data file to load.</param>
 private void RecognizeFaces(string fileName, string labelsFileName, string recognizerFileName)
 {
     _faceLabels = GetLabels(labelsFileName);
     using (EigenFaceRecognizer faceRecognizer = new EigenFaceRecognizer())
     {
         // NOTE(review): _currentFaceRecognizer still points at this recognizer
         // after the using block disposes it — confirm nothing reads it later.
         _currentFaceRecognizer = faceRecognizer;
         faceRecognizer.Read(recognizerFileName);
         RecognizeFaces(fileName);
     }
 }
Exemplo n.º 18
0
        /// <summary>
        /// Wires up the Eigen recognizer plus the Haar cascades for face and eye
        /// detection, then starts capturing frames immediately.
        /// </summary>
        public FaceClassifier()
        {
            // 80 Eigen components, no distance cut-off (every prediction is accepted).
            _faceRecognition = new EigenFaceRecognizer(80, double.PositiveInfinity);
            _faceDetection   = new CascadeClassifier(Path.GetFullPath(@"haarcascades/haarcascade_frontalface_default.xml"));
            _eyeDetection    = new CascadeClassifier(Path.GetFullPath(@"haarcascades/haarcascade_eye.xml"));

            Faces  = new List <Face>();
            _frame = new Mat();
            StartCapture();
        }
Exemplo n.º 19
0
        /// <summary>
        /// Re-trains the recognizer from the face files on disk when the trainer
        /// form closes; warns the user if the stored data could not be used.
        /// </summary>
        private void TrainerForm_FormClosing(object sender, FormClosingEventArgs e)
        {
            EigenFaceRecognizer recognizer = Recognizer.NewEigen();
            bool trained = Recognizer.TrainRecognizer(recognizer, ImageHandler.GetGrayFaceArrayFromFiles(), ImageHandler.GetLabelArrayFromFiles());

            if (!trained)
            {
                MessageBox.Show("Corrupted data");
            }
        }
Exemplo n.º 20
0
        /// <summary>
        /// Detects faces in the current frame (legacy Emgu 2.x Haar API),
        /// recognises each one against the training set and shows the extracted
        /// names on the form.
        /// </summary>
        private void DetectFaces()
        {
            NamePersons.Add("");
            Image <Gray, byte> grayframe = the_image_frame.Convert <Gray, byte>();

            ImgCamera.Image = the_image_frame;
            MCvAvgComp[][] facesDetected = grayframe.DetectHaarCascade(
                face,
                1.1,
                2,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));
            lblTotalFacesDetected.Text = facesDetected[0].Length.ToString();
            if (facesDetected[0].Length > 0)
            {
                // FIX: build the recognizer once per call instead of once per
                // detected face — the training data does not change in the loop.
                EigenFaceRecognizer recognizer = null;
                if (TrainingImages.ToArray().Length != 0)
                {
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.01);
                    recognizer = new EigenFaceRecognizer(
                        TrainingImages.ToArray(),
                        labels.ToArray(),
                        5000,
                        ref termCrit
                        );
                }
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t      = t + 1;
                    result = the_image_frame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    the_image_frame.Draw(f.rect, new Bgr(Color.Green), 3);
                    if (recognizer != null)
                    {
                        name = recognizer.Recognize(result);
                        if (string.IsNullOrEmpty(name) == true)
                        {
                            name = "UNKNOWN";
                        }
                        the_image_frame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Red));
                    }
                    //Now draw the rectangle on the detected image
                    NamePersons[t - 1] = name;
                    NamePersons.Add("");
                }
                lblTotalFacesDetected.Text = facesDetected[0].Length.ToString();
            }
            t = 0;
            ImgTrainedFaces.Image = result;
            for (int namelabel = 0; namelabel < facesDetected[0].Length; namelabel++)
            {
                names = names + NamePersons[namelabel] + ",";
            }
            // FIX: when no face was detected, names is empty and the old
            // names.Remove(names.Length - 1) threw ArgumentOutOfRangeException.
            string names_without_comma_at_the_end = names.Length > 0 ? names.Remove(names.Length - 1) : "";

            lblNamesExtracted.Text = names_without_comma_at_the_end;
            names = "";
            NamePersons.Clear();
        }
Exemplo n.º 21
0
 /// <summary>
 /// Initialises the form, the Eigen recognizer and the Haar cascades for face
 /// and eye detection, then starts the camera loop.
 /// </summary>
 public Form1()
 {
     InitializeComponent();
     // 80 Eigen components, no distance cut-off (every prediction is accepted).
     FacialRecognition = new EigenFaceRecognizer(80, double.PositiveInfinity);
     FaceDetection     = new CascadeClassifier(System.IO.Path.GetFullPath(@"../../Data/haarcascade_frontalface_default.xml"));
     EyeDetection      = new CascadeClassifier(System.IO.Path.GetFullPath(@"../../Data/haarcascade_eye.xml"));
     Frame             = new Mat();
     Faces             = new List <Image <Gray, byte> >();
     Ids = new List <int>();
     BeginCamera();
 }
Exemplo n.º 22
0
 /// <summary>
 /// Creates the Eigen recognizer and best-effort loads previously trained
 /// data from the user's Documents folder; a missing or unreadable file just
 /// leaves the recognizer untrained.
 /// </summary>
 public JAVSFacialRecognizer()
 {
     _faceRecognizer = new EigenFaceRecognizer();
     try
     {
         _faceRecognizer.Load(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile) + "\\Documents\\test-recognizer.yaml");
     }
     catch
     {
         // Deliberate best-effort: swallow load failures and start untrained.
         // (The old code assigned an unused local string here.)
     }
 }
Exemplo n.º 23
0
 /// <summary>
 /// Runs face recognition on every matching image file in a directory.
 /// </summary>
 /// <param name="directoryName">Directory to scan.</param>
 /// <param name="labelsFileName">File mapping label ids to label text.</param>
 /// <param name="recognizerFileName">Trained recognizer data file to load.</param>
 /// <param name="searchPattern">File pattern to match (default *.jpg).</param>
 /// <param name="recursive">Whether to descend into subdirectories.</param>
 public void RecognizeFacesInDirectory(string directoryName, string labelsFileName, string recognizerFileName, string searchPattern = "*.jpg", bool recursive = false)
 {
     _faceLabels = GetLabels(labelsFileName);
     using (EigenFaceRecognizer faceRecognizer = new EigenFaceRecognizer())
     {
         _currentFaceRecognizer = faceRecognizer;
         faceRecognizer.Read(recognizerFileName);

         SearchOption searchOption = recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly;
         string[] imageFiles = Directory.GetFiles(directoryName, searchPattern, searchOption);
         foreach (string imageFile in imageFiles)
         {
             RecognizeFaces(imageFile);
         }
     }
 }
Exemplo n.º 24
0
        /// <summary>
        /// Predicts the identity of a face image; returns the matched label id,
        /// or 0 when no match passes the distance threshold.
        /// </summary>
        public int RecognizeFace(Image <Gray, byte> image)
        {
            EigenFaceRecognizer recognizer = OldEigen();

            FaceRecognizer.PredictionResult prediction = recognizer.Predict(image);

            Console.WriteLine("ID: " + prediction.Label + ", " + "Threshold:  " + prediction.Distance);

            // Label -1 means "no match"; also reject matches beyond the threshold.
            bool accepted = prediction.Label != -1 && prediction.Distance < treshold; //int.Parse(ConfigurationManager.AppSettings["RecognizerThreshold"]))
            return accepted ? prediction.Label : 0;
        }
Exemplo n.º 25
0
        /// <summary>
        /// Loads the stored trained-data file for whichever recognizer is
        /// selected in the algorithm combo box (Eigen, Fisher or LBPH).
        /// </summary>
        private void button2_Click(object sender, EventArgs e)
        {
            // Shared warning shown when a trained data file cannot be loaded.
            // FIX: the caption previously read "Triained faces load" (typo),
            // and the text/caption were duplicated three times.
            const string loadFailedText  = "Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).";
            const string loadFailedTitle = "Trained faces load";

            if (comboBoxAlgorithm.Text == "EigenFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataEigen.dat";
                    eigenFaceRecognizer = new EigenFaceRecognizer(eigenTrainedImageCounter, 3000);
                    eigenFaceRecognizer.Load(dataDirectory);
                    richTextBox1.Text += "Trained Database Loaded.";
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show(loadFailedText, loadFailedTitle, MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }

            else if (comboBoxAlgorithm.Text == "FisherFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataFisher.dat";

                    fisherFaceRecognizer = new FisherFaceRecognizer(fisherTrainedImageCounter, 3000);
                    fisherFaceRecognizer.Load(dataDirectory);
                    richTextBox1.Text += "Trained Database Loaded.";
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show(loadFailedText, loadFailedTitle, MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }

            else if (comboBoxAlgorithm.Text == "LBPHFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataLBPH.dat";
                    lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
                    lbphFaceRecognizer.Load(dataDirectory);
                    richTextBox1.Text += "Trained Database Loaded.";
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show(loadFailedText, loadFailedTitle, MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }
        }
Exemplo n.º 26
0
        public FindPersonOnImage()
        {
            InitializeComponent();
            TrainedFaces.Clear();
            FaceIDs.Clear();
            PersonsNames.Clear();

            // Load every person once; the DbContext is disposed right after.
            using (ApplicationContext db = new ApplicationContext())
            {
                db.Persons.Load();
                Persons = db.Persons.Local.ToList();
            }

            int faceCount = 0;

            foreach (var person in Persons)
            {
                var img = new Bitmap(BitmapHelpers.byteArrayToImage(person.PersonPhoto));
                Image <Gray, byte> imageForTrain = img.ToImage <Gray, byte>();

                System.Drawing.Rectangle[] faces = faceCascadeClassifier.DetectMultiScale(imageForTrain, 1.1, 3, System.Drawing.Size.Empty, System.Drawing.Size.Empty);

                foreach (var face in faces)
                {
                    faceResult     = imageForTrain;
                    faceResult.ROI = face;

                    // Normalize every sample to 200x200 + equalized histogram.
                    CvInvoke.Resize(faceResult, faceResult, new System.Drawing.Size(200, 200), 0, 0, Inter.Cubic);
                    CvInvoke.EqualizeHist(faceResult, faceResult);
                    TrainedFaces.Add(faceResult.Mat);

                    // BUG FIX: add one label per trained face (all faces in this
                    // photo belong to the same person). The original added one
                    // label per person, which desynchronized TrainedFaces and
                    // FaceIDs whenever a photo contained zero or several faces.
                    FaceIDs.Add(faceCount);
                }

                PersonsNames.Add(person.FirstName + " " + person.LastName);
                faceCount++;
                Debug.WriteLine(faceCount + ". " + person.FirstName + " " + person.LastName);
            }

            // BUG FIX: train once after all faces have been collected. The
            // original re-created and re-trained the recognizer inside the
            // person loop (O(n^2) training work) and also re-ran
            // InitializeComponent() on every iteration.
            double threshold = 2000;

            if (TrainedFaces.Count > 0)
            {
                recognizer = new EigenFaceRecognizer(faceCount, threshold);
                recognizer.Train(TrainedFaces.ToArray(), FaceIDs.ToArray());
            }
        }
Exemplo n.º 27
0
 public MainWindowViewModel()
 {
     _dataStoreAccess = new DataStoreAccess(_databasePath);

     // Always start from a fresh Eigen recognizer; restore previously
     // trained state only when a saved model file exists on disk.
     _faceRecognizer = new EigenFaceRecognizer();
     if (File.Exists(_faceRecognizerPath))
     {
         _faceRecognizer.Read(_faceRecognizerPath);
     }

     InitializeFaceDetection();
 }
Exemplo n.º 28
0
        //4: train hình //sử dụng các hình ảnh đã lưu từ bước 3
        // 4: Train the recognizers from the images saved in step 3.
        // Loads every *.jpg under .\TrainedImages, normalizes it (200x200,
        // equalized histogram), labels each image with its load index, and
        // derives the display name from the file-name prefix before '_'.
        // Returns true and sets isTrained when at least one image was trained;
        // returns false (and shows a message box on error) otherwise.
        private bool TrainImagesFromDir()
        {
            int    ImagesCount = 0;
            double Threshold   = 7000;

            TrainedFaces.Clear();
            PersonsLabes.Clear();
            PersonsNames.Clear();
            try
            {
                string   path  = Directory.GetCurrentDirectory() + @"\TrainedImages";
                string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);

                foreach (var file in files)
                {
                    Image <Gray, byte> trainedImage = new Image <Gray, byte>(file).Resize(200, 200, Inter.Cubic);
                    CvInvoke.EqualizeHist(trainedImage, trainedImage);
                    TrainedFaces.Add(trainedImage);
                    PersonsLabes.Add(ImagesCount);

                    // Robust filename extraction: Path.GetFileName handles both
                    // '\' and '/' separators (the original Split('\\') broke on
                    // forward-slash paths). Name convention: "<name>_<n>.jpg".
                    string name = Path.GetFileName(file).Split('_')[0];
                    PersonsNames.Add(name);
                    ImagesCount++;
                    Debug.WriteLine(ImagesCount + ". " + name);
                }

                // Use the Count property instead of the LINQ Count() extension.
                if (TrainedFaces.Count > 0)
                {
                    recognizer = new EigenFaceRecognizer(ImagesCount, Threshold);
                    recognizer.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());
                    recognizerLBPH = new LBPHFaceRecognizer(ImagesCount, 10, 10, 10, Threshold);
                    recognizerLBPH.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());
                    isTrained = true;
                    return(true);
                }
                else
                {
                    isTrained = false;
                    return(false);
                }
            }
            catch (Exception ex)
            {
                isTrained = false;
                MessageBox.Show("Error in Train Images: " + ex.Message);
                return(false);
            }
        }
Exemplo n.º 29
0
        // Classifies a face image against the model previously saved to
        // "<startup>/traineddata" and returns the predicted label.
        // NOTE(review): the recognizer is rebuilt and reloaded from disk on
        // every call — matches the original behavior.
        public int RecognizeUser(Image <Gray, byte> userImage)
        {
            // Eigen recognizer: 80 components, distance threshold 1000.
            FaceRecognizer recognizer = new EigenFaceRecognizer(80, 1000);
            string trainedDataPath = Application.StartupPath + "/traineddata";
            recognizer.Load(trainedDataPath);

            // Normalize the probe to the training size before predicting.
            var probe      = userImage.Resize(100, 100, Inter.Cubic);
            var prediction = recognizer.Predict(probe);

            return(prediction.Label);
        }
Exemplo n.º 30
0
        public Form1()
        {
            InitializeComponent();

            // Eigen recognizer: 80 components, distance threshold 2500.
            FaceRecognition = new EigenFaceRecognizer(80, 2500);

            // Haar cascades for face and eye localisation, resolved relative
            // to the build output directory.
            FaceDetection = new CascadeClassifier(System.IO.Path.GetFullPath(@"../../Algo/haarcascade_frontalface_default.xml"));
            EyeDetection  = new CascadeClassifier(System.IO.Path.GetFullPath(@"../../Algo/haarcascade_eye.xml"));

            // Fresh state for the capture/training session.
            Frame       = new Mat();
            Faces       = new List <Mat>();
            IDs         = new List <int>();
            listOfNames = new List <string>();
            listOfIds   = new List <int>();

            BeginWebcam();
        }