Example #1
        private void DetectAndTrain_Click(object sender, EventArgs e)
        {
            using (var capture = new VideoCapture()) {
                Thread.Sleep(PluginOptions.CameraDelayMs);
                using (Image <Bgr, byte> imageFrame = capture.QueryFrame().ToImage <Bgr, byte>()) {
                    if (imageFrame == null)
                    {
                        return;
                    }
                    using (Image <Gray, byte> grayframe = imageFrame.Convert <Gray, byte>()) {
                        if (PluginOptions.UseImageCorrection)
                        {
                            grayframe._EqualizeHist();
                        }
                        Rectangle[] part1;
                        using (var classifier = new CascadeClassifier(PluginOptions.CascadesPath + "haarcascade_frontalface_default.xml")) {
                            part1 = classifier.DetectMultiScale(grayframe, 1.1, 10);
                        }
                        if (part1.Length > 0)
                        {
                            Rectangle face = part1[0]; // pick the first detected face
                            using (Image <Gray, byte> resultingImage = imageFrame.Copy(face).Convert <Gray, byte>().Resize(100, 100, Inter.Cubic)) {
                                imageFrame.Draw(face, new Bgr(Color.Blue), 2);
                                TestImage.Image        = imageFrame;
                                DetectedGrayFace.Image = resultingImage;

                                #region Add face

                                if (string.IsNullOrEmpty(FaceName.Text))
                                {
                                    MessageBox.Show(
                                        "Сначала введите имя распознанного лица", "Имя лица не указано", MessageBoxButtons.OK, MessageBoxIcon.Warning
                                        );
                                    return;
                                }
                                TrainedImages.Add(resultingImage.Clone()); // clone: resultingImage is disposed when this using block ends
                                PluginOptions.PeopleFaces.Add(
                                    PluginOptions.PeopleFaces.Count + 1,
                                    FaceName.Text
                                    );
                                TrainedImages.Last().Save($"{PluginOptions.PluginPath}Faces\\face{TrainedImages.Count}.bmp");

                                #endregion
                            }
                            PluginOptions.SaveOptionsToXml();

                            using (FaceRecognizer recognizer = new EigenFaceRecognizer()) {
                                recognizer.Train(TrainedImages.ToArray(), PluginOptions.PeopleFaces.Keys.ToArray());
                                recognizer.Write(PluginOptions.PluginPath + "SavedCascade.xml");
                            }
                            MessageBox.Show("Лицо успешно добавлено", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
                        }
                        else
                        {
                            MessageBox.Show("лиц не найдено", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                        }
                    }
                }
            }
        }
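Example #1 only covers training. Below is a minimal sketch of the matching recognition step, assuming the same PluginOptions members and an extra button handler; the Read/Predict calls follow the usage seen in the later examples and the handler itself is not part of the original plugin.

        // Hypothetical counterpart to the training handler above: reload the model it
        // wrote and recognise the first face in a fresh frame. Assumes the same
        // PluginOptions members; Read/Predict follow the usage in the later examples.
        private void DetectAndRecognize_Click(object sender, EventArgs e)
        {
            using (var capture = new VideoCapture())
            using (var recognizer = new EigenFaceRecognizer())
            using (var classifier = new CascadeClassifier(PluginOptions.CascadesPath + "haarcascade_frontalface_default.xml"))
            {
                recognizer.Read(PluginOptions.PluginPath + "SavedCascade.xml"); // model written by DetectAndTrain_Click

                Thread.Sleep(PluginOptions.CameraDelayMs);
                using (Image<Bgr, byte> frame = capture.QueryFrame()?.ToImage<Bgr, byte>())
                {
                    if (frame == null)
                    {
                        return;
                    }
                    using (Image<Gray, byte> gray = frame.Convert<Gray, byte>())
                    {
                        Rectangle[] faces = classifier.DetectMultiScale(gray, 1.1, 10);
                        if (faces.Length == 0)
                        {
                            return;
                        }
                        using (Image<Gray, byte> face = gray.Copy(faces[0]).Resize(100, 100, Inter.Cubic))
                        {
                            var result = recognizer.Predict(face); // result.Label / result.Distance
                            MessageBox.Show(PluginOptions.PeopleFaces.ContainsKey(result.Label)
                                ? $"Recognized: {PluginOptions.PeopleFaces[result.Label]} (distance {result.Distance:F0})"
                                : "Face not recognized");
                        }
                    }
                }
            }
        }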
Example #2
        public void TrainRecognizer()
        {
            var allFaces = new FRService().All();

            if (allFaces.Count > 0)
            {
                var faceImages = new Image <Gray, byte> [allFaces.Count];
                var faceLabels = new int[allFaces.Count];
                for (int i = 0; i < allFaces.Count; i++)
                {
                    Stream stream = new MemoryStream();
                    stream.Write(allFaces[i].Face, 0, allFaces[i].Face.Length);
                    var faceImage = new Image <Gray, byte>(new Bitmap(stream));
                    faceImages[i] = faceImage.Resize(100, 100, Inter.Cubic);
                    faceLabels[i] = (int)(allFaces[i].Id);
                }

                // can also try :LBPHFaceRecognizer
                var fr = new EigenFaceRecognizer();
                fr.Train(faceImages, faceLabels);

                var retPath   = ConfigurationManager.AppSettings["trainedPath"];
                var savedFile = retPath + $"{DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss")}_frModel";
                fr.Save(savedFile);

                MessageBox.Show($"Model trained successfully. saved into {savedFile}");
            }
            else
            {
                MessageBox.Show("No face found in db");
            }
        }
Example #3
        public static void Test()
        {
            using (Mat image = new Mat("testGamePic.jpg"))
            {
                using (Mat uimg = new Mat())
                {
                    using (CascadeClassifier face = new CascadeClassifier("haarcascade_frontalface_default.xml"))
                    {
                        CvInvoke.CvtColor(image, uimg, ColorConversion.Bgr2Gray);
                        CvInvoke.EqualizeHist(uimg, uimg);

                        Rectangle[] facesDetected = face.DetectMultiScale(uimg, 1.1, 10, new Size(20, 20));


                        EigenFaceRecognizer efr = new EigenFaceRecognizer();

                        efr.Train(new VectorOfMat(uimg), new VectorOfInt(new int[] { 1 }));
                        var res = efr.Predict(uimg);


                        return;
                    }
                }
            }
        }
Example #4
        private bool TrainImage()
        {
            int    imageCount = 0;
            double Threshold  = -1;

            Trained.Clear();
            PersonLabel.Clear();
            try
            {
                string   path  = Directory.GetCurrentDirectory() + @"\TrainedFaces";
                string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);
                foreach (var file in files)
                {
                    Image <Gray, Byte> trainedImage = new Image <Gray, byte>(file);
                    Trained.Add(trainedImage);
                    PersonLabel.Add(imageCount); // one label per training image

                    imageCount++;
                }
                EigenFaceRecognizer recognizer = new EigenFaceRecognizer(imageCount, Threshold);
                recognizer.Train(Trained.ToArray(), PersonLabel.ToArray());

                isTrained = true;
                Debug.WriteLine(imageCount);
                Debug.WriteLine(isTrained);
                return(isTrained = true);
            }
            catch (Exception ex)
            {
                isTrained = false;
                MessageBox.Show("Erro" + ex.Message);
                return(false);
            }
        }
        public bool TrainRecognizer(Image <Gray, byte>[] faceImages, int[] faceLabels)
        {
            _faceRecognizer.Train(faceImages, faceLabels);
            _faceRecognizer.Save(_recognizerFilePath);

            return(true);
        }
        public void TrainImages()
        {
            string path = Application.StartupPath + @"/../../Images/";

            string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);
            //   int[] labelsDb = _context.Labels.Select(_ => _.LabelNumber).ToArray();
            List <int> labelsDb = new List <int>();

            Mat[] matImages = new Mat[files.Length];



            for (int i = 0; i < files.Length; i++)
            {
                matImages[i] = new Image <Gray, byte>(files[i]).Mat;
                string[] strings = files[i].Split('-');
                string   number  = strings[strings.Length - 1].Split('.')[0];
                labelsDb.Add(int.Parse(number));
            }


            VectorOfMat images = new VectorOfMat(matImages);
            VectorOfInt labels = new VectorOfInt(labelsDb.ToArray());

            faceRecognizer.Train(images, labels);
            faceRecognizer.Write(Application.StartupPath + @"/../../Images/eigenRecognizer.yml");
            fisherRecognizer.Train(images, labels);
            fisherRecognizer.Write(Application.StartupPath + @"/../../Images/fisherRecognizer.yml");
            LBPHFaceRecognizer.Train(images, labels);
            LBPHFaceRecognizer.Write(Application.StartupPath + @"/../../Images/lpbhRecognizer.yml");
            isTrained = true;
        }
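In TrainImages above, each label is parsed from the file name (the digits between the last '-' and the extension). A hypothetical helper that makes the assumed naming convention explicit:

        // Hypothetical illustration of the file-name convention TrainImages relies on:
        // "maria-12.jpg" -> label 12, "john-1.jpg" -> label 1.
        private static int LabelFromFileName(string file)
        {
            string[] strings = file.Split('-');
            string number = strings[strings.Length - 1].Split('.')[0];
            return int.Parse(number);
        }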
Example #7
        private void EigenFaceRecognition(object sender, EventArgs e)
        {
            Frame = _capture.QueryFrame().ToImage <Bgr, byte>();
            var frame = Frame.Resize(frameW, frameH, Inter.Cubic);

            grayFrame = frame.Convert <Gray, Byte>();
            var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

            foreach (var f in faces)
            {
                eigenFaceRecognizer = new EigenFaceRecognizer(Count, double.PositiveInfinity);
                eigenFaceRecognizer.Train(trainingImages.ToArray(), indexLabels.ToArray());

                var result = eigenFaceRecognizer.Predict(frame.Copy(f).Convert <Gray, Byte>().Resize(100, 100, Inter.Cubic));
                if (result.Label == -1)
                {
                    frame.Draw(f, new Bgr(Color.Red), 2);
                    frame.Draw("Unknown", new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
                }
                else
                {
                    frame.Draw(f, new Bgr(Color.Green), 2);
                    frame.Draw(nameLabels[result.Label], new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, LineType.EightConnected, false);
                }
                alertMessage.Text = (alert + "เริ่มการ Face Recognition ด้วยวิธีการ " + RecognitionType.Text + " แล้ว \r\n" + "Distance " + result.Distance + "\r\n Faces " + faces.Length.ToString());
            }
            imgFrame.Image = frame.Resize(imgBoxW, imgBoxH, Inter.Cubic);
        }
Example #8
        /// <summary>
        /// Creates a training file
        /// </summary>
        /// <param name="labels">Dictionary of label Id / label text</param>
        /// <param name="files">List of file / label Id pairs</param>
        /// <param name="labelsFileName">Name of the label Id / label mapping file</param>
        /// <param name="recognizerFileName">Name of the recognizer file</param>
        public void CreateTrainingFaceRecognizerFile(Dictionary <int, string> labels, List <Tuple <string, int> > files, string labelsFileName, string recognizerFileName)
        {
            List <Image <Gray, byte> > images = new List <Image <Gray, byte> >();
            List <int> labelIds = new List <int>();

            try
            {
                foreach (var item in files)
                {
                    images.Add(new Image <Bgr, byte>(item.Item1).Convert <Gray, byte>());
                    labelIds.Add(item.Item2);
                }

                using (EigenFaceRecognizer frz = new EigenFaceRecognizer(images.Count, 3000))
                {
                    frz.Train(images.ToArray(), labelIds.ToArray());
                    frz.Write(recognizerFileName);
                }
            }
            finally
            {
                images.ForEach(i => i.Dispose());
                images.Clear();
            }

            StringBuilder sb = new StringBuilder();

            foreach (var item in labels)
            {
                sb.AppendLine($"{item.Key}|{item.Value}");
            }
            File.WriteAllText(labelsFileName, sb.ToString());
        }
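A possible counterpart that reads the labels file back in; it only assumes the "Id|Label" line format written above, and the method name is made up for illustration:

        // Hypothetical reader for the label file produced by CreateTrainingFaceRecognizerFile:
        // each line is "<label id>|<label text>".
        public Dictionary<int, string> ReadTrainingLabelsFile(string labelsFileName)
        {
            var labels = new Dictionary<int, string>();
            foreach (string line in File.ReadAllLines(labelsFileName))
            {
                if (string.IsNullOrWhiteSpace(line))
                {
                    continue;
                }
                string[] parts = line.Split(new[] { '|' }, 2); // split on the first '|' only
                labels.Add(int.Parse(parts[0]), parts[1]);
            }
            return labels;
        }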
Example #9
        public void TrainRecognizer()
        {
            //  FaceRecognizer _faceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, double.PositiveInfinity);
            FaceRecognizer  _faceRecognizer  = new EigenFaceRecognizer(80, double.PositiveInfinity);
            DataStoreAccess _datastoreaccess = new DataStoreAccess();
            var             allFaces         = _datastoreaccess.callfaces();

            //  MessageBox.Show(allFaces.Count.ToString());
            if (allFaces != null)
            {
                var faceImages = new Image <Gray, byte> [allFaces.Count];
                var faceLabels = new int[allFaces.Count];
                for (int i = 0; i < allFaces.Count; i++)
                {
                    Stream stream = new MemoryStream();
                    stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
                    var faceImage = new Image <Gray, byte>(new Bitmap(stream));
                    // faceImages[i] = faceImage.Resize(100, 100, Inter.Cubic);
                    faceImages[i] = faceImage.Resize(100, 100, Inter.Cubic);
                    faceLabels[i] = Convert.ToInt32(allFaces[i].Label);
                }

                string filepath = Application.StartupPath + "/traineddata";
                // var filePath = Application.StartupPath + String.Format("/{0}.bmp", face);
                _faceRecognizer.Train(faceImages, faceLabels);

                _faceRecognizer.Save(filepath);
                // MessageBox.Show(allFaces[0].Label);
            }
            else
            {
                //MessageBox.Show("adasfsf");
            }
        }
Example #10
        // Training
        private void btnTrain_Click(object sender, EventArgs e)
        {
            string location = Application.StartupPath + @"/../../Images/";

            if (trainingImages.Count == 0)
            {
                string[] images = Directory.GetFiles(location, "*.bmp", SearchOption.TopDirectoryOnly);
                foreach (var image in images)
                {
                    trainingImages.Add(new Image <Gray, byte>(image));
                }
            }
            if (labels.Count == 0)
            {
                labels = File.ReadAllText(location + "Faces.txt").Split(',').Select(_ => int.Parse(_)).ToList();
            }



            Mat[] mats = new Mat[trainingImages.Count];
            for (int i = 0; i < mats.Count(); i++)
            {
                mats[i] = trainingImages[i].Mat.Clone();
            }
            VectorOfMat vMat = new VectorOfMat(mats);
            VectorOfInt vInt = new VectorOfInt(labels.ToArray());

            faceRecognizer.Train(vMat, vInt);
            faceRecognizer.Write(Application.StartupPath + @"/../../Images/Newfacerecognizer.yml");
            MessageBox.Show("Training done", "Info", MessageBoxButtons.OK);
            isTrained = true;
        }
Example #11
        public bool TrainRecognizer(int widthToProccess = 128, int heightToProccess = 128)
        {
            var allPeople = Storage.People;

            if (allPeople != null)
            {
                List <Bitmap> fullImageList = new List <Bitmap>();
                List <int>    fullIdList    = new List <int>();
                foreach (var person in allPeople)
                {
                    fullImageList.AddRange(person.Images);
                    foreach (var notUsed in person.Images)
                    {
                        fullIdList.Add(person.Id);
                    }
                }
                var grayScaleFaces = new Image <Gray, byte> [fullImageList.Count];
                for (int i = 0; i < fullImageList.Count; i++)
                {
                    Bitmap image = fullImageList[i];

                    var grayScaleFull = new Image <Gray, byte>(image);
                    var faceRects     = EmguSingleton.DetectFacesFromGrayscale(grayScaleFull);
                    if (faceRects.Length > 0)
                    {
                        grayScaleFaces[i] = grayScaleFull.Copy(faceRects[0]).Resize(widthToProccess, heightToProccess, Inter.Cubic);
                    }
                    else
                    {
                        grayScaleFaces[i] = grayScaleFull.Clone().Resize(widthToProccess, heightToProccess, Inter.Cubic);
                    }
                    grayScaleFull.Dispose();
                }
                _faceRecognizer.Train(grayScaleFaces, fullIdList.ToArray());
                _faceRecognizer.Write(_recognizerFilePath);
                foreach (var grayScaleFace in grayScaleFaces)
                {
                    grayScaleFace.Dispose();
                }
            }

            /*var allFaces = Storage.Faces;
             * if (allFaces != null)
             * {
             *  var faceImages = new Image<Gray, byte>[allFaces.Count()];
             *  var faceLabels = new int[allFaces.Count()];
             *  for (int i = 0; i < allFaces.Count(); i++)
             *  {
             *      Stream stream = new MemoryStream();
             *      stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
             *      var faceImage = new Image<Gray, byte>(new Bitmap(stream));
             *      faceImages[i] = faceImage.Resize(100, 100, Inter.Cubic);
             *      faceLabels[i] = allFaces[i].UserId;
             *  }
             *  _faceRecognizer.Train(faceImages, faceLabels);
             *  _faceRecognizer.Write(_recognizerFilePath);
             * }*/
            return(true);
        }
Example #12
 /// <summary>
 /// Trains recognizer on fetched face-label pairs and saves the trained data to recognition variables
 /// </summary>
 public void TrainRecognizer()
 {
     recog = new EigenFaceRecognizer();
     recog.Train <Gray, byte>(imgs.ToArray(), ints);
     MessageBox.Show("aww yes");
     recog.Save("trainingset/test.frl");
     MessageBox.Show("tuwid na daan o");
 }
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            capture           = new VideoCapture();
            Frontface_Cascade = new CascadeClassifier(@"haarcascades/haarcascade_frontalface_default.xml");
            var connectionstring = ConfigurationManager.ConnectionStrings["Test"].ConnectionString;

            using (SqlConnection connection = new SqlConnection(connectionstring))
            {
                connection.Open();
                string query = "SELECT * FROM Attendance.dbo.TrainingData Order by StudentID";
                using (SqlCommand command = new SqlCommand(query, connection))
                {
                    using (SqlDataReader reader = command.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            labels.Add(reader.GetInt32(0));
                            NamePersons.Add(reader.GetString(1));
                            byte[] blob = null;
                            blob = (byte[])reader.GetValue(2);
                            var image = ByteToImage(blob);
                            Image <Gray, Byte> newImage = new Image <Gray, Byte>(image);
                            trainingImages.Add(newImage);
                        }
                    }
                }
            }

            recognizer = new EigenFaceRecognizer(1, 5000);

            Mat[] faceImages = new Mat[trainingImages.Count];
            int[] faceLabels = new int[labels.Count];

            for (int i = 0; i < trainingImages.Count; i++)
            {
                var face = Frontface_Cascade.DetectMultiScale(trainingImages[i]);
                foreach (var Tface in face)
                {
                    trainingImages[i].ROI = Tface;
                }
                gray = trainingImages[i].Clone().Resize(200, 200, 0);
                Mat x = gray.Mat;
                faceImages[i] = x;
            }

            faceLabels = labels.ToArray();
            recognizer.Train(faceImages, faceLabels);
            logger.Info("Trained Face Recognizer");
            timer          = new DispatcherTimer();
            timer.Tick    += new EventHandler(timer_Tick);
            timer.Interval = new TimeSpan(0, 0, 0, 0, 1);
            timer.Start();
        }
Example #14
 public bool Train(byte[][] faces, int[] labels)
 {
     Image <Gray, byte>[] resizedFaces = new Image <Gray, byte> [faces.Length];
     for (int i = 0; i < faces.Length; i++)
     {
         resizedFaces[i] = StreamConverter.ByteToImageResize(faces[i]).Clone();
     }
     _faceRecognizer.Train(resizedFaces, labels);
     _faceRecognizer.Save(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile) + "\\Documents\\test-recognizer.yaml");
     return(true);
 }
Example #15
        public bool TrainRecognizer(IEnumerable <Face> Data)
        {
            try
            {
                if (_eigen == null)
                {
                    throw new Exception(ConfigurationManager.AppSettings["RecognizerError"]);
                }

                List <Image <Gray, byte> > FacesOnly = new List <Image <Gray, byte> >();
                int nameId = 0, j = 0;
                var FacesPhotos = new List <Image <Bgr, byte> >();
                var FacesNamesArray = new int[Data.Count()];
                _FacesNamesArray = new string[Data.Count()];

                Data.ToList().ForEach(f => FacesPhotos.Add(f.Photo.ByteArrayToImage()));
                // to do: remove face detection because we are giving a face photo already
                foreach (var face in FacesPhotos)
                {
                    var facePhoto = _faceDetectionService.DetectFaceAsGrayImage(face);

                    if (facePhoto != null)
                    {
                        FacesOnly.Add(facePhoto);
                    }
                }

                Data.ToList().ForEach(f =>
                {
                    if (!_FacesNamesArray.Contains(f.PersonName))
                    {
                        _FacesNamesArray[nameId] = f.PersonName;
                        FacesNamesArray[j]       = nameId + 1;
                        nameId++;
                    }
                    else
                    {
                        FacesNamesArray[j] = FindIndexInArray(_FacesNamesArray, f.PersonName) + 1;
                    }
                    j++;
                });

                _eigen.Train(FacesOnly.ToArray(), FacesNamesArray);
                _eigen.Write(_recognizerFileName);
                File.WriteAllLines(_recognizerFacesFileName, _FacesNamesArray, Encoding.UTF8);
                return(true);
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }
        }
Example #16
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            LoadKnownFaces();

            if (_knownFaces.Count > 0)
            {
                //var termCrit = new MCvTermCriteria(facesNames.Count, 0.001);
                var mats = new Mat[_knownFaces.Count];

                for (var index = 0; index < _knownFaces.Count; index++)
                {
                    mats[index] = _knownFaces[index].FaceImage.Mat;
                }

                _faceRecognizer.Train(mats, _knownFaces.Select(knownFace => knownFace.FaceId).ToArray());
            }

            _videoCapture = new VideoCapture(Config.ActiveCameraIndex);
            _videoCapture.SetCaptureProperty(CapProp.Fps, 30);
            _videoCapture.SetCaptureProperty(CapProp.FrameHeight, 450);
            _videoCapture.SetCaptureProperty(CapProp.FrameWidth, 370);
            _captureTimer.Start();
        }
Example #17
        //4: train the images // using the images saved in step 3
        private bool TrainImagesFromDir()
        {
            int    ImagesCount = 0;
            double Threshold   = 7000;

            TrainedFaces.Clear();
            PersonsLabes.Clear();
            PersonsNames.Clear();
            try
            {
                string   path  = Directory.GetCurrentDirectory() + @"\TrainedImages";
                string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);

                foreach (var file in files)
                {
                    Image <Gray, byte> trainedImage = new Image <Gray, byte>(file).Resize(200, 200, Inter.Cubic);
                    CvInvoke.EqualizeHist(trainedImage, trainedImage);
                    TrainedFaces.Add(trainedImage);
                    PersonsLabes.Add(ImagesCount);
                    string name = file.Split('\\').Last().Split('_')[0];
                    PersonsNames.Add(name);
                    ImagesCount++;
                    Debug.WriteLine(ImagesCount + ". " + name);
                }

                if (TrainedFaces.Count() > 0)
                {
                    // recognizer = new EigenFaceRecognizer(ImagesCount,Threshold);
                    recognizer = new EigenFaceRecognizer(ImagesCount, Threshold);
                    recognizer.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());
                    recognizerLBPH = new LBPHFaceRecognizer(ImagesCount, 10, 10, 10, Threshold);
                    recognizerLBPH.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());
                    isTrained = true;
                    //Debug.WriteLine(ImagesCount);
                    //Debug.WriteLine(isTrained);
                    return(true);
                }
                else
                {
                    isTrained = false;
                    return(false);
                }
            }
            catch (Exception ex)
            {
                isTrained = false;
                MessageBox.Show("Error in Train Images: " + ex.Message);
                return(false);
            }
        }
Example #18
        public FindPersonOnImage()
        {
            InitializeComponent();
            TrainedFaces.Clear();
            FaceIDs.Clear();
            PersonsNames.Clear();

            using (ApplicationContext db = new ApplicationContext())
            {
                db.Persons.Load();
                Persons = db.Persons.Local.ToList();
            }


            int FaceCount = 0;

            foreach (var person in Persons)
            {
                var img = new Bitmap(BitmapHelpers.byteArrayToImage(person.PersonPhoto));
                Image <Gray, byte> imageForTrain = img.ToImage <Gray, byte>();

                System.Drawing.Rectangle[] faces = faceCascadeClassifier.DetectMultiScale(imageForTrain, 1.1, 3, System.Drawing.Size.Empty, System.Drawing.Size.Empty);

                foreach (var face in faces)
                {
                    faceResult     = imageForTrain;
                    faceResult.ROI = face;

                    CvInvoke.Resize(faceResult, faceResult, new System.Drawing.Size(200, 200), 0, 0, Inter.Cubic);
                    CvInvoke.EqualizeHist(faceResult, faceResult);
                    TrainedFaces.Add(faceResult.Mat);
                }

                FaceIDs.Add(FaceCount);
                PersonsNames.Add(person.FirstName + " " + person.LastName);
                FaceCount++;
                Debug.WriteLine(FaceCount + ". " + person.FirstName + " " + person.LastName);

                double Threshold = 2000;

                if (TrainedFaces.Count > 0)
                {
                    recognizer = new EigenFaceRecognizer(FaceCount, Threshold);
                    recognizer.Train(TrainedFaces.ToArray(), FaceIDs.ToArray());
                }

            }
        }
Example #19
        private bool TrainImagesFromDir()// load the images from the folder
        {
            double Threshold   = 2000;
            int    ImagesCount = 0;

            TrainedFaces.Clear();
            PersonsLabes.Clear();
            PersonsNames.Clear();
            try
            {
                string   path  = Directory.GetCurrentDirectory() + @"\TrainedImages";
                string[] files = Directory.GetFiles(path, "*.bmp", SearchOption.AllDirectories);
                foreach (var file in files)
                {
                    Image <Gray, Byte> trainedImage = new Image <Gray, Byte>(file).Resize(100, 100, Inter.Cubic);
                    CvInvoke.EqualizeHist(trainedImage, trainedImage);
                    TrainedFaces.Add(trainedImage);
                    PersonsLabes.Add(ImagesCount);
                    string name = file.Split('\\').Last().Split('_')[0];
                    string mssv = file.Split('\\').Last().Split('_')[1];
                    string lop  = file.Split('\\').Last().Split('_')[2];
                    PersonsNames.Add(name);
                    PersonsMSSV.Add(mssv);
                    PersonsLop.Add(lop);
                    ImagesCount++;
                }
                if (TrainedFaces.Count() > 0)
                {
                    recognizer = new EigenFaceRecognizer(ImagesCount, Threshold);
                    recognizer.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());

                    isTrained = true;
                    //Debug.WriteLine(ImagesCount);
                    //Debug.WriteLine(isTrained);
                    return(true);
                }
                else
                {
                    isTrained = false;
                    return(false);
                }
            }
            catch (Exception ex)
            {
                isTrained = false;
                MessageBox.Show("Lỗi: " + ex.Message);
                return(false);
            }
        }
Example #20
        private bool TrainImagesFromDir()// load the images from the folder
        {
            double Threshold   = 2000;
            int    ImagesCount = 0;

            TrainedFaces.Clear();
            PersonsLabes.Clear();
            try
            {
                string   path  = Directory.GetCurrentDirectory() + @"\TrainedImages";
                string[] files = Directory.GetFiles(path, "*.bmp", SearchOption.AllDirectories);
                foreach (var file in files)
                {
                    Image <Gray, Byte> trainedImage = new Image <Gray, Byte>(file).Resize(100, 100, Inter.Cubic);
                    CvInvoke.EqualizeHist(trainedImage, trainedImage);   // equalize the brightness (histogram equalization)
                    TrainedFaces.Add(trainedImage);                      // add the face to the trained list
                    PersonsLabes.Add(ImagesCount);                       // assign a sequence number to the image
                    string mssv = file.Split('\\').Last().Split('_')[0]; // take the student ID (MSSV) before the '_'
                    string lop  = file.Split('\\').Last().Split('_')[1];
                    PersonsMSSV.Add(mssv);
                    PersonsLop.Add(lop);
                    ImagesCount++;
                }
                if (TrainedFaces.Count() > 0)
                {
                    recognizer = new EigenFaceRecognizer(ImagesCount, Threshold);     // create the recognizer so Train can be called
                    recognizer.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray()); // train on the images and their sequence-number labels

                    isTrained = true;
                    //Debug.WriteLine(ImagesCount);
                    //Debug.WriteLine(isTrained);
                    return(true);
                }
                else
                {
                    isTrained = false;
                    return(false);
                }
            }
            catch (Exception ex)
            {
                isTrained = false;
                MessageBox.Show("Chưa có dữ liệu nhận dạng sinh viên !");
                return(false);
            }
        }
Example #21
 public static bool TrainRecognizer(EigenFaceRecognizer rec, Image <Gray, byte>[] faceArray, int[] labelArray)
 {
     if (faceArray.Length != labelArray.Length)
     {
         return(false);
     }
     else
     {
         for (int i = 0; i < faceArray.Length; i++)
         {
             faceArray[i] = ImageHandler.ResizeGrayImage(faceArray[i]);
         }
         rec.Train(faceArray, labelArray);
         SaveRecognizer(rec);
         return(true);
     }
 }
Example #22
        public bool LoadTrainedFaces()
        {
            EigenFaceRecognizer eigen = NewEigen();

            string directory = "", picturePath = "";
            int    picNumber = 1, personLabelId = 0;

            Faces.Clear();

            foreach (Person person in DataManagement.Instance.GetPersonsCatalog())
            {
                directory = person.Name;
                directory = directory.Replace(" ", "_");

                picNumber   = 1;
                picturePath = ImageDataPath + directory + "/";
                personLabelId++;

                while (File.Exists(picturePath + picNumber + ConfigurationManager.AppSettings["PictureFormat"]))
                {
                    Faces.Add(new Face
                    {
                        PersonLabelId = personLabelId,
                        Name          = directory,
                        FileName      = picturePath + picNumber + ConfigurationManager.AppSettings["PictureFormat"],
                        Image         = new Image <Gray, byte>(picturePath + picNumber + ConfigurationManager.AppSettings["PictureFormat"])
                    });
                    picNumber++;
                }
            }
            Image <Gray, byte>[] faceArray = Faces.Select(f => f.Image).ToArray();
            int[] labelArray = Faces.Select(f => f.PersonLabelId).ToArray();

            if (faceArray.Length != labelArray.Length || faceArray.Length < 1 || labelArray.Length < 1)
            {
                return(false);
            }
            else
            {
                eigen.Train(faceArray, labelArray);
                SaveRecognizer(eigen);
                return(true);
            }
        }
        private void trainFaceRecognition()
        {
            int imageCount = 0;
            //threshold can be increased in order to lower the strictness of the detection, in case the images being used are of low quality
            double treshHold = 7500;

            workers_images.Clear();
            workers_lable.Clear();
            workers_names.Clear();
            workers_ID.Clear();

            try
            {
                //get all the images stored in the TrainedImages folder for the recognition process
                string   images_path = Directory.GetCurrentDirectory() + @"\TrainedImages";
                string[] files       = Directory.GetFiles(images_path, "*.jpg", SearchOption.AllDirectories);

                foreach (var file in files)
                {
                    //resizing the file, adding it to the list of images that will be sent, extracting the name and id of the worker from the image name
                    Image <Gray, byte> trainedImage = new Image <Gray, byte>(file).Resize(200, 200, Inter.Cubic);
                    CvInvoke.EqualizeHist(trainedImage, trainedImage);
                    workers_images.Add(trainedImage);
                    workers_lable.Add(imageCount);
                    string[] name_ID = file.Split('\\').Last().Split('_');
                    workers_names.Add(name_ID[0]);
                    workers_ID.Add(name_ID[1]);
                    imageCount++;
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show("Error: unable to train the face detection program, please contact support!! \n" + ex.Message);
            }
            //train the recognizer in case there were any image files in the TrainedImages folder
            if (workers_images.Count > 0)
            {
                recognizer = new EigenFaceRecognizer(imageCount, treshHold);
                recognizer.Train(workers_images.ToArray(), workers_lable.ToArray());

                Debug.WriteLine("the recognizer has rendered the databse");
            }
        }
Example #24
        private void Timer_Tick(object sender, EventArgs e)
        {
            Frame = Capture.QueryFrame();
            var imageFrame = Frame.ToImage <Gray, byte>();

            if (TimerCounter < TimeLimit)
            {
                TimerCounter++;

                if (imageFrame != null)
                {
                    var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);

                    if (faces.Length > 0)
                    {
                        var processImage = imageFrame.Copy(faces[0]).Resize(ProcessImageWidth, ProcessImageHeight, Inter.Cubic);
                        Faces.Add(processImage);
                        Labels.Add(Convert.ToInt32(txtUserId.Text));
                        ScanCounter++;
                        rtbOutPut.AppendText($"{ScanCounter} Success Scan Taken... {Environment.NewLine}");
                        rtbOutPut.ScrollToCaret();
                    }
                }
            }
            else
            {
                var trainFaces = ConvertImageToMat(Faces);

                foreach (var face in trainFaces)
                {
                    DataStoreAccess.SaveFace(Convert.ToInt32(txtUserId.Text), txtUserName.Text, ConvertImageToBytes(face.Bitmap));
                }

                EigenFaceRecognizer.Train(trainFaces.ToArray(), Labels.ToArray());

                EigenFaceRecognizer.Write(YMLPath);
                Timer.Stop();
                TimerCounter       = 0;
                btnPredict.Enabled = true;
                rtbOutPut.AppendText($"Training Completed! {Environment.NewLine}");
                MessageBox.Show("Training Completed!");
            }
        }
        public bool TrainFromFolder()
        {
            if (IsTrained)
            {
                eigenTrainedImageCounter = 0;
                eigenTrainingImages.Clear();
                eigenIntlabels.Clear();
                eigenlabels.Clear();
                IsTrained = false;
            }

            try
            {
                string   dataDirectory = Directory.GetCurrentDirectory() + @"\Traineddata";
                string[] files         = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);

                foreach (var file in files)
                {
                    Image <Gray, byte> trainedImage = new Image <Gray, byte>(file);
                    //trainedImage._EqualizeHist();
                    eigenTrainingImages.Add(trainedImage);
                    eigenlabels.Add(GetFileName(file));
                    eigenIntlabels.Add(eigenTrainedImageCounter);
                    eigenTrainedImageCounter++;
                    Console.WriteLine(eigenTrainedImageCounter);
                }

                try
                {
                    eigenFaceRecognizer = new EigenFaceRecognizer(eigenTrainedImageCounter, eigenThreshold);
                    eigenFaceRecognizer.Train(eigenTrainingImages.ToArray(), eigenIntlabels.ToArray());
                }
                catch (Exception e)
                {
                }
            }
            catch (Exception ex)
            {
                return(IsTrained = false);
            }
            return(IsTrained = true);
        }
Example #26
        public FacialRecognition(IConfiguration configuration, List <Face> trainingFaces)
        {
            _imagesToLocalPath  = configuration["ImageProcessing:FacesToLocalPath"];
            _writeImagesToLocal = bool.Parse(configuration["ImageProcessing:WriteFacesToLocal"] ?? "false") && !string.IsNullOrEmpty(_imagesToLocalPath) && Directory.Exists(_imagesToLocalPath);
            _haarPath           = configuration["ImageProcessing:HaarFolder"] ?? Path.Combine(AppContext.BaseDirectory, "haar");
            _configuration      = configuration;
            LabelDictionary     = new Dictionary <int, long>();
            var trainingImages = new List <Image <Gray, byte> >();
            var labels         = new List <int>();
            var tempDictionary = new Dictionary <long, int>();


            if (!trainingFaces.Any(tf => tf.TagId.HasValue))
            {
                return;
            }
            foreach (var face in trainingFaces)
            {
                if (face.TagId.HasValue)
                {
                    trainingImages.Add(new Image <Gray, byte>(ByteArrayToBitmap(face.Bytes)).Resize(100, 100, Inter.Cubic));
                    if (!tempDictionary.ContainsKey(face.TagId.Value))
                    {
                        var idx = tempDictionary.Count + 1;
                        tempDictionary.Add(face.TagId.Value, idx);
                        LabelDictionary.Add(idx, face.TagId.Value);
                    }
                    labels.Add(tempDictionary[face.TagId.Value]);
                }
                //Logger.Trace($"Adding training image {face.Name}");
            }

            Logger.Trace($"Using {trainingImages.Count} training images.");

            Recognizer = new EigenFaceRecognizer(80, double.PositiveInfinity);
            Recognizer.Train(trainingImages.ToArray(), labels.ToArray());

            foreach (var img in trainingImages)
            {
                img.Dispose();
            }
        }
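A sketch of how the Recognizer and LabelDictionary built above could be used afterwards; the method is hypothetical, and Predict with its Label/Distance result follows the Emgu CV usage seen in the other examples:

        // Hypothetical lookup: resize the candidate face like the training images,
        // predict, then map the internal label back to the original TagId.
        public long? IdentifyFace(Image<Gray, byte> candidateFace)
        {
            using (var resized = candidateFace.Resize(100, 100, Inter.Cubic))
            {
                var result = Recognizer.Predict(resized);
                if (result.Label <= 0 || !LabelDictionary.ContainsKey(result.Label))
                {
                    return null; // label -1 (not recognised) or not in the dictionary
                }
                return LabelDictionary[result.Label];
            }
        }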
Example #27
        public Form1()
        {
            InitializeComponent();

            EigenFaceRecognizer = new EigenFaceRecognizer(4, 800);
            DataStoreAccess     = new DataStoreAccess(ConnectionString);
            FaceDetection       = new CascadeClassifier(Path.GetFullPath($"{AppDomain.CurrentDomain.BaseDirectory}haarcascade_frontalface_default.xml"));
            Frame  = new Mat();
            Faces  = new List <Image <Gray, byte> >();
            Labels = new List <int>();

            if (File.Exists(YMLPath))
            {
                EigenFaceRecognizer.Read(YMLPath);
            }

            var allFaces = DataStoreAccess.CallFaces("ALL_USERS");

            if (allFaces != null)
            {
                for (int i = 0; i < allFaces.Count; i++)
                {
                    Stream stream = new MemoryStream();
                    stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
                    var faceImage = new Image <Gray, byte>(new Bitmap(stream));
                    Faces.Add(faceImage);
                    Labels.Add(allFaces[i].UserId);
                }

                EigenFaceRecognizer.Train(ConvertImageToMat(Faces).ToArray(), Labels.ToArray());

                btnPredict.Enabled = true;
                MessageBox.Show("Training Completed!");
            }
            else
            {
                MessageBox.Show("Nothing to traing!");
            }

            BeginCapture();
        }
Example #28
        private void TrainRecognizer()
        {
            var allFaces = _dataStoreAccess.GetFaces("ALL_USERS").ToList();

            if (allFaces != null)
            {
                var faceImages  = new Image <Gray, byte> [allFaces.Count()];
                var faceUserIds = new int[allFaces.Count()];
                for (int i = 0; i < allFaces.Count(); i++)
                {
                    //var stream = new MemoryStream();
                    //stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
                    var faceImage = new Image <Gray, byte>(100, 100);
                    faceImage.Bytes = allFaces[i].Image;
                    faceImages[i]   = faceImage;
                    faceUserIds[i]  = allFaces[i].UserId;
                }
                _faceRecognizer.Train(faceImages, faceUserIds);
                _faceRecognizer.Write(_faceRecognizerPath);
            }
        }
        public void TrainRecognizer(List <User> users)
        {
            var imageList = new List <Mat>();
            var indexList = new List <int>();

            var userIndex = 0;

            foreach (var user in users)
            {
                foreach (var userImage in user.UserImages.Where(userImage => File.Exists(userImage.ImageFilePath)))
                {
                    imageList.Add(new Mat(userImage.ImageFilePath).CvtColor(ColorConversionCodes.BGR2GRAY));
                    indexList.Add(userIndex);
                }

                userIndex++;
            }

            Recognizer = EigenFaceRecognizer.Create();
            Recognizer.Train(imageList, indexList);
        }
Example #30
        public void TrainRecognizer(List<User> users)
        {
            var imageList = new VectorOfMat();
            var indexList = new VectorOfInt();

            var userIndex = 0;

            foreach (var user in users)
            {
                foreach (var userImage in user.UserImages.Where(userImage => File.Exists(userImage.ImageFilePath)))
                {
                    imageList.Push(new Image<Gray, byte>(userImage.ImageFilePath));
                    indexList.Push(new[] { userIndex });
                }

                userIndex++;
            }

            Recognizer = new EigenFaceRecognizer(imageList.Size);
            Recognizer.Train(imageList, indexList);
        }
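Example #30 trains the recognizer but never persists it; a minimal follow-up sketch, using the same Write/Read pattern as the earlier examples (the file name is chosen here for illustration):

            // Hypothetical persistence step: save the trained model, then reload it
            // later without retraining.
            Recognizer.Write("eigenRecognizer.yml");

            var reloaded = new EigenFaceRecognizer();
            reloaded.Read("eigenRecognizer.yml");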