Code example #1
File: Recognizer.cs Project: maddddd/nightOwl
        //  private static readonly int threshold = 10000;
        // A higher threshold gives more chances to recognize a face (sometimes incorrectly).

        public static EigenFaceRecognizer NewEigen()
        {
            EigenFaceRecognizer eigenRec = new EigenFaceRecognizer(80, 4000);

            eigenRec.Write(Application.StartupPath + "/data/recognizer.yaml");
            return(eigenRec);
        }
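Note: Write only serializes the recognizer state to disk; to use the model later it has to be loaded again with Read and queried with Predict (Read is shown in code example #13). Below is a minimal sketch assuming the file was written after training, as in the later examples; RecognizeFace, recognizerPath and testFace are hypothetical placeholders, not part of the original project.

// Minimal sketch (assumption): load a previously trained recognizer written with Write()
// and predict a label for a grayscale face. recognizerPath and testFace are hypothetical.
using Emgu.CV;
using Emgu.CV.Face;
using Emgu.CV.Structure;

public static class RecognizerUsageSketch
{
    public static int RecognizeFace(string recognizerPath, Image<Gray, byte> testFace)
    {
        using (var recognizer = new EigenFaceRecognizer(80, 4000))
        {
            recognizer.Read(recognizerPath);   // counterpart of Write
            FaceRecognizer.PredictionResult result = recognizer.Predict(testFace);
            // With a finite threshold an unmatched face typically comes back as Label == -1.
            return result.Label;
        }
    }
}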
Code example #2
        private void DetectAndTrain_Click(object sender, EventArgs e)
        {
            using (var capture = new VideoCapture()) {
                Thread.Sleep(PluginOptions.CameraDelayMs);
                Mat frame = capture.QueryFrame();   // check the frame before converting it
                if (frame == null)
                {
                    return;
                }
                using (Image <Bgr, byte> imageFrame = frame.ToImage <Bgr, byte>()) {
                    using (Image <Gray, byte> grayframe = imageFrame.Convert <Gray, byte>()) {
                        if (PluginOptions.UseImageCorrection)
                        {
                            grayframe._EqualizeHist();
                        }
                        Rectangle[] part1;
                        using (var classifier = new CascadeClassifier(PluginOptions.CascadesPath + "haarcascade_frontalface_default.xml")) {
                            part1 = classifier.DetectMultiScale(grayframe, 1.1, 10);
                        }
                        if (part1.Length > 0)
                        {
                            Rectangle face = part1[0]; // take the first detected face
                            using (Image <Gray, byte> resultingImage = imageFrame.Copy(face).Convert <Gray, byte>().Resize(100, 100, Inter.Cubic)) {
                                imageFrame.Draw(face, new Bgr(Color.Blue), 2);
                                TestImage.Image        = imageFrame;
                                DetectedGrayFace.Image = resultingImage;

                                #region Adding a face

                                if (string.IsNullOrEmpty(FaceName.Text))
                                {
                                    MessageBox.Show(
                                        "Enter a name for the recognized face first", "Face name not specified", MessageBoxButtons.OK, MessageBoxIcon.Warning
                                        );
                                    return;
                                }
                                TrainedImages.Add(resultingImage);
                                PluginOptions.PeopleFaces.Add(
                                    PluginOptions.PeopleFaces.Count + 1,
                                    FaceName.Text
                                    );
                                TrainedImages.Last().Save($"{PluginOptions.PluginPath}Faces\\face{TrainedImages.Count}.bmp");

                                #endregion
                            }
                            PluginOptions.SaveOptionsToXml();

                            using (FaceRecognizer recognizer = new EigenFaceRecognizer()) {
                                recognizer.Train(TrainedImages.ToArray(), PluginOptions.PeopleFaces.Keys.ToArray());
                                recognizer.Write(PluginOptions.PluginPath + "SavedCascade.xml");
                            }
                            MessageBox.Show("Лицо успешно добавлено", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
                        }
                        else
                        {
                            MessageBox.Show("лиц не найдено", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                        }
                    }
                }
            }
        }
Code example #3
File: Form1.cs Project: nasa03/EmguCVFD-FR
        // Training
        private void btnTrain_Click(object sender, EventArgs e)
        {
            string location = Application.StartupPath + @"/../../Images/";

            if (trainingImages.Count == 0)
            {
                string[] images = Directory.GetFiles(location, "*.bmp", SearchOption.TopDirectoryOnly);
                foreach (var image in images)
                {
                    trainingImages.Add(new Image <Gray, byte>(image));
                }
            }
            if (labels.Count == 0)
            {
                labels = File.ReadAllText(location + "Faces.txt").Split(',').Select(_ => int.Parse(_)).ToList();
            }



            Mat[] mats = new Mat[trainingImages.Count];
            for (int i = 0; i < mats.Length; i++)
            {
                mats[i] = trainingImages[i].Mat.Clone();
            }
            VectorOfMat vMat = new VectorOfMat(mats);
            VectorOfInt vInt = new VectorOfInt(labels.ToArray());

            faceRecognizer.Train(vMat, vInt);
            faceRecognizer.Write(Application.StartupPath + @"/../../Images/Newfacerecognizer.yml");
            MessageBox.Show("Training done", "Info", MessageBoxButtons.OK);
            isTrained = true;
        }
Code example #4
        public void TrainImages()
        {
            string path = Application.StartupPath + @"/../../Images/";

            string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);
            //   int[] labelsDb = _context.Labels.Select(_ => _.LabelNumber).ToArray();
            List <int> labelsDb = new List <int>();

            Mat[] matImages = new Mat[files.Length];



            for (int i = 0; i < files.Length; i++)
            {
                matImages[i] = new Image <Gray, byte>(files[i]).Mat;
                string[] strings = files[i].Split('-');
                string   number  = strings[strings.Length - 1].Split('.')[0];
                labelsDb.Add(int.Parse(number));
            }


            VectorOfMat images = new VectorOfMat(matImages);
            VectorOfInt labels = new VectorOfInt(labelsDb.ToArray());

            faceRecognizer.Train(images, labels);
            faceRecognizer.Write(Application.StartupPath + @"/../../Images/eigenRecognizer.yml");
            fisherRecognizer.Train(images, labels);
            fisherRecognizer.Write(Application.StartupPath + @"/../../Images/fisherRecognizer.yml");
            LBPHFaceRecognizer.Train(images, labels);
            LBPHFaceRecognizer.Write(Application.StartupPath + @"/../../Images/lpbhRecognizer.yml");
            isTrained = true;
        }
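A minimal sketch of loading the three files written by TrainImages() back into the same recognizer fields; the Read counterpart appears in code example #13, and the method name below is hypothetical, not something shown in the original project.

        // Minimal sketch (assumption): restore the recognizers saved by TrainImages().
        // Only the file names and field names come from the example above.
        public void LoadTrainedRecognizers()
        {
            string path = Application.StartupPath + @"/../../Images/";
            faceRecognizer.Read(path + "eigenRecognizer.yml");
            fisherRecognizer.Read(path + "fisherRecognizer.yml");
            LBPHFaceRecognizer.Read(path + "lpbhRecognizer.yml");
            isTrained = true;
        }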
Code example #5
        /// <summary>
        /// Creates a face recognizer training file
        /// </summary>
        /// <param name="labels">Dictionary of label Id / label text</param>
        /// <param name="files">List of file / label Id pairs</param>
        /// <param name="labelsFileName">Name of the file mapping label Ids to label text</param>
        /// <param name="recognizerFileName">Name of the recognizer file</param>
        public void CreateTrainingFaceRecognizerFile(Dictionary <int, string> labels, List <Tuple <string, int> > files, string labelsFileName, string recognizerFileName)
        {
            List <Image <Gray, byte> > images = new List <Image <Gray, byte> >();
            List <int> labelIds = new List <int>();

            try
            {
                foreach (var item in files)
                {
                    images.Add(new Image <Bgr, byte>(item.Item1).Convert <Gray, byte>());
                    labelIds.Add(item.Item2);
                }

                using (EigenFaceRecognizer frz = new EigenFaceRecognizer(images.Count, 3000))
                {
                    frz.Train(images.ToArray(), labelIds.ToArray());
                    frz.Write(recognizerFileName);
                }
            }
            finally
            {
                images.ForEach(i => i.Dispose());
                images.Clear();
            }

            StringBuilder sb = new StringBuilder();

            foreach (var item in labels)
            {
                sb.AppendLine($"{item.Key}|{item.Value}");
            }
            File.WriteAllText(labelsFileName, sb.ToString());
        }
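The labels file written above contains one Id|label pair per line. Below is a minimal sketch of parsing it back into a dictionary; the method name is hypothetical, only the pipe-separated format comes from the code above.

        // Minimal sketch (assumption): parse the "Id|label" lines written by
        // CreateTrainingFaceRecognizerFile back into a dictionary. Requires: using System.IO;
        public Dictionary <int, string> ReadLabelsFile(string labelsFileName)
        {
            var labels = new Dictionary <int, string>();
            foreach (string line in File.ReadLines(labelsFileName))
            {
                if (string.IsNullOrWhiteSpace(line))
                {
                    continue;
                }
                string[] parts = line.Split(new[] { '|' }, 2);
                labels.Add(int.Parse(parts[0]), parts[1]);
            }
            return labels;
        }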
Code example #6
        public bool TrainRecognizer(int widthToProccess = 128, int heightToProccess = 128)
        {
            var allPeople = Storage.People;

            if (allPeople != null)
            {
                List <Bitmap> fullImageList = new List <Bitmap>();
                List <int>    fullIdList    = new List <int>();
                foreach (var person in allPeople)
                {
                    fullImageList.AddRange(person.Images);
                    foreach (var notUsed in person.Images)
                    {
                        fullIdList.Add(person.Id);
                    }
                }
                var grayScaleFaces = new Image <Gray, byte> [fullImageList.Count];
                for (int i = 0; i < fullImageList.Count; i++)
                {
                    Bitmap image = fullImageList[i];

                    var grayScaleFull = new Image <Gray, byte>(image);
                    var faceRects     = EmguSingleton.DetectFacesFromGrayscale(grayScaleFull);
                    if (faceRects.Length > 0)
                    {
                        grayScaleFaces[i] = grayScaleFull.Copy(faceRects[0]).Resize(widthToProccess, heightToProccess, Inter.Cubic);
                    }
                    else
                    {
                        grayScaleFaces[i] = grayScaleFull.Clone().Resize(widthToProccess, heightToProccess, Inter.Cubic);
                    }
                    grayScaleFull.Dispose();
                }
                _faceRecognizer.Train(grayScaleFaces, fullIdList.ToArray());
                _faceRecognizer.Write(_recognizerFilePath);
                foreach (var grayScaleFace in grayScaleFaces)
                {
                    grayScaleFace.Dispose();
                }
            }

            /*var allFaces = Storage.Faces;
             * if (allFaces != null)
             * {
             *  var faceImages = new Image<Gray, byte>[allFaces.Count()];
             *  var faceLabels = new int[allFaces.Count()];
             *  for (int i = 0; i < allFaces.Count(); i++)
             *  {
             *      Stream stream = new MemoryStream();
             *      stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
             *      var faceImage = new Image<Gray, byte>(new Bitmap(stream));
             *      faceImages[i] = faceImage.Resize(100, 100, Inter.Cubic);
             *      faceLabels[i] = allFaces[i].UserId;
             *  }
             *  _faceRecognizer.Train(faceImages, faceLabels);
             *  _faceRecognizer.Write(_recognizerFilePath);
             * }*/
            return(true);
        }
Code example #7
        public bool TrainRecognizer(IEnumerable <Face> Data)
        {
            try
            {
                if (_eigen == null)
                {
                    throw new Exception(ConfigurationManager.AppSettings["RecognizerError"]);
                }

                List <Image <Gray, byte> > FacesOnly = new List <Image <Gray, byte> >();
                int nameId = 0, j = 0;
                var FacesPhotos = new List <Image <Bgr, byte> >();
                var FacesNamesArray = new int[Data.Count()];
                _FacesNamesArray = new string[Data.Count()];

                Data.ToList().ForEach(f => FacesPhotos.Add(f.Photo.ByteArrayToImage()));
                // to do: remove face detection because we are giving a face photo already
                foreach (var face in FacesPhotos)
                {
                    var facePhoto = _faceDetectionService.DetectFaceAsGrayImage(face);

                    if (facePhoto != null)
                    {
                        FacesOnly.Add(facePhoto);
                    }
                }

                Data.ToList().ForEach(f =>
                {
                    if (!_FacesNamesArray.Contains(f.PersonName))
                    {
                        _FacesNamesArray[nameId] = f.PersonName;
                        FacesNamesArray[j]       = nameId + 1;
                        nameId++;
                    }
                    else
                    {
                        FacesNamesArray[j] = FindIndexInArray(_FacesNamesArray, f.PersonName) + 1;
                    }
                    j++;
                });

                _eigen.Train(FacesOnly.ToArray(), FacesNamesArray);
                _eigen.Write(_recognizerFileName);
                File.WriteAllLines(_recognizerFacesFileName, _FacesNamesArray, Encoding.UTF8);
                return(true);
            }
            catch
            {
                // rethrow without resetting the stack trace
                throw;
            }
        }
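The integer labels used for training above are 1-based indices into _FacesNamesArray, which is written to _recognizerFacesFileName one name per line. Below is a minimal sketch of mapping a predicted label back to a person name; the method name is hypothetical.

        // Minimal sketch (assumption): map a 1-based predicted label back to the person name
        // stored in the faces file written by TrainRecognizer.
        // Requires: using System.IO; using System.Text;
        public string ResolvePersonName(string recognizerFacesFileName, int predictedLabel)
        {
            string[] names = File.ReadAllLines(recognizerFacesFileName, Encoding.UTF8);
            if (predictedLabel < 1 || predictedLabel > names.Length)
            {
                return null;   // unknown face or label outside the saved range
            }
            return names[predictedLabel - 1];
        }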
Code example #8
        private void Timer_Tick(object sender, EventArgs e)
        {
            Frame = Capture.QueryFrame();
            var imageFrame = Frame.ToImage <Gray, byte>();

            if (TimerCounter < TimeLimit)
            {
                TimerCounter++;

                if (imageFrame != null)
                {
                    var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);

                    if (faces.Length > 0)
                    {
                        var processImage = imageFrame.Copy(faces[0]).Resize(ProcessImageWidth, ProcessImageHeight, Inter.Cubic);
                        Faces.Add(processImage);
                        Labels.Add(Convert.ToInt32(txtUserId.Text));
                        ScanCounter++;
                        rtbOutPut.AppendText($"{ScanCounter} Success Scan Taken... {Environment.NewLine}");
                        rtbOutPut.ScrollToCaret();
                    }
                }
            }
            else
            {
                var trainFaces = ConvertImageToMat(Faces);

                foreach (var face in trainFaces)
                {
                    DataStoreAccess.SaveFace(Convert.ToInt32(txtUserId.Text), txtUserName.Text, ConvertImageToBytes(face.Bitmap));
                }

                EigenFaceRecognizer.Train(trainFaces.ToArray(), Labels.ToArray());

                EigenFaceRecognizer.Write(YMLPath);
                Timer.Stop();
                TimerCounter       = 0;
                btnPredict.Enabled = true;
                rtbOutPut.AppendText($"Training Completed! {Environment.NewLine}");
                MessageBox.Show("Training Completed!");
            }
        }
Code example #9
        private void TrainRecognizer()
        {
            var allFaces = _dataStoreAccess.GetFaces("ALL_USERS").ToList();

            if (allFaces != null)
            {
                var faceImages  = new Image <Gray, byte> [allFaces.Count()];
                var faceUserIds = new int[allFaces.Count()];
                for (int i = 0; i < allFaces.Count(); i++)
                {
                    //var stream = new MemoryStream();
                    //stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
                    var faceImage = new Image <Gray, byte>(100, 100);
                    faceImage.Bytes = allFaces[i].Image;
                    faceImages[i]   = faceImage;
                    faceUserIds[i]  = allFaces[i].UserId;
                }
                _faceRecognizer.Train(faceImages, faceUserIds);
                _faceRecognizer.Write(_faceRecognizerPath);
            }
        }
Code example #10
File: Recognizer.cs Project: maddddd/nightOwl
 public static void SaveRecognizer(EigenFaceRecognizer rec)
 {
     rec.Write(Application.StartupPath + "/data/recognizer.yaml");
 }
Code example #11
 private void Window_Closing(object sender, CancelEventArgs e)
 {
     _faceRecognizer.Write("face_recognizer");
 }
Code example #12
File: Plugin.cs Project: F1uctus/VC-Vision
        private actionResult TrainFace(string[] parameters)
        {
            var ar = new actionResult();

            #region Parameters parsing

            switch (parameters.Length)
            {
            case 0: {
                ar.setError("Path to image not specified.");
                return(ar);
            }

            case 1: {
                ar.setError("Face name not specified.");
                return(ar);
            }
            }

            Image <Gray, byte> grayImage;
            if (string.IsNullOrEmpty(parameters[0]))
            {
                using (var capture = new VideoCapture()) {
                    Thread.Sleep(PluginOptions.CameraDelayMs);
                    grayImage = capture.QueryFrame().ToImage <Gray, byte>();
                }
            }
            else
            {
                try {
                    grayImage = new Image <Gray, byte>(parameters[0]);
                }
                catch {
                    ar.setError("Invalid path to image.");
                    return(ar);
                }
            }

            if (PluginOptions.UseImageCorrection)
            {
                grayImage._EqualizeHist();
            }

            #endregion

            Rectangle[] faces;
            using (var classifier = new CascadeClassifier($"{PluginOptions.CascadesPath}haarcascade_frontalface_default.xml")) {
                faces = classifier.DetectMultiScale(grayImage, 1.1, 10);
            }
            if (faces.Length == 0)
            {
                ar.setError("No face recognized.");
                return(ar);
            }
            using (Image <Gray, byte> faceImage = grayImage.Copy(faces[0]).Resize(100, 100, Inter.Cubic)) {
                MainCtl.TrainedImages.Add(faceImage);
                PluginOptions.PeopleFaces.Add(PluginOptions.PeopleFaces.Count + 1, parameters[1]);
                faceImage.Save($"{PluginOptions.PluginPath}Faces\\face{MainCtl.TrainedImages.Count}.bmp");
            }

            PluginOptions.SaveOptionsToXml();
            grayImage.Dispose();

            using (FaceRecognizer recognizer = new EigenFaceRecognizer()) {
                recognizer.Train(MainCtl.TrainedImages.ToArray(), PluginOptions.PeopleFaces.Keys.ToArray());
                recognizer.Write($"{PluginOptions.PluginPath}SavedCascade.xml");
            }
            ar.setInfo($"Added face with name: {parameters[1]}.");
            return(ar);
        }
Code example #13
        private void GetFacesList()
        {
            if (isTrained == true)
            {
                recognizer.Read(Config.TrainingFile);
                return;
            }

            if (!File.Exists(Config.HaarCascadePath))
            {
                string message = "Can't find Harr Cascade file! \n";
                message += Config.HaarCascadePath;
                DialogResult results = MessageBox.Show(message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }

            cascadeClassifier = new CascadeClassifier(Config.HaarCascadePath);

            faceList.Clear();
            string line;

            // create the file that stores face data if necessary
            if (!Directory.Exists(Config.FacePhotosPath))
            {
                Directory.CreateDirectory(Config.FacePhotosPath);
            }

            if (!File.Exists(Config.FaceListTextFile))
            {
                string message = "Can't find face data file!";
                message += Config.FaceListTextFile;
                message += "An empty file will be create if this is your first time running the application!";
                DialogResult results = MessageBox.Show(message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);

                if (results == DialogResult.OK)
                {
                    String dirName = Path.GetDirectoryName(Config.FaceListTextFile);
                    Directory.CreateDirectory(dirName);
                    File.Create(Config.FaceListTextFile).Close();
                }
            }

            FaceData     faceDataInstance = null;
            StreamReader reader           = new StreamReader(Config.FaceListTextFile);
            int          i = 0;

            while ((line = reader.ReadLine()) != null)
            {
                string[] lineParts = line.Split(':');
                faceDataInstance            = new FaceData();
                faceDataInstance.FaceImage  = new Image <Gray, byte>(Config.FacePhotosPath + lineParts[0] + Config.ImageFileExtension);
                faceDataInstance.PersonName = lineParts[1];
                faceList.Add(faceDataInstance);
            }

            foreach (FaceData face in faceList)
            {
                imageList.Push(face.FaceImage.Mat);
                nameList.Add(face.PersonName);
                labelList.Push(new[] { i++ });
            }
            reader.Close();

            //Train recognizer
            if (imageList.Size > 0 && isTrained == false)
            {
                recognizer = new EigenFaceRecognizer(imageList.Size, double.PositiveInfinity);
                recognizer.Train(imageList, labelList);
                recognizer.Write(Config.TrainingFile);
                isTrained = true;
            }
        }
Code example #14
File: PersonRecognizer.cs Project: povilux/nightOwl
 public void SaveRecognizer(EigenFaceRecognizer rec)
 {
     rec.Write(RecognizerDataPath);
 }