Code Example #1
 public FaceRecognitionDB()
 {
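     // Attempt to load previously trained Eigen, Fisher and LBPH models from disk.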
     try
     {
         faceRecognizer.Read(Application.StartupPath + @"/../../Images/eigenRecognizer.yml");
         fisherRecognizer.Read(Application.StartupPath + @"/../../Images/fisherRecognizer.yml");
         LBPHFaceRecognizer.Read(Application.StartupPath + @"/../../Images/lpbhRecognizer.yml");
     }
     catch (Exception)
     {
     }
 }
Code Example #2
File: Form1.cs Project: nasa03/EmguCVFD-FR
        // Prediction
        private void btnPredict_Click(object sender, EventArgs e)
        {
            try
            {
                Mat imageMat = null;
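                // Keep grabbing camera frames until the cascade classifier detects at least one face.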
                while (true)
                {
                    imageMat = videoCapture.QueryFrame().Clone();

                    Image <Gray, byte> image      = imageMat.ToImage <Gray, byte>().Resize(180, 200, Emgu.CV.CvEnum.Inter.Cubic);
                    Rectangle[]        rectangles = classifier.DetectMultiScale(image, 1.1, 4);
                    if (rectangles.Count() > 0)
                    {
                        break;
                    }
                }
                if (imageMat != null)
                {
                    imageMat = imageMat.ToImage <Gray, byte>().Resize(180, 200, Emgu.CV.CvEnum.Inter.Cubic).Mat;
                    faceRecognizer.Read(Application.StartupPath + @"/../../Images/Newfacerecognizer.yml");
                    var res = faceRecognizer.Predict(imageMat);


                    // For Eigen/Fisher/LBPH recognizers a smaller distance means a closer match,
                    // so treat the prediction as a hit only when the distance is below the threshold.
                    if (res.Distance < threshold)
                    {
                        txtBox_Label.Text    = res.Label.ToString();
                        txtBox_distance.Text = res.Distance.ToString();
                        string foundImage = Application.StartupPath + @"/../../Images/face" + res.Label.ToString() + ".bmp";
                        imgBox1.Image = imageMat.ToImage <Gray, byte>();
                        picBox.Image  = Image.FromFile(foundImage);
                        MessageBox.Show("Successufully found label", "Success", MessageBoxButtons.OK);
                    }
                    else
                    {
                        MessageBox.Show("Not Found", "INFO", MessageBoxButtons.OK);
                    }
                }
                videoCapture.Stop();
            }
            catch (Exception err)
            {
                MessageBox.Show(err.Message, "INFO", MessageBoxButtons.OK);
            }
        }
Code Example #3
        private void Recognize()
        {
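            // Reload the trained model from disk and look up the user name for the predicted label (a label of 0 is treated as no match).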
            _faceRecognizer.Read(_faceRecognizerPath);
            var result = _faceRecognizer.Predict(detectedFaceForRecognizer);

            if (result.Label != 0)
            {
                RecognizedFace = _dataStoreAccess.GetUserName(result.Label);
            }
        }
Code Example #4
 /// <summary>
 /// Recognizes the faces in an image
 /// </summary>
 /// <param name="fileName"></param>
 /// <param name="labelsFileName"></param>
 /// <param name="recognizerFileName"></param>
 private void RecognizeFaces(string fileName, string labelsFileName, string recognizerFileName)
 {
     _faceLabels = GetLabels(labelsFileName);
     using (EigenFaceRecognizer faceRecognizer = new EigenFaceRecognizer())
     {
         _currentFaceRecognizer = faceRecognizer;
         faceRecognizer.Read(recognizerFileName);
         RecognizeFaces(fileName);
     }
 }
Code Example #5
 /// <summary>
 /// Recognizes the faces in every image in a directory
 /// </summary>
 /// <param name="directoryName"></param>
 /// <param name="searchPattern"></param>
 /// <param name="recursive"></param>
 public void RecognizeFacesInDirectory(string directoryName, string labelsFileName, string recognizerFileName, string searchPattern = "*.jpg", bool recursive = false)
 {
     _faceLabels = GetLabels(labelsFileName);
     using (EigenFaceRecognizer faceRecognizer = new EigenFaceRecognizer())
     {
         _currentFaceRecognizer = faceRecognizer;
         faceRecognizer.Read(recognizerFileName);
         foreach (var item in Directory.GetFiles(directoryName, searchPattern, recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly))
         {
             RecognizeFaces(item);
         }
     }
 }
Code Example #6
 public MainWindowViewModel()
 {
     _dataStoreAccess = new DataStoreAccess(_databasePath);
     _faceRecognizer = new EigenFaceRecognizer();
     if (File.Exists(_faceRecognizerPath))
     {
         _faceRecognizer.Read(_faceRecognizerPath);
     }
     InitializeFaceDetection();
 }
Code Example #7
 public FaceRecognitionService(DatabaseContext context)
 {
     try
     {
         _eigen           = new EigenFaceRecognizer();
         _FacesNamesArray = File.ReadAllLines(_recognizerFacesFileName);
         _eigen.Read(_recognizerFileName);
         _faceDetectionService = new FaceDetectionService();
         _context = context;
     }
     catch (Exception)
     {
         throw; // rethrow without losing the original stack trace
     }
 }
Code Example #8
 private void BtnSnapshot_Click(object sender, EventArgs e)
 {
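     // Grab a frame from the default camera, detect faces with a Haar cascade,
     // and label each detected face with a recognizer loaded from disk.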
     using (var capture = new VideoCapture()) {
         Thread.Sleep(PluginOptions.CameraDelayMs);
         using (Image <Bgr, byte> imageFrame = capture.QueryFrame().ToImage <Bgr, byte>()) {
             if (imageFrame == null)
             {
                 return;
             }
             using (Image <Gray, byte> grayImage = imageFrame.Convert <Gray, byte>()) {
                 if (PluginOptions.UseImageCorrection)
                 {
                     grayImage._EqualizeHist();
                 }
                 using (var classifier = new CascadeClassifier(PluginOptions.CascadesPath + "haarcascade_frontalface_default.xml")) {
                     Rectangle[] part1 = classifier.DetectMultiScale(grayImage, 1.1, 10);
                     if (part1.Length == 0)
                     {
                         return;
                     }
                     LabelFacesList.Text = "";
                     foreach (Rectangle face in part1)
                     {
                         using (Image <Gray, byte> resultingImage = imageFrame.Copy(face).Convert <Gray, byte>().Resize(100, 100, Inter.Cubic)) {
                             if (PluginOptions.UseImageCorrection)
                             {
                                 resultingImage._EqualizeHist();
                             }
                             imageFrame.Draw(face, new Bgr(Color.Blue), 2);
                             TestImage.Image = imageFrame;
                             if (TrainedImages.Count == 0)
                             {
                                 continue;
                             }
                             using (FaceRecognizer recognizer = new EigenFaceRecognizer()) {
                                 recognizer.Read(PluginOptions.PluginPath + "SavedCascade.xml");
                                 LabelFacesList.Text +=
                                     $"{PluginOptions.PeopleFaces.ElementAt(recognizer.Predict(resultingImage).Label).Value}\r\n";
                             }
                         }
                     }
                 }
             }
         }
     }
 }
Code Example #9
        public FaceRecognition()
        {
            _faceRecognizer = new EigenFaceRecognizer(0, double.PositiveInfinity);

            if (File.Exists("face_recognizer"))
            {
                _faceRecognizer.Read("face_recognizer");
            }

            _knownFaces     = new List <FaceData>();
            _emptyFaceImage = CreateEmptyFaceImage();

            InitializeComponent();
            _captureTimer = new Timer()
            {
                Interval = Config.TimerResponseValue
            };
            _captureTimer.Elapsed += CaptureTimer_Elapsed;
        }
Code Example #10
File: Recognizer.cs Project: maddddd/nightOwl
        public static EigenFaceRecognizer OldEigen()
        {
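            // Load the recognizer state saved on disk if it exists; otherwise fall back to NewEigen().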
            EigenFaceRecognizer eigenRec = new EigenFaceRecognizer(80, 4000);

            if (File.Exists(Application.StartupPath + "/data/recognizer.yaml"))
            {
                try
                {
                    eigenRec.Read(Application.StartupPath + "/data/recognizer.yaml");
                }
                catch
                {
                }
            }
            else
            {
                eigenRec = NewEigen();
            }
            return(eigenRec);
        }
Code Example #11
File: PersonRecognizer.cs Project: povilux/nightOwl
        public static EigenFaceRecognizer OldEigen()
        {
            /*     Console.WriteLine("Bsd" + int.Parse(ConfigurationManager.AppSettings["RecognizerComponentsNum"])+
             *             int.Parse(ConfigurationManager.AppSettings["RecognizerThreshold"]));
             *   EigenFaceRecognizer eigenRec = new EigenFaceRecognizer(
             *               int.Parse(ConfigurationManager.AppSettings["RecognizerComponentsNum"]),
             *               int.Parse(ConfigurationManager.AppSettings["RecognizerThreshold"]));*/

            EigenFaceRecognizer eigenRec = new EigenFaceRecognizer(7, tre);

            try
            {
                eigenRec.Read(RecognizerDataPath);
            }
            catch
            {
                eigenRec = NewEigen();
            }
            return(eigenRec);
        }
Code Example #12
        public Form1()
        {
            InitializeComponent();

            EigenFaceRecognizer = new EigenFaceRecognizer(4, 800);
            DataStoreAccess     = new DataStoreAccess(ConnectionString);
            FaceDetection       = new CascadeClassifier(Path.GetFullPath($"{AppDomain.CurrentDomain.BaseDirectory}haarcascade_frontalface_default.xml"));
            Frame  = new Mat();
            Faces  = new List <Image <Gray, byte> >();
            Labels = new List <int>();

            if (File.Exists(YMLPath))
            {
                EigenFaceRecognizer.Read(YMLPath);
            }

            var allFaces = DataStoreAccess.CallFaces("ALL_USERS");

            if (allFaces != null)
            {
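                // Rebuild the training set from the stored face images and retrain the recognizer.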
                for (int i = 0; i < allFaces.Count; i++)
                {
                    Stream stream = new MemoryStream();
                    stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
                    stream.Position = 0; // rewind before decoding, otherwise Bitmap reads from the end of the stream
                    var faceImage = new Image <Gray, byte>(new Bitmap(stream));
                    Faces.Add(faceImage);
                    Labels.Add(allFaces[i].UserId);
                }

                EigenFaceRecognizer.Train(ConvertImageToMat(Faces).ToArray(), Labels.ToArray());

                btnPredict.Enabled = true;
                MessageBox.Show("Training Completed!");
            }
            else
            {
                MessageBox.Show("Nothing to traing!");
            }

            BeginCapture();
        }
Code Example #13
        public string Predict(Image <Gray, byte> image)
        {
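            // Reload the trained model and accept the prediction only when the distance is under the hard-coded cutoff of 3500.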
            faceRecognizer.Read(Application.StartupPath + @"/../../Images/faceRecognizer.yml");
            var res = faceRecognizer.Predict(image);

            DAL.Models.Label label = null;
            //if(res.Distance > threshold)
            if (res.Distance < 3500)
            {
                label = _context.Labels.FirstOrDefault(_ => _.LabelNumber == res.Label);
                var user = _context.Users.FirstOrDefault(_ => _.Id == label.UserId);
                Log.Logger.Information("{@VisitDate} User{@username} with UserId {@UserId} {@VisitType} home and has access to House", DateTime.Now, user.FirstName + " " + user.LastName, user.Id);
                Log.CloseAndFlush();
                return(user.FirstName + " " + user.LastName);
            }
            else
            {
                Log.Logger.Information("{@VisitDate} User{@username} with UserId {@UserId} {@VisitType} home and has access to House", DateTime.Now);
            }

            return(string.Empty);
        }
Code Example #14
 public void LoadRecognizerData()
 {
     _faceRecognizer.Read(_recognizerFilePath);
 }
Code Example #15
File: Plugin.cs Project: F1uctus/VC-Vision
        private actionResult RecognizeFace(string[] parameters)
        {
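            // Recognize faces either in a camera snapshot (empty first parameter) or in the image file given as the first parameter.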
            var ar = new actionResult();

            if (MainCtl.TrainedImages.Count == 0)
            {
                ar.setError("Database contains no trained faces.");
                return(ar);
            }

            #region Parameters parsing

            switch (parameters.Length)
            {
            case 0: {
                ar.setError("Path to image not specified.");
                return(ar);
            }

            case 1: {
                ar.setError("Face name not specified.");
                return(ar);
            }
            }

            Image <Gray, byte> grayImage;
            if (string.IsNullOrEmpty(parameters[0]))
            {
                using (var capture = new VideoCapture()) {
                    Thread.Sleep(PluginOptions.CameraDelayMs);
                    grayImage = capture.QueryFrame().ToImage <Gray, byte>();
                }
            }
            else
            {
                try {
                    grayImage = new Image <Gray, byte>(parameters[0]);
                }
                catch {
                    ar.setError("Invalid path to image.");
                    return(ar);
                }
            }

            if (PluginOptions.UseImageCorrection)
            {
                grayImage._EqualizeHist();
            }

            #endregion

            Rectangle[] faces;
            using (var classifier = new CascadeClassifier(PluginOptions.CascadesPath + "haarcascade_frontalface_default.xml")) {
                faces = classifier.DetectMultiScale(grayImage, 1.1, 10);
            }
            if (faces.Length == 0)
            {
                ar.setError("No trained faces found.");
                return(ar);
            }

            var resultString = "";
            foreach (Rectangle face in faces)
            {
                using (FaceRecognizer recognizer = new EigenFaceRecognizer()) {
                    recognizer.Read(PluginOptions.PluginPath + "SavedCascade.xml");
                    FaceRecognizer.PredictionResult recoResult = recognizer.Predict(grayImage.Copy(face).Resize(100, 100, Inter.Cubic));
                    resultString += $"<{PluginOptions.PeopleFaces.ElementAt(recoResult.Label)}:{recoResult.Distance}>";
                }
            }
            grayImage.Dispose();
            ar.setSuccess(resultString);
            return(ar);
        }
Code Example #16
        private void GetFacesList()
        {
            if (isTrained == true)
            {
                recognizer.Read(Config.TrainingFile);
                return;
            }

            if (!File.Exists(Config.HaarCascadePath))
            {
                string message = "Can't find Harr Cascade file! \n";
                message += Config.HaarCascadePath;
                DialogResult results = MessageBox.Show(message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }

            cascadeClassifier = new CascadeClassifier(Config.HaarCascadePath);

            faceList.Clear();
            string line;

            // create file to store face data if necessary
            if (!Directory.Exists(Config.FacePhotosPath))
            {
                Directory.CreateDirectory(Config.FacePhotosPath);
            }

            if (!File.Exists(Config.FaceListTextFile))
            {
                string message = "Can't find face data file!";
                message += Config.FaceListTextFile;
                message += "An empty file will be create if this is your first time running the application!";
                DialogResult results = MessageBox.Show(message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);

                if (results == DialogResult.OK)
                {
                    String dirName = Path.GetDirectoryName(Config.FaceListTextFile);
                    Directory.CreateDirectory(dirName);
                    File.Create(Config.FaceListTextFile).Close();
                }
            }

            FaceData     faceDataInstance = null;
            StreamReader reader           = new StreamReader(Config.FaceListTextFile);
            int          i = 0;

            while ((line = reader.ReadLine()) != null)
            {
                string[] lineParts = line.Split(':');
                faceDataInstance            = new FaceData();
                faceDataInstance.FaceImage  = new Image <Gray, byte>(Config.FacePhotosPath + lineParts[0] + Config.ImageFileExtension);
                faceDataInstance.PersonName = lineParts[1];
                faceList.Add(faceDataInstance);
            }

            foreach (FaceData face in faceList)
            {
                imageList.Push(face.FaceImage.Mat);
                nameList.Add(face.PersonName);
                labelList.Push(new[] { i++ });
            }
            reader.Close();

            //Train recognizer
            if (imageList.Size > 0 && isTrained == false)
            {
                recognizer = new EigenFaceRecognizer(imageList.Size, double.PositiveInfinity);
                recognizer.Train(imageList, labelList);
                recognizer.Write(Config.TrainingFile);
                isTrained = true;
            }
        }