        void LoadFaces()
        {
            // Pull every username from the data store, then load each user's
            // stored face images and convert them from raw bytes to bitmaps.
            List<string> usernames = _dataClient.GetAllUsernames();

            if (usernames != null)
            {
                foreach (var name in usernames)
                {
                    foreach (var face in _dataClient.CallFaces(name))
                    {
                        _savedFaces.Add(StreamConverter.ByteToBitmap(face.Image));
                    }
                }
            }
        }
        //private FaceTrainer _trainer;

        #region Constructor & Properties
        public SaveFaceDialog()
        {
            _dataClient = new DataStoreAccess(@"C:\data\db\SQLite-Faces.db");
            // Reuse LoadFaces instead of duplicating the loading loop inline.
            LoadFaces();

            // Wire up the face detector, trainer and camera.
            _detector = new JAVSFaceCropper();
            _trainer  = new TrainingEngine();
            _camera   = new CameraManager();
            _camera.SetDetector(_detector);
            _cameraIsReady = _camera.IsReady();

            // Only start capturing and subscribe to new frames if a camera is present.
            if (_cameraIsReady)
            {
                _camera.Start();
                _camera.NewFrame += AttachFrames;
            }

            InitializeComponent();
            DataContext = this;
        }
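
StreamConverter.ByteToBitmap is referenced above but not shown in this example. Below is a minimal sketch of such a helper, assuming it only has to wrap the stored bytes in a MemoryStream and let System.Drawing decode them; the class name and signature are taken from the call site, and the real implementation may differ.

// Hypothetical helper matching the StreamConverter.ByteToBitmap call above.
public static class StreamConverter
{
    public static System.Drawing.Bitmap ByteToBitmap(byte[] imageBytes)
    {
        // A MemoryStream over the byte array starts at position 0,
        // so Bitmap can decode it directly.
        var stream = new System.IO.MemoryStream(imageBytes);
        return new System.Drawing.Bitmap(stream);
    }
}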
Example #3
        public Form1()
        {
            InitializeComponent();

            EigenFaceRecognizer = new EigenFaceRecognizer(4, 800);
            DataStoreAccess     = new DataStoreAccess(ConnectionString);
            FaceDetection       = new CascadeClassifier(Path.GetFullPath($"{AppDomain.CurrentDomain.BaseDirectory}haarcascade_frontalface_default.xml"));
            Frame  = new Mat();
            Faces  = new List<Image<Gray, byte>>();
            Labels = new List<int>();

            if (File.Exists(YMLPath))
            {
                EigenFaceRecognizer.Read(YMLPath);
            }

            var allFaces = DataStoreAccess.CallFaces("ALL_USERS");

            if (allFaces != null)
            {
                for (int i = 0; i < allFaces.Count; i++)
                {
                    // Decode the stored image bytes into a grayscale training image.
                    Stream stream = new MemoryStream();
                    stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
                    stream.Position = 0;   // rewind so Bitmap can decode from the start
                    var faceImage = new Image<Gray, byte>(new Bitmap(stream));
                    Faces.Add(faceImage);
                    Labels.Add(allFaces[i].UserId);
                }

                EigenFaceRecognizer.Train(ConvertImageToMat(Faces).ToArray(), Labels.ToArray());

                btnPredict.Enabled = true;
                MessageBox.Show("Training Completed!");
            }
            else
            {
                MessageBox.Show("Nothing to train!");
            }

            BeginCapture();
        }
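
The constructor above relies on a ConvertImageToMat helper that is not included in the snippet. Here is a minimal sketch of what it could look like, assuming Emgu CV 3.x, where Image<TColor, TDepth> exposes its underlying Mat; the method name and usage come from the EigenFaceRecognizer.Train call above.

        // Hypothetical sketch of the ConvertImageToMat helper used when training.
        private List<Mat> ConvertImageToMat(List<Image<Gray, byte>> images)
        {
            var mats = new List<Mat>();
            foreach (var image in images)
            {
                // Image<,>.Mat exposes the underlying matrix without copying pixels.
                mats.Add(image.Mat);
            }
            return mats;
        }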
Example #4
        public void TrainRecognizer()
        {
            var allFaces = _dataStoreAccess.CallFaces("ALL_USERS");

            if (allFaces != null)
            {
                var faceImages = new Image<Gray, byte>[allFaces.Count];
                var faceLabels = new int[allFaces.Count];
                for (int i = 0; i < allFaces.Count; i++)
                {
                    // Decode the stored bytes and normalise every face to 100x100.
                    Stream stream = new MemoryStream();
                    stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
                    stream.Position = 0;   // rewind so Bitmap can decode from the start
                    var faceImage = new Image<Gray, byte>(new Bitmap(stream));
                    faceImages[i] = faceImage.Resize(100, 100, Inter.Cubic);
                    faceLabels[i] = allFaces[i].UserId;
                }

                _faceRecognizer.Train(faceImages, faceLabels);
                _faceRecognizer.Save(@"C:\Users\Dom\Documents\Visual Studio 2015\Projects\Emgu\Emgu\Faces\recognizerFilePath\file.yaml");
            }
        }
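
Once the model has been trained and written to disk, recognition is a matter of loading it back and calling Predict on a face cropped and resized the same way as the training data. A minimal usage sketch follows, assuming the Emgu CV API used in Example #3 (Read; older Emgu versions use Load/Save instead, as in the Train method above) and reusing the model path from this example; the RecognizeFace method name is illustrative only.

        // Hypothetical usage sketch: load the saved model and predict who a
        // new face belongs to. The face is resized to 100x100 to match the
        // training images above.
        public int RecognizeFace(Image<Gray, byte> face)
        {
            var recognizer = new EigenFaceRecognizer();
            recognizer.Read(@"C:\Users\Dom\Documents\Visual Studio 2015\Projects\Emgu\Emgu\Faces\recognizerFilePath\file.yaml");

            var result = recognizer.Predict(face.Resize(100, 100, Inter.Cubic));
            // result.Label is the stored UserId; result.Distance is the match
            // distance (lower means a closer match).
            return result.Label;
        }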