/// <summary>
/// Handles the Recognize button: loads the selected photo and trained model,
/// predicts the subject's label, and shows the matching user's name if enrolled.
/// </summary>
private void btnRecognize_Click(object sender, EventArgs e)
{
    if (string.IsNullOrWhiteSpace(PathPhoto) || string.IsNullOrWhiteSpace(PathModel))
    {
        MessageBox.Show("need to select photo and model");
        return;
    }

    try
    {
        // Dispose the bitmap and the Emgu wrapper deterministically: GDI+ handles
        // are a scarce resource and the original leaked both on every click.
        using (var userBmp = new Bitmap(PathPhoto))
        using (var userImage = new Image<Gray, byte>(userBmp))
        {
            _faceRecognizer.Load(PathModel);
            var result = _faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
            var userId = result.Label;
            var userRecord = new FRService().GetById(userId);
            if (userRecord != null)
            {
                lblResult.Text = userRecord.UserName;
            }
            else
            {
                MessageBox.Show("User not enrolled in db");
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Predicts the identity of the supplied face image and returns the matched label.
/// </summary>
/// <param name="userImage">Grayscale face image; resized to 100x100 before prediction.</param>
/// <param name="modelPath">
/// Optional path to the trained recognizer model. When null, falls back to the
/// original hard-coded location for backward compatibility.
/// </param>
/// <returns>The predicted label as a string.</returns>
public string RecognizeUser(Image<Gray, byte> userImage, string modelPath = null)
{
    // NOTE(review): the original baked in a user-specific absolute path; kept only as
    // the default so existing callers keep working. New callers should pass modelPath.
    _faceRecognizer.Load(modelPath ?? @"C:\Users\Dom\Documents\Visual Studio 2015\Projects\Emgu\Emgu\Faces\recognizerFilePath\file.yaml");
    var result = _faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
    return result.Label.ToString();
}
/// <summary>
/// Static initializer: creates the face detector and recognizer, then restores any
/// previously saved training data and the subject id-to-name map from the app directory.
/// </summary>
static OpenCvTrainer()
{
    Subjects = new Dictionary<int, string>();
    SubjectSamples = new List<Tuple<int, Image<Gray, byte>>>();

    var haarPath = @"haarcascade_frontalface_default.xml";
    DetectionClassifier = new CascadeClassifier(haarPath);
    FaceRecognizerData = new EigenFaceRecognizer();

    // Hoist the repeated Path.Combine calls to named locals.
    var baseDir = AppDomain.CurrentDomain.BaseDirectory;
    var trainingFile = Path.Combine(baseDir, "trainingFile.ocv");
    var subjectsFile = Path.Combine(baseDir, "trainingSubjects.txt");

    if (File.Exists(trainingFile))
    {
        FaceRecognizerData.Load(trainingFile);
    }

    if (File.Exists(subjectsFile))
    {
        foreach (var line in File.ReadAllLines(subjectsFile))
        {
            if (string.IsNullOrEmpty(line) || !line.Contains(":"))
            {
                continue;
            }

            // Split into at most two parts so subject names containing ':' are kept intact
            // (the original Split(':') silently truncated them at the second colon).
            var items = line.Split(new[] { ':' }, 2);
            int id;
            if (int.TryParse(items[0], out id))
            {
                // TryParse: one malformed id line no longer aborts the whole type
                // initializer with a TypeInitializationException.
                Subjects.Add(id, items[1]);
            }
        }
    }
}
/// <summary>
/// Creates the recognizer and attempts to restore a previously trained model from the
/// user's Documents folder; the recognizer simply starts untrained if none is available.
/// </summary>
public JAVSFacialRecognizer()
{
    _faceRecognizer = new EigenFaceRecognizer();

    var modelPath = Path.Combine(
        Environment.GetFolderPath(Environment.SpecialFolder.UserProfile),
        "Documents",
        "test-recognizer.yaml");

    try
    {
        // Check first: the common "no saved model yet" case is not exceptional.
        if (File.Exists(modelPath))
        {
            _faceRecognizer.Load(modelPath);
        }
    }
    catch (Exception)
    {
        // Best-effort by design: a corrupt/unreadable model leaves the recognizer
        // untrained. NOTE(review): the original swallowed every exception into an
        // unused local (var fail = "fail"); consider logging the failure here.
    }
}
/// <summary>
/// Loads the trained recognizer database matching the algorithm selected in
/// comboBoxAlgorithm (EigenFaces, FisherFaces, or LBPHFaces).
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    if (comboBoxAlgorithm.Text == "EigenFaces")
    {
        LoadTrainedDatabase("trainedDataEigen.dat", path =>
        {
            eigenFaceRecognizer = new EigenFaceRecognizer(eigenTrainedImageCounter, 3000);
            eigenFaceRecognizer.Load(path);
        });
    }
    else if (comboBoxAlgorithm.Text == "FisherFaces")
    {
        LoadTrainedDatabase("trainedDataFisher.dat", path =>
        {
            fisherFaceRecognizer = new FisherFaceRecognizer(fisherTrainedImageCounter, 3000);
            fisherFaceRecognizer.Load(path);
        });
    }
    else if (comboBoxAlgorithm.Text == "LBPHFaces")
    {
        LoadTrainedDatabase("trainedDataLBPH.dat", path =>
        {
            lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
            lbphFaceRecognizer.Load(path);
        });
    }
}

/// <summary>
/// Shared load/report/error-handling pattern factored out of the three identical
/// branches of button2_Click. <paramref name="load"/> receives the resolved data
/// file path and constructs/loads the algorithm-specific recognizer.
/// </summary>
private void LoadTrainedDatabase(string fileName, Action<string> load)
{
    try
    {
        string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces\\" + fileName;
        load(dataDirectory);
        richTextBox1.Text += "Trained Database Loaded.";
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString());
        MessageBox.Show(
            "Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).",
            "Trained faces load", // fixed user-visible typo: was "Triained faces load"
            MessageBoxButtons.OK,
            MessageBoxIcon.Exclamation);
    }
}
/// <summary>
/// Predicts the identity of the supplied face image using the trained data file in the
/// application startup directory and returns the numeric label of the best match.
/// </summary>
/// <param name="userImage">Grayscale face image; resized to 100x100 before prediction.</param>
/// <returns>The predicted subject label.</returns>
public int RecognizeUser(Image<Gray, byte> userImage)
{
    // NOTE(review): a fresh recognizer is constructed and the model re-loaded on
    // every call — consider caching these if this method sits on a hot path.
    FaceRecognizer recognizer = new EigenFaceRecognizer(80, 1000);
    string modelPath = Application.StartupPath + "/traineddata";
    recognizer.Load(modelPath);

    var result = recognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
    return result.Label;
}
/// <summary>
/// Training window setup: initializes the UI controls, deserializes the existing
/// training data, tallies the male/female sample counts, restores the trained gender
/// model (when present), and builds the gender classifier used by this window.
/// </summary>
public TrainingWindow()
{
    InitializeComponent();

    // Delete/Update stay disabled until a record is selected for editing.
    BDelete.IsEnabled = false;
    BUpdate.IsEnabled = false;
    isEditingXML = false;

    listdata = TrainingData.Deserialize(IMAGES_PATH);

    // Populate the combo boxes from their enums; default to the first entry.
    CBInterpolation.ItemsSource = Enum.GetValues(typeof(Emgu.CV.CvEnum.Inter));
    CBInterpolation.SelectedIndex = 0;
    CBGender.ItemsSource = Enum.GetValues(typeof(GenderEnum));
    CBGender.SelectedIndex = 0;

    // Count samples per gender for the status text below.
    foreach (TrainingData data in listdata)
    {
        switch (data.label)
        {
        case GenderEnum.MALE: male++; break;
        case GenderEnum.FEMALE: female++; break;
        default: break;
        }
    }
    TBGenderCount.Text = string.Format("Male: {0} Female: {1}", male, female);

    currentData = new TrainingData();
    Photo.Stretch = Stretch.Fill;
    pathIndex = 0;

    // Restore a previously trained model if one exists; otherwise start untrained.
    FaceRecognizer fr = new EigenFaceRecognizer(16, 123);
    if (File.Exists(TRAINING_PATH))
    {
        fr.Load(TRAINING_PATH);
    }

    GenderClassifier.threshold = 100;
    _genderClassifier = new GenderClassifier
    {
        classifier = new CascadeClassifier("Classifiers\\haarcascade_frontalface_alt2.xml"),
        faceRecognizer = fr,
        recognizerHeight = MainWindow.recognitionHeight,
        recognizerWidth = MainWindow.recognitionWidth
    };
}
/// <summary>
/// Loads the persisted recognizer model from <c>_recognizerFilePath</c> into
/// <c>_faceRecognizer</c>.
/// </summary>
// NOTE(review): no existence check here — presumably Load throws when the file is
// missing or unreadable; confirm callers handle that case.
public void LoadRecognizerData() { _faceRecognizer.Load(_recognizerFilePath); }
/// <summary>
/// Main window setup: wires lifecycle events, reads (and persists defaults for) the
/// recognition width/height and demographics spreadsheet location in app config,
/// restores the trained gender model, and builds the gender classifier.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    Loaded += MainWindowLoaded;
    Closing += MainWindowClosing;

    Configuration confg = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None);

    // Ensure a usable recognition width; persist the 120px default when missing/invalid.
    if (confg.AppSettings.Settings["Width"] == null ||
        !int.TryParse(confg.AppSettings.Settings["Width"].Value, out recognitionWidth))
    {
        recognitionWidth = 120;
        confg.AppSettings.Settings.Add(new KeyValueConfigurationElement("Width", recognitionWidth.ToString()));
        confg.Save(ConfigurationSaveMode.Modified);
        ConfigurationManager.RefreshSection("appSettings");
    }

    // Same treatment for the recognition height.
    if (confg.AppSettings.Settings["Height"] == null ||
        !int.TryParse(confg.AppSettings.Settings["Height"].Value, out recognitionHeight))
    {
        recognitionHeight = 120;
        confg.AppSettings.Settings.Add(new KeyValueConfigurationElement("Height", recognitionHeight.ToString()));
        confg.Save(ConfigurationSaveMode.Modified);
        ConfigurationManager.RefreshSection("appSettings");
    }

    // FIX: the original also called ExcelManager.CreateSingleton(AppSettings["DemographicsLocation"])
    // unconditionally BEFORE this null check — passing null when the setting was absent
    // and duplicating the guarded call below when it was present. The single guarded
    // call here is sufficient.
    if (confg.AppSettings.Settings["DemographicsLocation"] == null)
    {
        string path = @"..\demo.xlsx";
        ExcelManager.CreateSingleton(path);
        confg.AppSettings.Settings.Add(new KeyValueConfigurationElement("DemographicsLocation", path.ToString()));
        confg.Save(ConfigurationSaveMode.Modified);
        ConfigurationManager.RefreshSection("appSettings");
    }
    else
    {
        ExcelManager.CreateSingleton(ConfigurationManager.AppSettings["DemographicsLocation"]);
    }

#if ON_TOP
    StatusValue.Text = "Debugging!";
    Activated += MainWindowActive;
    Deactivated += MainWindowHidden;
#else
    StatusValue.Text = "Release";
#endif

    mouseController = new KinectMouseController();

    // Restore the trained gender model. NOTE(review): unlike TrainingWindow, there is
    // no File.Exists guard here — a missing training file will throw; confirm intended.
    FaceRecognizer fr = new EigenFaceRecognizer(14, 123);
    fr.Load(TrainingWindow.TRAINING_PATH);

#if SHOW_CAMERA
    imgViewer = new ImageViewer();
    imgViewer.Show();
#endif

    GenderClassifier.threshold = 50D;
    _genderClassifier = new GenderClassifier
    {
        classifier = new CascadeClassifier(CLASSIFIER_PATH),
        faceRecognizer = fr,
        recognizerWidth = recognitionWidth,
        recognizerHeight = recognitionHeight
    };

    areaOfInterest = new Rectangle(CROPPED_X, CROPPED_Y, CROPPED_WIDTH, CROPPED_HEIGHT);
}