private void BtnSkip_Click(object sender, RoutedEventArgs e)
        {
            // The clicked button carries the face's RecognitionModel in its Tag.
            Button           button = sender as Button;
            RecognitionModel model  = button.Tag as RecognitionModel;

            // Label the face as "Unknown" and refresh the list so only unprocessed faces remain.
            PV_Recognizer.Train(model, "Unknown");
            icImages.ItemsSource = PeopleImage.Where(x => x.Processed == false);
        }

        private void BtnSave_Click(object sender, RoutedEventArgs e)
        {
            // The sibling TextBox inside the same DockPanel holds the name typed by the user.
            Button           button  = sender as Button;
            RecognitionModel model   = button.Tag as RecognitionModel;
            TextBox          textBox = (button.Parent as DockPanel).Children.OfType <TextBox>().FirstOrDefault();
            string           name    = textBox.Text;

            // Label the face with the entered name and refresh the list of unprocessed faces.
            PV_Recognizer.Train(model, name);
            icImages.ItemsSource = PeopleImage.Where(x => x.Processed == false);
        }
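The handlers above pass the RecognitionModel stored in each button's Tag to PV_Recognizer.Train. The class itself is not included in these examples; the following is a minimal sketch inferred from how it is used in Examples #3-#5 (the constructor shape, LabelInt, Processed, Distance, FullImageUri and FaceImageCV all appear in the snippets, while the property name FaceImagePath for the saved face crop is an assumption).

    // Minimal sketch of the model the snippets assume. Property names that never
    // appear in the examples (FaceImagePath) are guesses based on how they are used.
    public class RecognitionModel
    {
        public int    LabelInt      { get; set; }   // numeric label, 0 = "Unknown"
        public string FaceImagePath { get; set; }   // assumed name: path of the saved 100x100 face crop
        public string FullImageUri  { get; set; }   // source image the face was detected in
        public bool   Processed     { get; set; }   // true once the face has been labelled
        public double Distance      { get; set; }   // distance reported by Recognizer.Predict

        // Grayscale crop used by Recognizer.Train in Example #5.
        public Image <Gray, byte> FaceImageCV
        {
            get { return new Image <Gray, byte>(FaceImagePath); }
        }

        public RecognitionModel(int labelInt, string faceImagePath, string fullImageUri, bool processed)
        {
            LabelInt      = labelInt;
            FaceImagePath = faceImagePath;
            FullImageUri  = fullImageUri;
            Processed     = processed;
        }
    }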
Example #3
 public static void Train(RecognitionModel model, String name)
 {
     // Mark the face as handled so it no longer shows up as unprocessed.
     model.Processed = true;

     // Reuse the numeric id if this name is already known, otherwise
     // register the name under the next free id.
     if (dic_labels.Values.Contains(name))
     {
         model.LabelInt = dic_labels.FirstOrDefault(x => x.Value == name).Key;
     }
     else
     {
         int num = dic_labels.Count;
         dic_labels.Add(num, name);
         model.LabelInt = num;
     }
 }
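Example #3 only updates dic_labels in memory; none of these examples shows the write-back. Init in Example #5 reads labels.txt as one "id|name" pair per line, so a hypothetical persistence helper matching that format could look like the sketch below (SaveLabels is not part of the original code).

        // Hypothetical persistence helper (not in the original examples):
        // writes dic_labels in the "id|name" format that Init reads back.
        public static void SaveLabels()
        {
            using (StreamWriter writer = new StreamWriter("labels.txt", false))
            {
                foreach (KeyValuePair <int, string> label in dic_labels)
                {
                    writer.WriteLine(label.Key + "|" + label.Value);
                }
            }
        }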
Example #4
        public static List <RecognitionModel> Recognize(IInputArray image, String uri)
        {
            // Skip images whose faces have already been extracted into the training set.
            if (training_sets.Exists(x => x.FullImageUri == uri))
            {
                return(new List <RecognitionModel>());
            }

            // Run Haar-cascade face detection on the full image.
            List <Rectangle> faces = new List <Rectangle>();
            //List<Rectangle> eyes = new List<Rectangle>();
            long detectionTime;

            Detect(image, "haarcascade_frontalface_default.xml", /*"haarcascade_eye.xml",*/ faces, /*eyes,*/ out detectionTime);
            List <RecognitionModel> results = new List <RecognitionModel>();

            foreach (Rectangle r in faces)
            {
                // Crop the detected face, save a 100x100 thumbnail and wrap it in a model.
                IImage face = new UMat((UMat)image, r);
                Directory.CreateDirectory("training_set\\");
                String path = "training_set\\" + Path.GetRandomFileName() + ".jpg";
                new Image <Bgr, byte>(face.Bitmap).Resize(100, 100, Inter.Cubic).Save(path);
                RecognitionModel rm = new RecognitionModel(0, path, uri, false);
                if (Trained)
                {
                    using (Image <Gray, Byte> f = new Image <Gray, byte>(face.Bitmap))
                    {
                        // Predict against the trained recognizer; a lower distance means a closer match.
                        FaceRecognizer.PredictionResult predictionResults = Recognizer.Predict(f.Resize(100, 100, Inter.Cubic));
                        Console.WriteLine(predictionResults.Distance + "     " + predictionResults.Label);
                        rm.Distance = predictionResults.Distance;
                        // Accept the prediction only below the distance threshold and for a known label;
                        // everything else falls back to label 0 ("Unknown").
                        if (predictionResults.Distance < 6000 && dic_labels.ContainsKey(predictionResults.Label))
                        {
                            rm.LabelInt = predictionResults.Label;
                        }
                        else
                        {
                            rm.LabelInt = 0;
                        }
                    }
                }
                else
                {
                    // The recognizer has not been trained yet, so every face starts as "Unknown".
                    rm.LabelInt = 0;
                }

                results.Add(rm);
                training_sets.Add(rm);
            }
            return(results);
        }
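Recognize depends on a Detect helper that is not shown in these examples. The sketch below is one plausible implementation matching the call above, patterned after Emgu CV's standard CascadeClassifier usage (CvInvoke.CvtColor, CvInvoke.EqualizeHist, DetectMultiScale); the original may differ, for example in the scale factor and minimum-neighbour settings.

        // Sketch of the missing Detect helper (assumes System.Diagnostics, System.Drawing,
        // Emgu.CV and Emgu.CV.CvEnum are in scope): grayscale + histogram equalisation,
        // then a Haar-cascade face search; detectionTime is returned in milliseconds.
        private static void Detect(IInputArray image, String faceFileName, List <Rectangle> faces, out long detectionTime)
        {
            Stopwatch watch = Stopwatch.StartNew();
            using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
            using (UMat gray = new UMat())
            {
                CvInvoke.CvtColor(image, gray, ColorConversion.Bgr2Gray);
                CvInvoke.EqualizeHist(gray, gray);
                faces.AddRange(faceCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20)));
            }
            watch.Stop();
            detectionTime = watch.ElapsedMilliseconds;
        }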
Example #5
        public static List <RecognitionModel> Init()
        {
            training_sets = new List <RecognitionModel>();
            dic_labels    = new Dictionary <int, string>();
            // Load the label id -> name map from "labels.txt" ("id|name" per line); the file is created if missing.
            using (StreamReader reader = new StreamReader(new BufferedStream(new FileStream("labels.txt", FileMode.OpenOrCreate, FileAccess.Read), 512)))
            {
                while (!reader.EndOfStream)
                {
                    string[] vals = reader.ReadLine().Split('|');
                    dic_labels.Add(int.Parse(vals[0]), vals[1]);
                }
                // Guarantee that label 0 always maps to "Unknown".
                if (dic_labels.Count == 0)
                {
                    dic_labels.Add(0, "Unknown");
                }
            }
            // Load previously detected faces from "training_sets.txt" ("label|facePath|imageUri|processed[|distance]" per line).
            using (StreamReader reader = new StreamReader(new BufferedStream(new FileStream("training_sets.txt", FileMode.OpenOrCreate, FileAccess.Read), 512)))
            {
                while (!reader.EndOfStream)
                {
                    string[]         vals        = reader.ReadLine().Split('|');
                    RecognitionModel trainingSet = new RecognitionModel(int.Parse(vals[0]), vals[1], vals[2], int.Parse(vals[3]) != 0);
                    if (vals.Length > 4)
                    {
                        trainingSet.Distance = double.Parse(vals[4]);
                    }
                    training_sets.Add(trainingSet);
                }
            }
            // Train the recognizer with every face that has already been labelled;
            // skip training (and leave Trained == false) if no labelled faces exist yet.
            IEnumerable <RecognitionModel> arr = training_sets.Where(x => x.Processed);
            if (arr.Any())
            {
                Recognizer.Train <Gray, byte>(arr.Select(x => x.FaceImageCV).ToArray(), arr.Select(x => x.LabelInt).ToArray());
                Trained = true;
            }

            Console.WriteLine(training_sets.Count);
            return(training_sets);
        }
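Init parses training_sets.txt, but as with the labels the writer side is not part of these examples. A hypothetical counterpart using the pipe-delimited layout Init expects (and the FaceImagePath name assumed in the RecognitionModel sketch above) might look like this:

        // Hypothetical counterpart to Init (not in the original examples):
        // writes every model as "label|facePath|imageUri|processed|distance".
        public static void SaveTrainingSets()
        {
            using (StreamWriter writer = new StreamWriter("training_sets.txt", false))
            {
                foreach (RecognitionModel m in training_sets)
                {
                    // FaceImagePath is the assumed name of the saved face-crop path.
                    writer.WriteLine(m.LabelInt + "|" + m.FaceImagePath + "|" + m.FullImageUri + "|" +
                                     (m.Processed ? 1 : 0) + "|" + m.Distance);
                }
            }
        }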