System.Drawing.Size face_size = new System.Drawing.Size(7, 7); // units in pixels — presumably the minimum face size handed to the cascade detector; TODO confirm at the call site

        /// <summary>
        /// Sets up database paths, recognition thresholds, drawing styles, the
        /// Haar cascade detector and the LBPH recognizer, then loads the
        /// existing face database from disk.
        /// </summary>
        /// <param name="mainWindow">Host window; must expose a
        /// communicationHelper member which in turn exposes systemData.</param>
        public FacialRecognition(dynamic mainWindow)
        {
            this.mainWindow = mainWindow;
            this.commHelper = this.mainWindow.communicationHelper;
            this.systemData = this.commHelper.systemData;

            // Haar cascade shipped next to the executable.
            classifier_path  = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location) + "/opencv/data/haarcascades/haarcascade_frontalface_default.xml";
            database_path    = "../../../../public/";
            people_path      = database_path + "people/";
            // BUG FIX: was `activations_path + "activations/"` — a self-reference
            // on a still-unassigned field, so the path never pointed inside the
            // database directory like people_path does.
            activations_path = database_path + "activations/";

            // Distance thresholds — presumably LBPH match / retrain cutoffs;
            // confirm against the code that consumes them.
            recognized_threshold       = 105;
            add_new_training_threshold = 75;
            num_trained  = 0;
            input_height = 240;
            input_width  = 426;

            // Overlay styling for detected faces.
            face_outline_pen     = new System.Windows.Media.Pen(System.Windows.Media.Brushes.LightBlue, 3);
            face_label_brush     = Brushes.Black;
            face_label_font      = new Typeface("Georgia");
            face_label_font_size = 10;

            label_to_int        = new Dictionary <string, int>();
            training_images     = new List <Image <Gray, byte> >();
            training_int_labels = new List <int>();
            training_labels     = new List <string>();

            face_finder     = new CascadeClassifier(classifier_path);
            face_recognizer = new LBPHFaceRecognizer();

            load_database();
        }
        /// <summary>
        /// After user confirmation, wipes the face database: clears the face UI
        /// panel, deletes every stored face image, resets the people metadata,
        /// removes the serialized JSON data and recognizer state from disk, and
        /// replaces the recognizer with a fresh, untrained one.
        /// </summary>
        private void DeleteFaces_Click(object sender, RoutedEventArgs e)
        {
            var answer = MessageBox.Show("Вы действительно собираетесь удалить базу лиц?", "Подтверждение",
                                         MessageBoxButton.YesNo, MessageBoxImage.Warning);
            if (answer != MessageBoxResult.Yes)
            {
                return;
            }

            FaceItemsPlace.Children.Clear();

            // Remove every stored face image file.
            foreach (var face in PeopleData.Face)
            {
                var facePath = Environment.CurrentDirectory + face;
                if (File.Exists(facePath))
                {
                    File.Delete(facePath);
                }
            }
            PeopleData = new PeopleData();

            // Remove the serialized people metadata, if present.
            var jsonPath = Environment.CurrentDirectory + ConfigurationManager.AppSettings["JsonData"];
            if (File.Exists(jsonPath))
            {
                File.Delete(jsonPath);
            }

            // Remove the trained recognizer state, if present.
            var recognizerPath = ConfigurationManager.AppSettings["FaceRecognizerData"];
            if (File.Exists(recognizerPath))
            {
                File.Delete(recognizerPath);
            }

            // Start over with a fresh, untrained recognizer.
            FaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);
            MessageBox.Show("Данные обучения удалены");
        }
        /// <summary>
        /// Trains the LBPH recognizer on a fixed set of 30 face images: 25
        /// supplied by open_image, plus five loaded from files "1.jpg".."5.jpg"
        /// that are all assigned label 6.
        /// </summary>
        public void FaceLearn()
        {
            recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 80);
            Image <Gray, Byte>[] images = new Image <Gray, Byte> [30];
            int[] labels = new int[30];

            open_image(ref images, ref labels);

            // Slots 25..29 come from "1.jpg".."5.jpg" and share label 6.
            for (int i = 25; i < 30; i++)
            {
                images[i] = getFace((i - 24) + ".jpg");
                labels[i] = 6;
            }
            recognizer.Train <Gray, Byte>(images, labels);
            // BUG FIX: corrected the "Training iss over" typo in the log message.
            // Also removed an unused local, a stray debug "!" print and a block of
            // commented-out prediction code left over from earlier experiments.
            Console.WriteLine("Training is over");
        }
示例#4
0
        /// <summary>
        /// Puts the lock screen into fullscreen always-on-top mode, loads one
        /// pre-trained LBPH recognizer per configured profile, and starts
        /// streaming frames from the configured webcam.
        /// </summary>
        private void LockScreen_Load(object sender, EventArgs e)
        {
            // Fullscreen, borderless, on top of all windows.
            this.TopMost = true;
            this.BackgroundImageLayout = ImageLayout.Stretch;
            FormBorderStyle            = FormBorderStyle.None;
            WindowState = FormWindowState.Maximized;
            this.Show();
            this.Activate();

            // Load one pre-trained recognizer per user profile.
            for (int i = 0; i < Properties.Settings.Default.numberOfProfiles; i++)
            {
                FaceRecognizer recogniser = new LBPHFaceRecognizer(1, 8, 8, 8, 100);
                recogniser.Read(Application.StartupPath + $"\\Profiles\\profile{i + 1}.txt");
                recognisers.Add(recogniser);
            }

            // Locate the configured video capture device.
            filter = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            foreach (FilterInfo device in filter)
            {
                if (device.Name == Properties.Settings.Default.webcam)
                {
                    cam = new VideoCaptureDevice(device.MonikerString);
                }
            }

            // BUG FIX: cam.Start() previously threw a bare NullReferenceException
            // when the configured webcam was not attached; fail with a clear message.
            if (cam == null)
            {
                throw new InvalidOperationException(
                    "Configured webcam '" + Properties.Settings.Default.webcam + "' was not found.");
            }

            // BUG FIX: subscribe before starting the device so the first frames
            // delivered by the capture thread are not missed.
            cam.NewFrame += Cam_NewFrame;
            cam.Start();
        }
 // Sets up two Haar cascade detectors (one for collecting training faces, one
 // for roll-call recognition — both built from the same cascade file), an
 // untrained LBPH recognizer, and an empty training-data list.
 DetectService()
 {
     detectFaceTrain    = new CascadeClassifier(haarLikePath);
     detectFaceRollCall = new CascadeClassifier(haarLikePath);
     recognizer         = new LBPHFaceRecognizer(1, 8, 8, 8, 90); // radius, neighbours, grid x/y, threshold
     trainData          = new List <TrainModel>();
 }
        // Trains a recognizer using the labeled images in inputDir and saves the
        // result to outputPath. File names are expected to look like
        // "<label>_anything.ext" where <label> parses as an integer.
        static void TrainRecognizer(string inputDir, string outputPath)
        {
            var files = Directory.EnumerateFiles(inputDir).ToList();
            var faces = new Image <Gray, byte> [files.Count];
            var ids   = new int[files.Count];

            Console.WriteLine("Loading data...");
            for (int idx = 0; idx < files.Count; idx++)
            {
                string file = files[idx];
                // The numeric label is the file-name part before the first '_'.
                ids[idx]   = int.Parse(Path.GetFileNameWithoutExtension(file).Split('_')[0]);
                faces[idx] = CvInvoke.Imread(file, Emgu.CV.CvEnum.LoadImageType.Grayscale).ToImage <Gray, byte>();
            }
            Console.WriteLine("Loading complete.");

            Console.WriteLine("Training...");
            var recognizer = new LBPHFaceRecognizer();
            recognizer.Train(faces, ids);
            Console.WriteLine("Training done. Saving results...");
            recognizer.Save(outputPath);
        }
示例#7
0
        /// <summary>
        /// Grabs a frame from the capture device, detects faces, and labels each
        /// detection with the LBPH recognizer's prediction ("Unknown" when the
        /// prediction is rejected), then shows the annotated frame.
        /// </summary>
        private void LBPHFaceRecognition(object sender, EventArgs e)
        {
            Frame = _capture.QueryFrame().ToImage <Bgr, byte>();
            var frame = Frame.Resize(frameW, frameH, Inter.Cubic);

            grayFrame = frame.Convert <Gray, Byte>();
            var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

            if (faces.Length > 0)
            {
                // PERF FIX: the recognizer used to be re-created and re-trained once
                // per detected face inside the loop; the training data does not
                // change between faces, so train once per frame instead.
                lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
                lbphFaceRecognizer.Train(trainingImages.ToArray(), indexLabels.ToArray());

                foreach (var f in faces)
                {
                    // Crop, grayscale and normalize the face to the training size.
                    var result = lbphFaceRecognizer.Predict(frame.Copy(f).Convert <Gray, Byte>().Resize(100, 100, Inter.Cubic));
                    if (result.Label == -1)
                    {
                        frame.Draw(f, new Bgr(Color.Red), 2);
                        frame.Draw("Unknown", new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
                    }
                    else
                    {
                        frame.Draw(f, new Bgr(Color.Green), 2);
                        frame.Draw(nameLabels[result.Label], new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
                    }
                    alertMessage.Text = (alert + "เริ่มการ Face Recognition ด้วยวิธีการ " + RecognitionType.Text + " แล้ว \r\n" + "Distance " + result.Distance + "\r\n Faces " + faces.Length.ToString());
                }
            }
            imgFrame.Image = frame.Resize(imgBoxW, imgBoxH, Inter.Cubic);
        }
示例#8
0
 /// <summary>
 /// Creates the recognition service: remembers where trained recognizer state
 /// lives, opens the face data store, and builds one Eigen and one LBPH
 /// recognizer (both untrained at this point).
 /// </summary>
 /// <param name="databasePath">Path handed to the data store.</param>
 /// <param name="recognizerFilePath">File used to save/load trained state.</param>
 public FaceRecognitionService(String databasePath, String recognizerFilePath)
 {
     _recognizerFilePath = recognizerFilePath;
     _dataStoreAccess    = new DataStoreAccess(databasePath);
     _faceRecognizer     = new EigenFaceRecognizer(80, 1.1); // numComponents, threshold
     _LBPHFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100); // radius, neighbours, grid x/y, threshold
 }
        // Trains an LBPH model on a single image ("lenna.png", label 1), runs a
        // Haar cascade over the color image, and predicts a label for every
        // detected face region, asserting it matches the trained label.
        public void TrainAndPredict()
        {
            using (var src = Image("lenna.png"))
            using (var gray = src.CvtColor(ColorConversionCodes.BGR2GRAY))
            using (var model = LBPHFaceRecognizer.Create())
            using (var cascade = new CascadeClassifier("_data/text/haarcascade_frontalface_default.xml"))
            {
                // One-image training set: the whole grayscale picture, label 1.
                model.Train(new[] { gray }, new[] { 1 });

                foreach (Rect faceRect in cascade.DetectMultiScale(src))
                {
                    using (Mat face = gray[faceRect].Clone())
                    {
                        Cv2.Resize(face, face, new Size(256, 256));

                        model.Predict(face, out int label, out double confidence);

                        Console.WriteLine($"{label} ({confidence})");
                        Assert.Equal(1, label);
                        Assert.NotEqual(0, confidence, 9);
                        Window.ShowImages(face);
                    }
                }
            }
        }
        /// <summary>
        /// Builds the Fisher and LBPH recognizers (default parameters; tuned
        /// alternatives are kept below as commented-out references), the cascade
        /// classifier, ensures the data folder exists, and restores previously
        /// trained state for either recognizer when its file is present.
        /// </summary>
        public ImageService()
        {
            _fisherFaceRecognizer = new FisherFaceRecognizer();
            //_fisherFaceRecognizer = new FisherFaceRecognizer(FaceRecognizerNumberOfComponents, FaceRecognizerThreshold);

            _lbphFaceRecognizer = new LBPHFaceRecognizer();
            //_lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 32, 32, 256);

            _cascadeClassifier = new CascadeClassifier(CascadeClassifierConfigPath);

            // Training data is persisted under DataFolder; create it on first run.
            if (!Directory.Exists(DataFolder))
            {
                Directory.CreateDirectory(DataFolder);
            }

            // Restore any previously trained state (each recognizer independently).
            if (File.Exists(FisherTrainingDataPath))
            {
                _fisherFaceRecognizer.Read(FisherTrainingDataPath);
            }

            if (File.Exists(LBPHTrainingDataPath))
            {
                _lbphFaceRecognizer.Read(LBPHTrainingDataPath);
            }
        }
 /// <summary>
 /// Builds an LBPH face recognizer, restoring trained state from the path
 /// configured under "FaceRecognizerData" when that file exists; otherwise the
 /// returned recognizer is untrained.
 /// </summary>
 private FaceRecognizer LoadFaceRecognizer()
 {
     // Single construction point so the LBPH parameters cannot drift between
     // the "trained state exists" and "fresh recognizer" branches (the original
     // duplicated the constructor call and read the config key twice).
     FaceRecognizer faceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);

     string dataPath = ConfigurationManager.AppSettings["FaceRecognizerData"];
     if (File.Exists(dataPath))
     {
         faceRecognizer.Read(dataPath);
     }
     return(faceRecognizer);
 }
示例#12
0
        /// <summary>
        /// Builds the main form: wires up the face cascade and frame handler,
        /// enumerates the available video capture devices into the camera combo
        /// box, and disables capture controls when no camera is present.
        /// </summary>
        public Form1( )
        {
            faceRecog = new FaceRecognition();
            InitializeComponent( );
            faceCascade = new CascadeClassifier("haarcascade_frontalface_default.xml");
            // CLEANUP: removed a local LBPHFaceRecognizer that was allocated here
            // and never referenced, plus leftover commented-out AVI-reader code.

            camera1FpsLabel.Text         = string.Empty;
            videoSourcePlayer1.NewFrame += new VideoSourcePlayer.NewFrameHandler(this.playerControl_NewFrame);

            // show device list
            try
            {
                // enumerate video devices
                videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);

                if (videoDevices.Count == 0)
                {
                    // Jump to the catch block below: shared "no cameras" UI state.
                    throw new Exception( );
                }

                // Display entries are 1-based: "1 : <device name>".
                for (int i = 0; i < videoDevices.Count; i++)
                {
                    camera1Combo.Items.Add((i + 1) + " : " + videoDevices[i].Name);
                }

                camera1Combo.SelectedIndex = 0;
            }
            catch
            {
                // No usable camera: disable capture and show a placeholder entry.
                startButton.Enabled = false;

                camera1Combo.Items.Add("No cameras found");
                camera1Combo.SelectedIndex = 0;
                camera1Combo.Enabled = false;
            }
        }
示例#13
0
        /// <summary>
        /// Loads previously trained recognizer state from disk for the algorithm
        /// selected in comboBoxAlgorithm (EigenFaces, FisherFaces or LBPHFaces).
        /// Unrecognized selections do nothing, matching the original behavior.
        /// </summary>
        private void button2_Click(object sender, EventArgs e)
        {
            if (comboBoxAlgorithm.Text == "EigenFaces")
            {
                LoadTrainedRecognizer("\\TrainedFaces\\trainedDataEigen.dat", path =>
                {
                    eigenFaceRecognizer = new EigenFaceRecognizer(eigenTrainedImageCounter, 3000);
                    eigenFaceRecognizer.Load(path);
                });
            }
            else if (comboBoxAlgorithm.Text == "FisherFaces")
            {
                LoadTrainedRecognizer("\\TrainedFaces\\trainedDataFisher.dat", path =>
                {
                    fisherFaceRecognizer = new FisherFaceRecognizer(fisherTrainedImageCounter, 3000);
                    fisherFaceRecognizer.Load(path);
                });
            }
            else if (comboBoxAlgorithm.Text == "LBPHFaces")
            {
                LoadTrainedRecognizer("\\TrainedFaces\\trainedDataLBPH.dat", path =>
                {
                    lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
                    lbphFaceRecognizer.Load(path);
                });
            }
        }

        // Shared load-and-report logic for all three recognizer kinds (the three
        // branches above used to duplicate this try/catch verbatim): builds the
        // absolute path, runs the algorithm-specific loader, and reports success
        // or failure exactly as the original branches did.
        private void LoadTrainedRecognizer(string relativePath, Action<string> load)
        {
            try
            {
                string dataDirectory = Directory.GetCurrentDirectory() + relativePath;
                load(dataDirectory);
                richTextBox1.Text += "Trained Database Loaded.";
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.ToString());
                MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
        }
示例#14
0
        // Step 4: train on images — uses the face images saved in step 3.
        // Loads every *.jpg under .\TrainedImages, normalizes it (200x200,
        // histogram-equalized), then trains both an Eigen and an LBPH recognizer.
        // Each image gets its own sequential integer label; PersonsNames maps
        // label index -> person name taken from the "<name>_..." file prefix.
        // Returns true and sets isTrained when at least one image was loaded.
        private bool TrainImagesFromDir()
        {
            int    ImagesCount = 0;
            double Threshold   = 7000;

            // Rebuild the training set from scratch on every call.
            TrainedFaces.Clear();
            PersonsLabes.Clear();
            PersonsNames.Clear();
            try
            {
                string   path  = Directory.GetCurrentDirectory() + @"\TrainedImages";
                string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);

                foreach (var file in files)
                {
                    Image <Gray, byte> trainedImage = new Image <Gray, byte>(file).Resize(200, 200, Inter.Cubic);
                    CvInvoke.EqualizeHist(trainedImage, trainedImage);
                    TrainedFaces.Add(trainedImage);
                    PersonsLabes.Add(ImagesCount);
                    string name = file.Split('\\').Last().Split('_')[0];
                    PersonsNames.Add(name);
                    ImagesCount++;
                    Debug.WriteLine(ImagesCount + ". " + name);
                }

                if (TrainedFaces.Count() > 0)
                {
                    // recognizer = new EigenFaceRecognizer(ImagesCount,Threshold);
                    recognizer = new EigenFaceRecognizer(ImagesCount, Threshold);
                    recognizer.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());
                    // NOTE(review): the first LBPH argument is the sampling radius,
                    // which here grows with the number of images — that looks
                    // unintended (a radius of 1-2 is typical); confirm before
                    // relying on these results.
                    recognizerLBPH = new LBPHFaceRecognizer(ImagesCount, 10, 10, 10, Threshold);
                    recognizerLBPH.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());
                    isTrained = true;
                    //Debug.WriteLine(ImagesCount);
                    //Debug.WriteLine(isTrained);
                    return(true);
                }
                else
                {
                    isTrained = false;
                    return(false);
                }
            }
            catch (Exception ex)
            {
                isTrained = false;
                MessageBox.Show("Error in Train Images: " + ex.Message);
                return(false);
            }
        }
示例#15
0
        /// <summary>
        /// Face recognition based on the Local Binary Pattern Histogram
        /// (LBPH) classifier. </summary>
        /// <param name="labels">The set of labels in the training set</param>
        /// <param name="trainingImages">The set of images(faces) in the
        /// training set</param>
        /// <param name="face">The face detected in gray scale
        /// to be recognized. The dimension of the image must be
        /// equal to the dimension of the images in the training set</param>
        /// <returns>A string representing the label of the face recognized
        /// or an empty string if no matches were found</returns>
        public String recognizeLBPHFace(List <String> labels,
                                        List <Image <Gray, Byte> > trainingImages,
                                        Bitmap face)
        {
            String             label         = String.Empty;
            Image <Bgr, Byte>  imageEmgu     = new Image <Bgr, Byte>(face);
            // Normalize the candidate to the 100x100 size used by the training set.
            Image <Gray, Byte> extractedFace = imageEmgu.Convert <Gray, Byte>().Copy().Resize(
                100, 100, INTER.CV_INTER_CUBIC);

            extractedFace._EqualizeHist();
            InitParams();

            if (trainingImages.ToArray().Length != 0)
            {
                LBPHFaceRecognizer recognizer = new LBPHFaceRecognizer(
                    1, 8, 8, 8, LBPHTreshold);
                // Images are labeled by index, so a predicted Label doubles as an
                // index into both `labels` and `trainingImages`.
                int[] labelsInt = new int[labels.ToArray().Length];
                for (int i = 0; i < labels.ToArray().Length; i++)
                {
                    labelsInt[i] = i;
                }
                recognizer.Train(trainingImages.ToArray(), labelsInt.ToArray());
                LBPHFaceRecognizer.PredictionResult pr;
                pr = recognizer.Predict(extractedFace);
                if (pr.Label != -1)
                {
                    label                   = labels[pr.Label];
                    MostSimilarFace         = trainingImages[pr.Label];
                    MostSimilarFaceIndex    = pr.Label;
                    MostSimilarFaceDistance = (float)pr.Distance;
                    MostSimilarFaceLabel    = labels[pr.Label];
                }
                else
                {
                    // Fallback: retrain with an effectively unlimited threshold so
                    // the closest match can still be recorded for diagnostics
                    // (the returned label stays empty).
                    recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 10000);
                    recognizer.Train(trainingImages.ToArray(), labelsInt.ToArray());
                    pr = recognizer.Predict(extractedFace);
                    // BUG FIX: the fallback previously indexed trainingImages with
                    // pr.Label unconditionally; a second -1 result crashed with an
                    // out-of-range access.
                    if (pr.Label != -1)
                    {
                        MostSimilarFace         = trainingImages[pr.Label];
                        MostSimilarFaceIndex    = pr.Label;
                        MostSimilarFaceDistance = (float)pr.Distance;
                        MostSimilarFaceLabel    = labels[pr.Label];
                    }
                }
            }
            return(label);
        }
        /// <summary>
        /// Rebuilds the LBPH recognizer from the current image set under the
        /// sync lock. Each image is labeled with its own index. Returns false
        /// when fewer than two images are available.
        /// </summary>
        public bool Train()
        {
            lock (_sync) {
                if (_images.Count <= 1)
                {
                    return(false);
                }

                // Replace any previously trained recognizer.
                if (_faceRecognizer != null)
                {
                    _faceRecognizer.Dispose();
                }
                _faceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);

                var samples = _images.ToArray();
                var ids     = Enumerable.Range(0, samples.Length).ToArray();
                _faceRecognizer.Train(samples, ids);

                _shouldTrain = false;
                return(true);
            }
        }
        /// <summary>
        /// Trains a fresh LBPH recognizer on every stored face image (paired
        /// with its name id) and persists the trained state. Does nothing when
        /// the face list is empty.
        /// </summary>
        public void TrainFaceRecognizer()
        {
            // Hoisted: PeopleData.Face.Count() used to be evaluated four times.
            int faceCount = PeopleData.Face.Count();
            if (faceCount == 0)
            {
                return;
            }

            Image <Gray, Byte>[] trainFaces = new Image <Gray, Byte> [faceCount];

            for (int i = 0; i < faceCount; i++)
            {
                trainFaces[i] = new Image <Gray, byte>(Environment.CurrentDirectory + PeopleData.Face[i]);
            }
            FaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);
            FaceRecognizer.Train(trainFaces, PeopleData.NameId.ToArray());

            SaveFaceRecognizer();
        }
示例#18
0
        /// <summary>
        /// Sets up the face detector and a pre-trained LBPH recognizer, then
        /// loads the id-to-name mapping from the label file ("id,name" per line).
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Initialize detector and recognizer.
            detector   = new CascadeClassifier(cascadePath);
            recognizer = new LBPHFaceRecognizer();
            recognizer.Load(trainedData);

            // Each line of the label file maps a numeric id to a display name.
            names = new Dictionary <int, string>();
            foreach (var line in System.IO.File.ReadAllLines(labelPath))
            {
                var parts = line.Split(',');
                names.Add(int.Parse(parts[0]), parts[1]);
            }
        }
示例#19
0
        /// <summary>
        /// Identifies the face in <paramref name="img"/> by training a fresh
        /// LBPH recognizer per stored label (one model per person; that
        /// person's images are labeled 0..n-1) and keeping the label whose
        /// prediction distance is smallest. Returns that label's id when the
        /// best distance beats the required threshold, otherwise Identifier(-1).
        /// </summary>
        // NOTE(review): _minDistanse appears to be an instance field, so the
        // best distance found here persists across calls — a second call would
        // compare against the previous call's minimum. Confirm it is reset
        // elsewhere before each identification.
        public IIdentifier GetIdentity(IFaceImage img)
        {
            IIdentifier answ   = new Identifier(int.MinValue);
            var         labels = _labelRepo.GetAll();

            foreach (var label in labels)
            {
                IEnumerable <IFaceImage>   batch     = label.Images;
                List <Image <Gray, Byte> > compBatch = new List <Image <Gray, Byte> >();
                List <int> trainingLabels            = new List <int>();

                // Convert this label's images and number them sequentially.
                int enumerator = 0;
                foreach (var current in batch)
                {
                    compBatch.Add(new Image <Gray, Byte>(current.ImageOfFace));
                    trainingLabels.Add(enumerator++);
                }

                // One recognizer per stored label; parameters come from fields.
                FaceRecognizer recognizer = new LBPHFaceRecognizer(_recognizerRadius, _recognizerNeighbors,
                                                                   _recognizerGridX, _recognizerGridY, _recognizerThreshold);

                recognizer.Train(compBatch.ToArray(), trainingLabels.ToArray());

                // Track the label with the smallest prediction distance so far.
                PredictionResult result = recognizer.Predict(new Image <Gray, Byte>(img.ImageOfFace));
                if (result.Distance < _minDistanse)
                {
                    _minDistanse = result.Distance;
                    answ         = label.Id;
                }
            }
            if (_minDistanse < _requiredDistance)
            {
                return(answ);
            }
            else
            {
                return(new Identifier(-1));
            }
        }
示例#20
0
        /// <summary>
        /// Initializes the form, restores trained LBPH state from disk when it
        /// exists (otherwise disables the predict button), builds the face/eye
        /// cascade detectors, and starts the webcam feed.
        /// </summary>
        public Form1()
        {
            InitializeComponent();

            faceRecognition = new LBPHFaceRecognizer(2, 10, 8, 8, 85);

            // Without saved training data there is nothing to predict against.
            if (!File.Exists(YMLPath))
            {
                PredictButton.Enabled = false;
            }
            else
            {
                faceRecognition.Read(YMLPath);
            }

            faceDetection = new CascadeClassifier(Path.GetFullPath(@"../../data/haarcascades/haarcascade_frontalface_default.xml"));
            eyeDetection  = new CascadeClassifier(Path.GetFullPath(@"../../data/haarcascades/haarcascade_eye.xml"));
            frame         = new Mat();
            Faces         = new List <Image <Gray, byte> >();
            IDs           = new List <int>();
            BeginWebCam();
        }
 // Builds the frontal-face Haar detector and an LBPH recognizer
 // (radius 1, 8 neighbours, 8x8 grid, distance threshold 130).
 public FaceRecognition()
 {
     faceCascade = new CascadeClassifier("haarcascade_frontalface_default.xml");
     recognizer  = new LBPHFaceRecognizer(1, 8, 8, 8, 130);
 }
        /***
         *  Function: private void TrainMachine(FaceIdentity faceIdentity, String name)
         *  Parameter(s): FaceIdentity faceIdentity
         *      Privilege of the face that is being trained to store in the ASSET_INDEX.dat file.
         *                String name
         *      The name of the individual that is being trained; currently, it is not used, but it exists so that in the next version,
         *      the machine can be more customized.
         *  Return Value: void
         *
         *  Overview: reads every image in the training directory, extracts the
         *  faces, resizes them to a common size (persisted in CORE_IMAGE_DATA.dat
         *  on first run), trains an LBPH recognizer on them with a single label
         *  derived from faceIdentity, saves the trained state to a per-category
         *  file, and appends a "name^identifier^file" record to ASSET_INDEX.dat.
         ***/
        private void TrainMachine(FaceIdentity faceIdentity, String name)
        {
            // Notify the user that training has begun.
            MessageBox.Show(trainingBeginning, this.Title, MessageBoxButton.OK);

            String[]   fileList = Directory.GetFiles(FileUtilities.TrainingDirectoryName);
            List <Mat> matList  = new List <Mat>();

            // Load every captured training image as-is.
            foreach (String file in fileList)
            {
                matList.Add(new Mat(file, Emgu.CV.CvEnum.LoadImageType.Unchanged));
            }
            List <Image <Gray, Byte> > list = new List <Image <Gray, Byte> >();

            // Detect each face in each image (images are downscaled by
            // scale_factor before detection).
            foreach (Mat mat in matList)
            {
                Image <Gray, Byte> image    = mat.ToImage <Gray, Byte>().Resize(1 / (double)scale_factor, Emgu.CV.CvEnum.Inter.Cubic);
                Rectangle[]        faceList = faceClassifier.DetectMultiScale(image);
                foreach (Rectangle rect in faceList)
                {
                    list.Add(image.Copy(rect).Convert <Gray, Byte>());
                }
            }

            // Make sure that there is at least one face to train.
            if (list.Count() == 0)
            {
                PanicAndTerminateProgram();
            }

            // If a height exists in the CORE_IMAGE_DATA.dat file, resize to that, useful for future training.
            int height = facialRecognitionHeight == 0 ? list[0].Height * scale_factor : facialRecognitionHeight;
            int width  = facialRecognitionWidth == 0 ? list[0].Width * scale_factor : facialRecognitionWidth;

            // First run: persist the chosen dimensions for future sessions.
            if (facialRecognitionHeight == 0 || facialRecognitionWidth == 0)
            {
                List <String> lines = new List <String>();
                lines.Add(height + "|" + width);
                File.WriteAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData, lines.ToArray());
            }
            List <Image <Gray, Byte> > listFinal = new List <Image <Gray, Byte> >();

            // Bring every extracted face to the common training size.
            foreach (Image <Gray, Byte> image in list)
            {
                listFinal.Add(image.Resize(width, height, Emgu.CV.CvEnum.Inter.Cubic));
            }
            List <int> labelList = new List <int>();
            int        integer   = 0;
            String     prefix    = "";
            String     ident     = "";

            // Map the privilege level to a numeric label, a file prefix and an
            // identifier string.
            if (faceIdentity == FaceIdentity.FaceAdmin)
            {
                integer = 0;
                prefix  = FileUtilities.AdminTrainedPrefix;
                ident   = adminIdentifier;
            }
            else if (faceIdentity == FaceIdentity.FaceAsset)
            {
                // NOTE(review): FaceAsset pairs AssetTrainedPrefix with
                // auxAdminIdentifier, while FaceAuxAdmin below pairs
                // AuxAdminTrainedPrefix with assetIdentifier — the two ident
                // assignments look swapped. Confirm against the identifier
                // declarations before changing.
                integer = 2;
                prefix  = FileUtilities.AssetTrainedPrefix;
                ident   = auxAdminIdentifier;
            }
            else if (faceIdentity == FaceIdentity.FaceAuxAdmin)
            {
                integer = 1;
                prefix  = FileUtilities.AuxAdminTrainedPrefix;
                ident   = assetIdentifier;
            }
            else
            {
                // Presumably does not return — TODO confirm PanicAndTerminateProgram.
                PanicAndTerminateProgram();
            }
            // Every face in this batch gets the same label.
            for (int i = 0; i < list.Count(); i++)
            {
                labelList.Add(integer);
            }

            // Train the machine and write its trained state to a file.
            LBPHFaceRecognizer lbphFaceRecognizer = new LBPHFaceRecognizer();

            lbphFaceRecognizer.Train <Gray, Byte>(listFinal.ToArray(), labelList.ToArray());
            Directory.Delete(FileUtilities.TrainingDirectoryName, true);
            String temp  = categories[integer];
            String fname = FileUtilities.DirectoryName + "\\" + prefix + temp.ToUpper().Replace(' ', '_') + FileUtilities.FileExtension;

            lbphFaceRecognizer.Save(fname);

            // Write everything to the ASSET_INDEX.dat file.
            FileUtilities.TrainingDirectoryDeletion();
            List <String> aboutTraining = new List <String>();

            aboutTraining.Add(name + "^" + ident + "^" + fname);
            File.AppendAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.AssetIndexData, aboutTraining.ToArray());

            // Notify the user that training has ended.
            MessageBox.Show(trainingEnded, this.Title, MessageBoxButton.OK);
        }
示例#23
0
        /// <summary>
        /// Static initializer: pulls every employee face image out of the Oracle
        /// database, normalizes each one to a 320x244 grayscale image, and trains
        /// the shared LBPH recognizer on the result (labeled by employee userid).
        /// Any failure is reported via a message box and swallowed.
        /// </summary>
        static LoadData()
        {
            try
            {
                Console.WriteLine("====================================LOAD DATA========================================");

                myconnection.ConnectionString = Properties.Settings.Default.ConnectionString;
                myconnection.Open();
                string           query = "select employee.userid,employeename,image from employee,image where image.userid=employee.userid";
                OracleCommand    cmd   = new OracleCommand(query, myconnection);
                OracleDataReader dr    = cmd.ExecuteReader();

                while (dr.Read())
                {
                    Labels_ID.Add(Convert.ToInt32(dr["userid"].ToString()));
                    Console.WriteLine("id: " + Convert.ToInt32(dr["userid"].ToString()));
                    Labels_Name.Add(dr["employeename"].ToString());
                    Console.WriteLine("name: " + dr["employeename"].ToString());

                    // Round-trip the BLOB through a temp file to build a Bitmap.
                    string img_name = "image.bmp";
                    string filePath = temp_images + img_name;
                    byte[] blob     = (byte[])dr["IMAGE"];
                    //byte[] blob = Convert.FromBase64String(dr["IMAGE"].ToString()); //for clob

                    // FIX: streams and the bitmap are now in using blocks so they
                    // are released even if an exception is thrown mid-loop (the
                    // original Close()/no-dispose pattern leaked on failure).
                    using (var fs = new FileStream(filePath, FileMode.Create))
                    {
                        fs.Write(blob, 0, blob.Length);
                    }

                    // Normalizing it to grayscale at a fixed size.
                    Image <Gray, Byte> normalizedMasterImage;
                    using (var imageStream = new System.IO.FileStream(filePath, System.IO.FileMode.Open))
                    using (var masterImage = new Bitmap(imageStream))
                    {
                        normalizedMasterImage = new Image <Gray, Byte>(masterImage)
                                                .Resize(320, 244, Emgu.CV.CvEnum.Inter.Cubic);
                    }
                    _traineImages.Add(normalizedMasterImage);

                    //---------------------delete file after being added----------------------------
                    if (File.Exists(filePath))
                    {
                        File.Delete(filePath);
                    }
                }
                myconnection.Close();
                Console.WriteLine("====================================LOAD DATA END========================================");

                // Expose the training images as Mats for the recognizer.
                faceImages = new Mat[_traineImages.Count];
                for (int i = 0; i < _traineImages.Count; i++)
                {
                    faceImages[i] = _traineImages[i].Mat;
                }

                Console.WriteLine("==============================Training model=======================================");

                recog = new LBPHFaceRecognizer(1, 8, 8, 8, 100);

                // Training model (skipped when no images were loaded).
                if (_traineImages.Count != 0)
                {
                    recog.Train(faceImages, Labels_ID.ToArray());
                }

                Console.WriteLine("==============================Training model end=======================================");
            }
            catch (Exception e)
            {
                MessageBox.Show(e.ToString());
                Console.Write(e.ToString());
            }
        }
示例#24
0
        /// <summary>
        /// Loads every *.jpg under .\TrainedFaces into the supplied training lists,
        /// optionally histogram-equalizing each image first, and appends each file
        /// name to richTextBox1. Integer labels restart from 0 on every call
        /// (matching the original per-click counter reset).
        /// </summary>
        /// <param name="images">Receives the grayscale training images.</param>
        /// <param name="labels">Receives the per-file string labels.</param>
        /// <param name="intLabels">Receives sequential integer labels (0..n-1).</param>
        /// <returns>The number of images loaded on this call.</returns>
        private int LoadTrainingFaces(List <Image <Gray, Byte> > images, List <string> labels, List <int> intLabels)
        {
            string   dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
            string[] files         = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);
            int      counter       = 0;

            foreach (var file in files)
            {
                Image <Bgr, Byte> trainedImage = new Image <Bgr, Byte>(file);
                // Equalize BEFORE converting/adding so the stored grayscale copy
                // reflects the equalization. (The original FisherFaces branch added
                // the image first, which made equalization a no-op for it.)
                if (eqHisChecked.Checked)
                {
                    trainedImage._EqualizeHist();
                }
                images.Add(trainedImage.Convert <Gray, Byte>());
                labels.Add(fileName(file));
                intLabels.Add(counter);
                counter++;
                richTextBox1.Text += fileName(file) + "\n";
            }
            return counter;
        }

        /// <summary>
        /// Trains the face recognizer selected in comboBoxAlgorithm (EigenFaces,
        /// FisherFaces or LBPHFaces) from the images in .\TrainedFaces. Any
        /// failure is reported via message boxes, as before.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            try
            {
                if (comboBoxAlgorithm.Text == "EigenFaces")
                {
                    eigenTrainedImageCounter = LoadTrainingFaces(eigenTrainingImages, eigenlabels, eigenIntlabels);
                    eigenFaceRecognizer = new EigenFaceRecognizer(eigenTrainedImageCounter, 2000);
                    eigenFaceRecognizer.Train(eigenTrainingImages.ToArray(), eigenIntlabels.ToArray());
                    //eigenFaceRecognizer.Save(Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataEigen.dat");
                }
                else if (comboBoxAlgorithm.Text == "FisherFaces")
                {
                    fisherTrainedImageCounter = LoadTrainingFaces(fisherTrainingImages, fisherlabels, fisherIntlabels);
                    fisherFaceRecognizer = new FisherFaceRecognizer(fisherTrainedImageCounter, 2000);
                    fisherFaceRecognizer.Train(fisherTrainingImages.ToArray(), fisherIntlabels.ToArray());
                    //fisherFaceRecognizer.Save(Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataFisher.dat");
                }
                else if (comboBoxAlgorithm.Text == "LBPHFaces")
                {
                    lbphTrainedImageCounter = LoadTrainingFaces(lbphTrainingImages, lbphlabels, lbphIntlabels);
                    lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
                    lbphFaceRecognizer.Train(lbphTrainingImages.ToArray(), lbphIntlabels.ToArray());
                    // LBPH is the only algorithm the original persisted to disk.
                    lbphFaceRecognizer.Save(Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataLBPH.dat");
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.ToString());
                MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
        }
        /***
         *  Function: public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt, MachineInputSourceKind misk)
         *  Parameter(s): bool train
         *      Whether or not the machine is supposed to be training itself.
         *                FaceIdentity fit
         *      The classification of the person that the Machine is training itself to recognize (supposed to be equal to FaceIdentity.FaceNone during
         *      normal execution).
         *                String nt
         *      The name of the person that the Machine is training itself to recognize (supposed to be equal to "" during normal execution).
         *                int cn
         *      The camera number to be used by the 'capture' variable.
         *                int fps
         *      The frame rate of the camera (not used when processing video files).
         *                bool crd
         *      Whether or not the Machine should dispose of resources (could potentially be unstable).
         *                int mdt
         *      The maxDistanceThreshold to use when classifying faces.
         *                 MachineInputSourceKind misk
         *      The input source from which the Machine will gather its input.
         *                 String t
         *      The title of this window.
         *                  MachineStartPage msp
         *      A reference to the main machine window to control.
         *  Return Value: N/A (Constructor)
         ***/
        public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt,
                          MachineInputSourceKind misk, String t, MachineStartPage msp, UIKind uk)
        {
            machineStartPage       = msp;
            cameraResourceDisposal = crd;
            maxDistanceThreshold   = mdt;
            cameraNumber           = cn;
            cameraFrameRate        = fps;
            machineInputSourceKind = misk;
            // NOTE(review): the original if/else on the input-source kind assigned
            // 'true' on both paths, so the branch was dead code; collapsed to a
            // single assignment with behavior unchanged. If file/none sources were
            // ever meant to disable speech recognition, restore the branch with 'false'.
            speechRecognitionOnline = true;
            userSpeech = "";
            FileUtilities.DirectoryCreation();
            FileUtilities.TrainingDirectoryCreation();
            faceClassifier = new CascadeClassifier("assets\\haarcascade_frontalface_alt.xml");
            uikind         = uk;
            // Select the focus-overlay bitmaps for the active UI skin.
            if (uikind == UIKind.UIMachine)
            {
                admin_focus     = new Bitmap("assets\\machine\\admin_focus.jpg");
                secondary_focus = new Bitmap("assets\\machine\\secondary_focus.jpg");
                threat_focus    = new Bitmap("assets\\machine\\threat_focus.jpg");
            }
            else
            {
                admin_focus     = new Bitmap("assets\\samaritan\\deviant_focus.jpg");
                secondary_focus = new Bitmap("assets\\samaritan\\irrelevant_focus.jpg");
                threat_focus    = new Bitmap("assets\\samaritan\\threat_focus.jpg");
            }
            machineIsTraining = train;
            // If CORE_IMAGE_DATA.dat exists, read the stored recognition dimensions.
            // First line format (per the parsing below): "<height>|<width>".
            if (File.Exists(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData))
            {
                String[] lines = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData);
                if (lines.Length != 0)
                {
                    String[] heightWidth = lines[0].Split('|');
                    facialRecognitionHeight = Int32.Parse(heightWidth[0]);
                    facialRecognitionWidth  = Int32.Parse(heightWidth[1]);
                }
            }
            FileUtilities.CoreImageDataCreation();
            // Normal (non-training) startup: load the trained recognizers, names,
            // and classifications from the asset index file. Each line is
            // "name^identifier^recognizer-file" (per the '^' split below).
            if (!train && fit == FaceIdentity.FaceNone && nt == "")
            {
                String[]                  lines2           = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.AssetIndexData);
                List <FaceIdentity>       faceIdentityList = new List <FaceIdentity>();
                List <String>             nList            = new List <String>();
                List <LBPHFaceRecognizer> lbphList         = new List <LBPHFaceRecognizer>();
                foreach (String line in lines2)
                {
                    String[] innerSplit = line.Split('^');
                    String   name       = innerSplit[0];
                    String   identifier = innerSplit[1];
                    String   file       = innerSplit[2];
                    if (identifier == adminIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAdmin);
                    }
                    else if (identifier == auxAdminIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAuxAdmin);
                    }
                    else if (identifier == assetIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAsset);
                    }
                    else
                    {
                        // Unknown classification token: treat the index as corrupt.
                        PanicAndTerminateProgram();
                    }
                    nList.Add(name);
                    LBPHFaceRecognizer lbph = new LBPHFaceRecognizer();
                    lbph.Load(file);
                    lbphList.Add(lbph);
                }
                focusKindDatabase      = faceIdentityList.ToArray();
                nameDatabase           = nList.ToArray();
                lbphFaceRecognizerList = lbphList.ToArray();
                // The three databases must stay index-aligned; bail out otherwise.
                // (Original used an empty then-branch with the panic in the else.)
                if (!(focusKindDatabase.Length == nameDatabase.Length &&
                      nameDatabase.Length == lbphFaceRecognizerList.Length))
                {
                    PanicAndTerminateProgram();
                }
            }
            faceIdentityTraining = fit;
            nameTraining         = nt;
            InitializeComponent();
            this.Title = t;

            // Closing this window terminates the entire process.
            Closed += (object sender, EventArgs args) =>
            {
                Environment.Exit(1);
            };
        }
        /// <summary>
        /// Constructs an LBPH face recognizer and immediately releases it; the
        /// 'using' statement guarantees Dispose is called.
        /// </summary>
        public void CreateAndDispose()
        {
            using (var recognizer = LBPHFaceRecognizer.Create(1, 8, 8, 8, 123))
            {
                // Intentionally empty: this exercises create/dispose only.
            }
        }
示例#27
0
        /// <summary>
        /// Rebuilds the LBPH recognizer from the faces listed in faces.xml inside
        /// <paramref name="folder"/>. Each &lt;name&gt; element adds a label and
        /// each &lt;file&gt; element adds an image loaded from
        /// TrainedFaces under the application startup path.
        /// </summary>
        /// <param name="folder">Directory expected to contain faces.xml.</param>
        /// <returns>
        /// true when at least one face was loaded and training succeeded; false
        /// when the index file is missing or empty, or on any error (best-effort:
        /// exceptions are deliberately swallowed, preserving original behavior).
        /// </returns>
        private bool Train(string folder)
        {
            string facesPath = Path.Combine(folder, "faces.xml");

            if (!File.Exists(facesPath))
            {
                return(false);
            }

            try
            {
                names.Clear();
                faces.Clear();
                List <int> tmp = new List <int>();

                // 'using' closes the stream and reader even when the XML is
                // malformed; the original leaked the FileStream on that path
                // because Close() only ran on success.
                using (FileStream facesInfo = File.OpenRead(facesPath))
                using (XmlReader reader = XmlReader.Create(facesInfo))
                {
                    while (reader.Read())
                    {
                        if (!reader.IsStartElement())
                        {
                            continue;
                        }

                        switch (reader.Name)
                        {
                        case "name":
                            if (reader.Read())
                            {
                                // Integer label = index of the name being added.
                                tmp.Add(names.Count);
                                names.Add(reader.Value.Trim());
                            }
                            break;

                        case "file":
                            if (reader.Read())
                            {
                                faces.Add(new Image <Gray, byte>(Path.Combine(Application.StartupPath,
                                                                              "TrainedFaces",
                                                                              reader.Value.Trim())));
                            }
                            break;
                        }
                    }
                }

                if (faces.Count == 0)
                {
                    return(false);
                }

                recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);
                recognizer.Train(faces.ToArray(), tmp.ToArray());

                return(true);
            }
            catch
            {
                return(false);
            }
        }