public void TrainImages()
        {
            string path = Application.StartupPath + @"/../../Images/";

            string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);
            //   int[] labelsDb = _context.Labels.Select(_ => _.LabelNumber).ToArray();
            List <int> labelsDb = new List <int>();

            Mat[] matImages = new Mat[files.Length];



            for (int i = 0; i < files.Length; i++)
            {
                matImages[i] = new Image <Gray, byte>(files[i]).Mat;
                string[] strings = files[i].Split('-');
                string   number  = strings[strings.Length - 1].Split('.')[0];
                labelsDb.Add(int.Parse(number));
            }


            VectorOfMat images = new VectorOfMat(matImages);
            VectorOfInt labels = new VectorOfInt(labelsDb.ToArray());

            faceRecognizer.Train(images, labels);
            faceRecognizer.Write(Application.StartupPath + @"/../../Images/eigenRecognizer.yml");
            fisherRecognizer.Train(images, labels);
            fisherRecognizer.Write(Application.StartupPath + @"/../../Images/fisherRecognizer.yml");
            LBPHFaceRecognizer.Train(images, labels);
            LBPHFaceRecognizer.Write(Application.StartupPath + @"/../../Images/lpbhRecognizer.yml");
            isTrained = true;
        }
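        // A minimal sketch (not part of the original sample) of the read-back side of
        // TrainImages(): it assumes an Emgu CV version where FaceRecognizer.Read() is the
        // counterpart of the Write() calls above, and that probePath points to a grayscale
        // face image comparable in size to the training images.
        public int PredictFromSavedLbphModel(string probePath)
        {
            var recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
            recognizer.Read(Application.StartupPath + @"/../../Images/lpbhRecognizer.yml");

            using (var probe = new Image <Gray, byte>(probePath))
            {
                var result = recognizer.Predict(probe);
                // Label is -1 when the best match exceeds the distance threshold.
                return result.Label;
            }
        }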
 public void train()
 {
     if (training_images.Count != 0)
     {
         face_recognizer.Train <Gray, byte>(training_images.ToArray(), training_int_labels.ToArray());
     }
 }
        private void LBPHFaceRecognition(object sender, EventArgs e)
        {
            Frame = _capture.QueryFrame().ToImage <Bgr, byte>();
            var frame = Frame.Resize(frameW, frameH, Inter.Cubic);

            grayFrame = frame.Convert <Gray, Byte>();
            var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

            // Train once per frame; the training set does not change between the
            // faces detected in this frame.
            lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
            lbphFaceRecognizer.Train(trainingImages.ToArray(), indexLabels.ToArray());

            foreach (var f in faces)
            {

                var result = lbphFaceRecognizer.Predict(frame.Copy(f).Convert <Gray, Byte>().Resize(100, 100, Inter.Cubic));
                if (result.Label == -1)
                {
                    frame.Draw(f, new Bgr(Color.Red), 2);
                    frame.Draw("Unknown", new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
                }
                else
                {
                    frame.Draw(f, new Bgr(Color.Green), 2);
                    frame.Draw(nameLabels[result.Label], new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, LineType.EightConnected, false);
                }
                alertMessage.Text = (alert + "Started Face Recognition using the " + RecognitionType.Text + " method \r\n" + "Distance " + result.Distance + "\r\n Faces " + faces.Length.ToString());
            }
            imgFrame.Image = frame.Resize(imgBoxW, imgBoxH, Inter.Cubic);
        }
        public bool TrainRecognizer()
        {
            var allFaces = _dataStoreAccess.CallFaces("ALL_USERS");

            if (allFaces != null)
            {
                var faceImages = new Image <Gray, byte> [allFaces.Count];
                var faceLabels = new int[allFaces.Count];
                for (int i = 0; i < allFaces.Count; i++)
                {
                    Stream stream = new MemoryStream();
                    stream.Write(allFaces[i].Image, 0, allFaces[i].Image.Length);
                    var faceImage = new Image <Gray, byte>(new Bitmap(stream));
                    faceImages[i] = faceImage.Resize(200, 200, Inter.Cubic);
                    faceLabels[i] = allFaces[i].UserId;
                }

                //LBPHFaceRecognizer--------------------------
                _LBPHFaceRecognizer.Train(faceImages, faceLabels);
                _LBPHFaceRecognizer.Save(_recognizerFilePath);

                ////EigenFaceRecognizer-------------------------
                //_faceRecognizer.Train(faceImages, faceLabels);
                //_faceRecognizer.Save(_recognizerFilePath);
            }
            return(true);
        }
        // Trains a recognizer using the labeled images in inputDir and saves the result
        // to outputPath
        static void TrainRecognizer(string inputDir, string outputPath)
        {
            var imageFiles = Directory.EnumerateFiles(inputDir).ToList();
            var images     = new Image <Gray, byte> [imageFiles.Count];
            var labels     = new int[imageFiles.Count];

            // Load each file and its label
            Console.WriteLine("Loading data...");
            int i = 0;

            foreach (var imageFile in imageFiles)
            {
                var label = Path.GetFileNameWithoutExtension(imageFile).Split('_')[0];
                labels[i] = int.Parse(label);
                images[i] = CvInvoke.Imread(imageFile, Emgu.CV.CvEnum.LoadImageType.Grayscale).ToImage <Gray, byte>();

                i++;
            }
            Console.WriteLine("Loading complete.");

            // Train the recognizer and save the result
            Console.WriteLine("Training...");
            LBPHFaceRecognizer recognizer = new LBPHFaceRecognizer();

            recognizer.Train(images, labels);
            Console.WriteLine("Training done. Saving results...");
            recognizer.Save(outputPath);
        }
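        // Hypothetical caller for TrainRecognizer() above; the method name and the example
        // file names are illustrative, not from the original source. The loader derives each
        // label from the part of the file name before the first underscore, so inputDir is
        // expected to contain files such as "3_front.jpg", "3_left.jpg", "7_front.jpg", ...
        static void TrainFromArgs(string[] args)
        {
            string inputDir   = args[0];   // directory of "<label>_<anything>.jpg" files
            string outputPath = args[1];   // where the trained LBPH model is written, e.g. "lbph.yml"
            TrainRecognizer(inputDir, outputPath);
        }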
        public void FaceLearn()
        {
            recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 80);
            Image <Gray, Byte>[] images = new Image <Gray, Byte> [30];
            Image <Gray, Byte>   image;

            // image = getFace("123.jpg");
            int[] labels = new int[30];
            open_image(ref images, ref labels);
            for (int i = 25; i < 30; i++)
            {
                images[i] = getFace((i - 24) + ".jpg");
                labels[i] = 6;
            }
            recognizer.Train <Gray, Byte>(images, labels);
            Console.WriteLine("Training iss over");

            FaceRecognizer.PredictionResult r;
            Console.WriteLine("!");
            //for (int i = 0; i < 5; i++)
            //{
            //    r = recognizer.Predict(getFace("yalefaces/subject1" + (i + 1) + ".centerlight"));
            //    Console.WriteLine("labels:" + r.Label + " and distance:" + r.Distance);
            //}
            //r = recognizer.Predict(image);
            //Console.WriteLine("labels:" + r.Label + " and distance:" + r.Distance);

            // Console.ReadKey();
        }
        //4: train the images, using the images saved in step 3
        private bool TrainImagesFromDir()
        {
            int    ImagesCount = 0;
            double Threshold   = 7000;

            TrainedFaces.Clear();
            PersonsLabes.Clear();
            PersonsNames.Clear();
            try
            {
                string   path  = Directory.GetCurrentDirectory() + @"\TrainedImages";
                string[] files = Directory.GetFiles(path, "*.jpg", SearchOption.AllDirectories);

                foreach (var file in files)
                {
                    Image <Gray, byte> trainedImage = new Image <Gray, byte>(file).Resize(200, 200, Inter.Cubic);
                    CvInvoke.EqualizeHist(trainedImage, trainedImage);
                    TrainedFaces.Add(trainedImage);
                    PersonsLabes.Add(ImagesCount);
                    string name = file.Split('\\').Last().Split('_')[0];
                    PersonsNames.Add(name);
                    ImagesCount++;
                    Debug.WriteLine(ImagesCount + ". " + name);
                }

                if (TrainedFaces.Count() > 0)
                {
                    recognizer = new EigenFaceRecognizer(ImagesCount, Threshold);
                    recognizer.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());
                    recognizerLBPH = new LBPHFaceRecognizer(ImagesCount, 10, 10, 10, Threshold);
                    recognizerLBPH.Train(TrainedFaces.ToArray(), PersonsLabes.ToArray());
                    isTrained = true;
                    //Debug.WriteLine(ImagesCount);
                    //Debug.WriteLine(isTrained);
                    return(true);
                }
                else
                {
                    isTrained = false;
                    return(false);
                }
            }
            catch (Exception ex)
            {
                isTrained = false;
                MessageBox.Show("Error in Train Images: " + ex.Message);
                return(false);
            }
        }
        /// <summary>
        /// Face recognition based on Local Binary Pattern Histogram
        /// (LBPH) classifier </summary>
        /// <param name="labels">The set of labels in the training set</param>
        /// <param name="trainingImages">The set of images(faces) in the
        /// training set</param>
        /// <param name="face">The face detected in gray scale
        /// to be recognized. The dimension of the image must be
        /// equal to the dimension of the images in the training set</param>
        /// <returns>A string representing the label of the face recognized
        /// or an empty string if no matches were found</returns>
        public String recognizeLBPHFace(List <String> labels,
                                        List <Image <Gray, Byte> > trainingImages,
                                        Bitmap face)
        {
            String             label         = String.Empty;
            Image <Bgr, Byte>  imageEmgu     = new Image <Bgr, Byte>(face);
            Image <Gray, Byte> extractedFace = imageEmgu.Convert <Gray, Byte>().Copy().Resize(
                100, 100, INTER.CV_INTER_CUBIC);

            extractedFace._EqualizeHist();
            InitParams();

            if (trainingImages.ToArray().Length != 0)
            {
                LBPHFaceRecognizer recognizer = new LBPHFaceRecognizer(
                    1, 8, 8, 8, LBPHTreshold);
                int[] labelsInt = new int[labels.ToArray().Length];
                for (int i = 0; i < labels.ToArray().Length; i++)
                {
                    labelsInt[i] = i;
                }
                recognizer.Train(trainingImages.ToArray(), labelsInt.ToArray());
                LBPHFaceRecognizer.PredictionResult pr;
                pr = recognizer.Predict(extractedFace);
                if (pr.Label != -1)
                {
                    label                   = labels[pr.Label];
                    MostSimilarFace         = trainingImages[pr.Label];
                    MostSimilarFaceIndex    = pr.Label;
                    MostSimilarFaceDistance = (float)pr.Distance;
                    MostSimilarFaceLabel    = labels[pr.Label];
                }
                else
                {
                    recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 10000);
                    recognizer.Train(trainingImages.ToArray(), labelsInt.ToArray());
                    pr = recognizer.Predict(extractedFace);
                    MostSimilarFace         = trainingImages[pr.Label];
                    MostSimilarFaceIndex    = pr.Label;
                    MostSimilarFaceDistance = (float)pr.Distance;
                    MostSimilarFaceLabel    = labels[pr.Label];
                }
            }
            return(label);
        }
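        // Hypothetical caller for recognizeLBPHFace() above, illustrating the contract in its
        // XML comments: labels and trainingImages are parallel lists and face is a detected
        // face bitmap. The names knownNames, knownFaces and detectedFace are assumptions made
        // for this sketch, not identifiers from the original source.
        public void TryRecognize(List <String> knownNames,
                                 List <Image <Gray, Byte> > knownFaces,
                                 Bitmap detectedFace)
        {
            String who = recognizeLBPHFace(knownNames, knownFaces, detectedFace);
            if (who == String.Empty)
            {
                Console.WriteLine("No match below the LBPH threshold.");
            }
            else
            {
                Console.WriteLine("Recognized: " + who + " (distance " + MostSimilarFaceDistance + ")");
            }
        }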
        public void TrainFaceRecognizer()
        {
            if (PeopleData.Face.Count() == 0)
            {
                return;
            }

            Image <Gray, Byte>[] trainFaces = new Image <Gray, Byte> [PeopleData.Face.Count()];

            for (int i = 0; i < PeopleData.Face.Count(); i++)
            {
                trainFaces[i] = new Image <Gray, byte>(Environment.CurrentDirectory + PeopleData.Face[i]);
            }
            FaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);
            FaceRecognizer.Train(trainFaces, PeopleData.NameId.ToArray());

            SaveFaceRecognizer();
        }
        private void Train()
        {
            var inputImages = new List <Image <Gray, byte> >();
            var labelsList  = new List <int>();

            foreach (var photo in PhotoContext.Photos.Where(x => x.Id != 1).ToList())
            {
                MemoryStream       str = new MemoryStream(photo.Face.Image);
                Bitmap             b   = new Bitmap(Image.FromStream(str));
                Image <Gray, byte> img = new Image <Gray, byte>(b);
                img._EqualizeHist();
                inputImages.Add(img);
                labelsList.Add(photo.OwnerId);
            }
            DataPath = Server.MapPath("~/App_Data/Faces");
            FaceRecognizer.Train(inputImages.ToArray(), labelsList.ToArray());
            FaceRecognizer.Save(DataPath);
        }
        public bool Train()
        {
            lock (_sync) {
                if (_images.Count <= 1)
                {
                    return(false);
                }
                if (_faceRecognizer != null)
                {
                    _faceRecognizer.Dispose();
                }

                _faceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);
                _faceRecognizer.Train(_images.ToArray(), _images.Select((c, i) => i).ToArray());
                _shouldTrain = false;
                return(true);
            }
        }
        void loadPics()
        {
            var me    = getAllImages(ProcessDirectory(@"C:\Users\Chris\Desktop\test3\crops\chris"));
            var des   = getAllImages(ProcessDirectory(@"C:\Users\Chris\Desktop\test3\crops\destinee"));
            var liam  = getAllImages(ProcessDirectory(@"C:\Users\Chris\Desktop\test3\crops\liam"));
            var logan = getAllImages(ProcessDirectory(@"C:\Users\Chris\Desktop\test3\crops\logan"));

            var trainingData   = new List <Image <Gray, Byte> >();
            var trainingLabels = new List <int>();

            AddToTrainingData(ref trainingData, ref trainingLabels, me, 1);
            AddToTrainingData(ref trainingData, ref trainingLabels, des, 2);
            AddToTrainingData(ref trainingData, ref trainingLabels, liam, 3);
            AddToTrainingData(ref trainingData, ref trainingLabels, logan, 4);


            // faceRecognizerEigen.Train<Gray, Byte>(trainingData.ToArray(), trainingLabels.ToArray());
            faceRecognizerLBPH.Train <Gray, Byte>(trainingData.ToArray(), trainingLabels.ToArray());
            //  faceRecognizerFisher.Train<Gray, Byte>(trainingData.ToArray(), trainingLabels.ToArray());
        }
        public IIdentifier GetIdentity(IFaceImage img)
        {
            IIdentifier answ   = new Identifier(int.MinValue);
            var         labels = _labelRepo.GetAll();

            foreach (var label in labels)
            {
                IEnumerable <IFaceImage>   batch     = label.Images;
                List <Image <Gray, Byte> > compBatch = new List <Image <Gray, Byte> >();
                List <int> trainingLabels            = new List <int>();

                int enumerator = 0;
                foreach (var current in batch)
                {
                    compBatch.Add(new Image <Gray, Byte>(current.ImageOfFace));
                    trainingLabels.Add(enumerator++);
                }

                FaceRecognizer recognizer = new LBPHFaceRecognizer(_recognizerRadius, _recognizerNeighbors,
                                                                   _recognizerGridX, _recognizerGridY, _recognizerThreshold);

                recognizer.Train(compBatch.ToArray(), trainingLabels.ToArray());

                PredictionResult result = recognizer.Predict(new Image <Gray, Byte>(img.ImageOfFace));
                if (result.Distance < _minDistanse)
                {
                    _minDistanse = result.Distance;
                    answ         = label.Id;
                }
            }
            if (_minDistanse < _requiredDistance)
            {
                return(answ);
            }
            else
            {
                return(new Identifier(-1));
            }
        }
        public async Task TrainAsync()
        {
            await Task.CompletedTask;

            var files = Directory.GetFiles(DataFolder, "*.image");

            if (files?.Length > 0)
            {
                var count = files.Length;

                var counter = 0;

                var faceImages = new Mat[count];

                var faceLabels = new int[count];

                foreach (var file in files)
                {
                    var faceImage = new Image <Gray, byte>(file);

                    faceImage = ResizeImage(faceImage);

                    faceImages[counter] = faceImage.Mat;

                    faceLabels[counter] = int.Parse(Path.GetFileName(file).Split('.').First());

                    counter++;
                }

                //_fisherFaceRecognizer.Train(faceImages, faceLabels);

                //_fisherFaceRecognizer.Write(FisherTrainingDataPath);

                _lbphFaceRecognizer.Train(faceImages, faceLabels);

                _lbphFaceRecognizer.Write(LBPHTrainingDataPath);
            }
        }
        private void button1_Click(object sender, EventArgs e)
        {
            if (comboBoxAlgorithm.Text == "EigenFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";

                    string[] files = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);
                    eigenTrainedImageCounter = 0;
                    foreach (var file in files)
                    {
                        Image <Bgr, Byte> TrainedImage = new Image <Bgr, Byte>(file);
                        if (eqHisChecked.Checked == true)
                        {
                            TrainedImage._EqualizeHist();
                        }
                        eigenTrainingImages.Add(TrainedImage.Convert <Gray, Byte>());
                        eigenlabels.Add(fileName(file));
                        eigenIntlabels.Add(eigenTrainedImageCounter);
                        eigenTrainedImageCounter++;
                        richTextBox1.Text += fileName(file) + "\n";
                    }

                    /*
                     *  //TermCriteria for face recognition with numbers of trained images like maxIteration
                     *  MCvTermCriteria termCrit = new MCvTermCriteria(eigenTrainedImageCounter, 0.001);
                     *
                     *  //Eigen face recognizer
                     *  eigenObjRecognizer=new EigenObjectRecognizer(
                     *    eigenTrainingImages.ToArray(),
                     *    eigenlabels.ToArray(),
                     *    3000,
                     *    ref termCrit);
                     */
                    eigenFaceRecognizer = new EigenFaceRecognizer(eigenTrainedImageCounter, 2000);
                    eigenFaceRecognizer.Train(eigenTrainingImages.ToArray(), eigenIntlabels.ToArray());
                    //eigenFaceRecognizer.Save(dataDirectory + "\\trainedDataEigen.dat");
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }

            else if (comboBoxAlgorithm.Text == "FisherFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";

                    string[] files = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);
                    fisherTrainedImageCounter = 0;
                    foreach (var file in files)
                    {
                        Image <Bgr, Byte> TrainedImage = new Image <Bgr, Byte>(file);
                        if (eqHisChecked.Checked == true)
                        {
                            TrainedImage._EqualizeHist();
                        }
                        fisherTrainingImages.Add(TrainedImage.Convert <Gray, Byte>());
                        fisherlabels.Add(fileName(file));
                        fisherIntlabels.Add(fisherTrainedImageCounter);
                        fisherTrainedImageCounter++;
                        richTextBox1.Text += fileName(file) + "\n";
                    }
                    fisherFaceRecognizer = new FisherFaceRecognizer(fisherTrainedImageCounter, 2000);
                    fisherFaceRecognizer.Train(fisherTrainingImages.ToArray(), fisherIntlabels.ToArray());
                    //fisherFaceRecognizer.Save(dataDirectory + "\\trainedDataFisher.dat");
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }

            else if (comboBoxAlgorithm.Text == "LBPHFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";

                    string[] files = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);
                    lbphTrainedImageCounter = 0;
                    foreach (var file in files)
                    {
                        Image <Bgr, Byte> TrainedImage = new Image <Bgr, Byte>(file);
                        if (eqHisChecked.Checked == true)
                        {
                            TrainedImage._EqualizeHist();
                        }
                        lbphTrainingImages.Add(TrainedImage.Convert <Gray, Byte>());
                        lbphlabels.Add(fileName(file));
                        lbphIntlabels.Add(lbphTrainedImageCounter);
                        lbphTrainedImageCounter++;
                        richTextBox1.Text += fileName(file) + "\n";
                    }
                    lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
                    lbphFaceRecognizer.Train(lbphTrainingImages.ToArray(), lbphIntlabels.ToArray());
                    lbphFaceRecognizer.Save(dataDirectory + "\\trainedDataLBPH.dat");
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }
        }
        public bool LoadTrainingData()
        {
            if (File.Exists(LabelsFile))
            {
                try
                {
                    //Clearing lists in case a retrain operation occurred.
                    ListOFNames.Clear();
                    ListOfIds.Clear();
                    trainingImages.Clear();

                    //Reading Xml File
                    FileStream filestream = File.OpenRead(LabelsFile);
                    long       filelength = filestream.Length;
                    byte[]     xmlBytes   = new byte[filelength];
                    filestream.Read(xmlBytes, 0, (int)filelength);
                    filestream.Close();

                    MemoryStream xmlStream = new MemoryStream(xmlBytes);
                    using (XmlReader xmlreader = XmlTextReader.Create(xmlStream))
                    {
                        while (xmlreader.Read())
                        {
                            if (xmlreader.IsStartElement())
                            {
                                switch (xmlreader.Name)
                                {
                                case "NAME":
                                    if (xmlreader.Read())
                                    {
                                        ListOfIds.Add(ListOFNames.Count);
                                        ListOFNames.Add(xmlreader.Value.Trim());
                                        NumLabels += 1;
                                    }
                                    break;

                                case "FILE":
                                    if (xmlreader.Read())
                                    {
                                        //PROBLEM HERE IF TRAINING DATA IS MOVED
                                        trainingImages.Add(new Image <Gray, byte>(Variables.DataSetFolder + "\\" + xmlreader.Value.Trim()));
                                    }
                                    break;
                                }
                            }
                        }
                    }
                    ContTrain = NumLabels;

                    if (trainingImages.ToArray().Length != 0)
                    {
                        eigen.Train(trainingImages.ToArray(), ListOfIds.ToArray());
                        fisher.Train(trainingImages.ToArray(), ListOfIds.ToArray());
                        Lp.Train(trainingImages.ToArray(), ListOfIds.ToArray());
                        Loaded = true;

                        return(true);
                    }
                    else
                    {
                        return(false);
                    }
                }
                catch (Exception Exp)
                {
                    HasError = true;
                    Debug.WriteLine("Error In Loading Function : " + Exp.Message);
                    return(false);
                }
            }
            else
            {
                return(false);
            }
        }
        /***
         *  Function: private void TrainMachine(FaceIdentity faceIdentity, String name)
         *  Parameter(s): FaceIdentity faceIdentity
         *      Privilege level of the face being trained, recorded in the ASSET_INDEX.dat file.
         *                String name
         *      The name of the individual being trained; it is not currently used, but it exists
         *      so that the machine can be customized further in a future version.
         *  Return Value: void
         ***/
        private void TrainMachine(FaceIdentity faceIdentity, String name)
        {
            // Notify the user that training has begun.
            MessageBox.Show(trainingBeginning, this.Title, MessageBoxButton.OK);

            String[]   fileList = Directory.GetFiles(FileUtilities.TrainingDirectoryName);
            List <Mat> matList  = new List <Mat>();

            foreach (String file in fileList)
            {
                matList.Add(new Mat(file, Emgu.CV.CvEnum.LoadImageType.Unchanged));
            }
            List <Image <Gray, Byte> > list = new List <Image <Gray, Byte> >();

            // Detect each face in each image.
            foreach (Mat mat in matList)
            {
                Image <Gray, Byte> image    = mat.ToImage <Gray, Byte>().Resize(1 / (double)scale_factor, Emgu.CV.CvEnum.Inter.Cubic);
                Rectangle[]        faceList = faceClassifier.DetectMultiScale(image);
                foreach (Rectangle rect in faceList)
                {
                    list.Add(image.Copy(rect).Convert <Gray, Byte>());
                }
            }

            // Make sure that there is at least one face to train.
            if (list.Count() == 0)
            {
                PanicAndTerminateProgram();
            }

            // If a height exists in the CORE_IMAGE_DATA.dat file, resize to that, useful for future training.
            int height = facialRecognitionHeight == 0 ? list[0].Height * scale_factor : facialRecognitionHeight;
            int width  = facialRecognitionWidth == 0 ? list[0].Width * scale_factor : facialRecognitionWidth;

            if (facialRecognitionHeight == 0 || facialRecognitionWidth == 0)
            {
                List <String> lines = new List <String>();
                lines.Add(height + "|" + width);
                File.WriteAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData, lines.ToArray());
            }
            List <Image <Gray, Byte> > listFinal = new List <Image <Gray, Byte> >();

            foreach (Image <Gray, Byte> image in list)
            {
                listFinal.Add(image.Resize(width, height, Emgu.CV.CvEnum.Inter.Cubic));
            }
            List <int> labelList = new List <int>();
            int        integer   = 0;
            String     prefix    = "";
            String     ident     = "";

            if (faceIdentity == FaceIdentity.FaceAdmin)
            {
                integer = 0;
                prefix  = FileUtilities.AdminTrainedPrefix;
                ident   = adminIdentifier;
            }
            else if (faceIdentity == FaceIdentity.FaceAsset)
            {
                integer = 2;
                prefix  = FileUtilities.AssetTrainedPrefix;
                ident   = auxAdminIdentifier;
            }
            else if (faceIdentity == FaceIdentity.FaceAuxAdmin)
            {
                integer = 1;
                prefix  = FileUtilities.AuxAdminTrainedPrefix;
                ident   = assetIdentifier;
            }
            else
            {
                PanicAndTerminateProgram();
            }
            for (int i = 0; i < list.Count(); i++)
            {
                labelList.Add(integer);
            }

            // Train the machine and write its trained state to a file.
            LBPHFaceRecognizer lbphFaceRecognizer = new LBPHFaceRecognizer();

            lbphFaceRecognizer.Train <Gray, Byte>(listFinal.ToArray(), labelList.ToArray());
            Directory.Delete(FileUtilities.TrainingDirectoryName, true);
            String temp  = categories[integer];
            String fname = FileUtilities.DirectoryName + "\\" + prefix + temp.ToUpper().Replace(' ', '_') + FileUtilities.FileExtension;

            lbphFaceRecognizer.Save(fname);

            // Write everything to the ASSET_INDEX.dat file.
            FileUtilities.TrainingDirectoryDeletion();
            List <String> aboutTraining = new List <String>();

            aboutTraining.Add(name + "^" + ident + "^" + fname);
            File.AppendAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.AssetIndexData, aboutTraining.ToArray());

            // Notify the user that training has ended.
            MessageBox.Show(trainingEnded, this.Title, MessageBoxButton.OK);
        }
        private bool Train(string folder)
        {
            string facesPath = Path.Combine(folder, "faces.xml");

            if (!File.Exists(facesPath))
            {
                return(false);
            }

            try
            {
                names.Clear();
                faces.Clear();
                List <int> tmp       = new List <int>();
                FileStream facesInfo = File.OpenRead(facesPath);

                using (XmlReader reader = XmlTextReader.Create(facesInfo))
                {
                    while (reader.Read())
                    {
                        if (reader.IsStartElement())
                        {
                            switch (reader.Name)
                            {
                            case "name":
                                if (reader.Read())
                                {
                                    tmp.Add(names.Count);
                                    names.Add(reader.Value.Trim());
                                }
                                break;

                            case "file":
                                if (reader.Read())
                                {
                                    faces.Add(new Image <Gray, byte>(Path.Combine(Application.StartupPath,
                                                                                  "TrainedFaces",
                                                                                  reader.Value.Trim())));
                                }
                                break;
                            }
                        }
                    }
                }

                facesInfo.Close();

                if (faces.Count == 0)
                {
                    return(false);
                }

                recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);
                recognizer.Train(faces.ToArray(), tmp.ToArray());

                return(true);
            }
            catch
            {
                return(false);
            }
        }
        static LoadData()
        {
            try
            {
                Console.WriteLine("====================================LOAD DATA========================================");

                myconnection.ConnectionString = Properties.Settings.Default.ConnectionString;
                myconnection.Open();
                string           query = "select employee.userid,employeename,image from employee,image where image.userid=employee.userid";
                OracleCommand    cmd   = new OracleCommand(query, myconnection);
                OracleDataReader dr    = cmd.ExecuteReader();

                while (dr.Read())
                {
                    Labels_ID.Add(Convert.ToInt32(dr["userid"].ToString()));
                    Console.WriteLine("id: " + Convert.ToInt32(dr["userid"].ToString()));
                    Labels_Name.Add(dr["employeename"].ToString());
                    Console.WriteLine("name: " + dr["employeename"].ToString());

                    string     img_name = "image.bmp";
                    FileStream FS       = new FileStream(temp_images + img_name, FileMode.Create);
                    byte[]     blob     = (byte[])dr["IMAGE"];
                    //byte[] blob = Convert.FromBase64String(dr["IMAGE"].ToString()); //for clob
                    FS.Write(blob, 0, blob.Length);
                    FS.Close();
                    FS = null;

                    Bitmap masterImage;
                    using (var fs = new System.IO.FileStream(temp_images + img_name, System.IO.FileMode.Open))
                    {
                        masterImage = new Bitmap(fs);
                    }

                    // Normalizing it to grayscale
                    Image <Gray, Byte> normalizedMasterImage = new Image <Gray, Byte>(masterImage);
                    normalizedMasterImage = normalizedMasterImage.Resize(320, 244, Emgu.CV.CvEnum.Inter.Cubic);
                    _traineImages.Add(normalizedMasterImage);

                    //---------------------delete file after being added----------------------------
                    var filePath = temp_images + img_name;
                    if (File.Exists(filePath))
                    {
                        File.Delete(filePath);
                    }
                }
                myconnection.Close();
                Console.WriteLine("====================================LOAD DATA END========================================");


                //converting data
                faceImages = new Mat[_traineImages.Count];
                for (int i = 0; i < _traineImages.Count; i++)
                {
                    faceImages[i] = _traineImages[i].Mat;
                }

                //display faceImages somewhere


                Console.WriteLine("==============================Training model=======================================");

                recog = new LBPHFaceRecognizer(1, 8, 8, 8, 100);

                //Training model
                if (_traineImages.ToArray().Length != 0)
                {
                    recog.Train(faceImages, Labels_ID.ToArray());
                }

                Console.WriteLine("==============================Training model end=======================================");
            }
            catch (Exception e)
            {
                MessageBox.Show(e.ToString());
                Console.Write(e.ToString());
                //MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
        }
 private void train()
 {
     int[] studentIds = trainData.Select(t => t.StudentId).ToArray();
     Image <Gray, byte>[] imageGrays = trainData.Select(t => t.ImageGray).ToArray();
     recognizer.Train(imageGrays, studentIds);
 }