LBPH face recognizer (inherits from FaceRecognizer)
Code example #1
        /// <summary>
        /// Detects every face in <paramref name="newImage"/> and tries to recognize each
        /// one with the selected recognizer, returning one entry per detected face.
        /// </summary>
        /// <param name="newImage">Frame to analyse.</param>
        /// <param name="faceRecognizerMethode">Recognition algorithm to use (Eigen, Fisher or LBPH; unknown values fall back to Eigen).</param>
        /// <returns>
        /// One <c>EmployeeStructure</c> per detected face (result = Recognized or Unknown);
        /// a single entry with result = NoDetected when no face is found.
        /// </returns>
        public EmployeeStructure[] recognizeMultipleFaces(Image newImage, FaceRecognizerMethode faceRecognizerMethode)
        {
            var inputImage = new Image<Bgr, Byte>(new Bitmap(newImage));
            Rectangle[] rectangleFace = detection(inputImage, this.pathXMLHaarcascade);

            if (rectangleFace.Length <= 0)
            {
                // BUG FIX: the original allocated a zero-length array and then wrote to
                // index 0, which always threw IndexOutOfRangeException on this path.
                // NOTE(review): assumes EmployeeStructure is a value type (the original
                // wrote to element fields without constructing one) — confirm.
                EmployeeStructure[] noDetection = new EmployeeStructure[1];
                noDetection[0].result = Result.NoDetected.ToString();
                return noDetection;
            }

            Image<Gray, byte> grayFrame = toGrayEqualizeFrame(inputImage);
            EmployeeStructure[] employeeStructure = new EmployeeStructure[rectangleFace.Length];

            FaceRecognizer faceRecognition;
            switch (faceRecognizerMethode.ToString())
            {
                case "EigenFaceRecognizerMethode":
                    faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
                    break;
                case "FisherFaceRecognizerMethode":
                    faceRecognition = new FisherFaceRecognizer(numComponentsFisher, thresholdFisher);
                    break;
                case "LBPHFaceRecognizerMethode":
                    faceRecognition = new LBPHFaceRecognizer(radiusLBPH, neighborsLBPH, gridXLBPH, gridYLBPH, thresholdLBPH);
                    break;
                default:
                    // Unknown method name: fall back to the Eigen recognizer.
                    faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
                    break;
            }

            faceRecognition.Load(pathImg + @"\" + "TrainingSet");

            // NOTE(review): the single faceRecognition instance is shared across
            // Parallel.For iterations — confirm EmguCV's Predict is thread-safe.
            Parallel.For(0, rectangleFace.Length, i =>
            {
                Image<Gray, byte> faceEMGUCV = formatRectangleFaces(grayFrame.ToBitmap(), rectangleFace[i]);

                FaceRecognizer.PredictionResult ER = faceRecognition.Predict(faceEMGUCV);

                if (ER.Label != -1 /*&& ER.Distance > thresholdEigen*/)
                {
                    int label = ER.Label;

                    GenericRepository<Employee> emplyeeRepo = unitOfWork.GetRepoInstance<Employee>();
                    Employee em = emplyeeRepo.GetFirstOrDefault(label);

                    // BUG FIX: report the rectangle of THIS face (index i); the original
                    // returned rectangleFace[0]'s coordinates for every face.
                    employeeStructure[i] = new EmployeeStructure(Result.Recognized.ToString(), em.name, em.middleName, em.lastName, em.email,
                        rectangleFace[i].X, rectangleFace[i].Y, rectangleFace[i].Width, rectangleFace[i].Height);
                }
                else
                {
                    // BUG FIX: the original set Unknown unconditionally, overwriting the
                    // Recognized result of every matched face.
                    employeeStructure[i].result = Result.Unknown.ToString();
                }
            });

            return employeeStructure;
        }
Code example #2
        /// <summary>
        /// Registers an employee (creating the record if the email is new), stores the
        /// single detected face image under a GUID file name, records the sample in the
        /// database and retrains the face recognizer on all stored samples.
        /// </summary>
        /// <param name="newImage">Image expected to contain exactly one face.</param>
        /// <param name="name">Employee first name.</param>
        /// <param name="middleName">Employee middle name.</param>
        /// <param name="lastName">Employee last name.</param>
        /// <param name="email">Employee email; used as the unique lookup key.</param>
        /// <param name="faceRecognizerMethode">Recognition algorithm to train with.</param>
        /// <returns>
        /// Result.Saved on success; Result.NoDetected / Result.MultipleFacesDetected when
        /// the image does not contain exactly one face; null for an unknown algorithm.
        /// </returns>
        public string saveEmployee(Image newImage, string name, string middleName, string lastName, string email, FaceRecognizerMethode faceRecognizerMethode)
        {
            unitOfWork = new GenericUnitOfWork();
            GenericRepository<Employee> employeeRepo = unitOfWork.GetRepoInstance<Employee>();

            // Look up the employee by email (the email is unique). FirstOrDefault avoids
            // the original's First() + bare catch, which used an exception for the
            // expected "new employee" case.
            Employee employee = employeeRepo.GetAllRecords().FirstOrDefault(e => e.email == email);

            // Add the employee if it does not exist.
            if (employee == null)
            {
                Debug.WriteLine("Nuevo usuario");
                employee = new Employee { name = name, middleName = middleName, lastName = lastName, email = email };
                employeeRepo.Add(employee);
                unitOfWork.SaveChanges();
            }

            // The face image is saved with a GUID as its file name.
            GenericRepository<DistanceResult> distanceResultRepo = unitOfWork.GetRepoInstance<DistanceResult>();
            Guid guid = Guid.NewGuid();

            var inputImage = new Image<Bgr, Byte>(new Bitmap(newImage));
            Rectangle[] rectangleFace = detection(inputImage, pathXMLHaarcascade);

            // detection(..) can extract N faces; exactly one is required here.
            if (rectangleFace.Length <= 0)
            {
                return Result.NoDetected.ToString();
            }
            if (rectangleFace.Length > 1)
            {
                return Result.MultipleFacesDetected.ToString();
            }

            Image<Gray, byte> grayFrame = toGrayEqualizeFrame(inputImage);
            Image<Gray, Byte> faceEMGUCV = formatRectangleFaces(grayFrame.ToBitmap(), rectangleFace[0]);
            faceEMGUCV._EqualizeHist();
            faceEMGUCV.Save(pathImg + @"\" + guid.ToString() + ".Jpeg");

            FaceRecognizer faceRecognition;
            switch (faceRecognizerMethode.ToString())
            {
                case "EigenFaceRecognizerMethode":
                    faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
                    break;
                case "FisherFaceRecognizerMethode":
                    faceRecognition = new FisherFaceRecognizer(numComponentsFisher, thresholdFisher);
                    break;
                case "LBPHFaceRecognizerMethode":
                    faceRecognition = new LBPHFaceRecognizer(radiusLBPH, neighborsLBPH, gridXLBPH, gridYLBPH, thresholdLBPH);
                    break;
                default:
                    // NOTE(review): an unknown method returns null AFTER the image file
                    // was written — confirm this partial side effect is intended.
                    return null;
            }

            // Placeholder distance value stored with the sample.
            double distance = 2;

            // Save the sample record.
            DistanceResult dist = new DistanceResult();
            // BUG FIX: record the algorithm actually selected; the original always
            // stored FisherFaceRecognizerMethode regardless of the parameter.
            dist.algorithm = faceRecognizerMethode.ToString();
            dist.employeeId = employee.employeeId;
            dist.photoName = guid.ToString();
            dist.distance = distance;
            distanceResultRepo.Add(dist);

            unitOfWork.SaveChanges();

            // Rebuild the training set from every stored sample.
            int lengthArrays = distanceResultRepo.GetAllRecords().Count();
            imagesDB = new Image<Gray, Byte>[lengthArrays];
            labels = new int[lengthArrays];
            int i = 0;
            foreach (DistanceResult di in distanceResultRepo.GetAllRecords())
            {
                imagesDB[i] = new Image<Gray, Byte>(pathImg + @"\" + di.photoName + ".Jpeg");
                labels[i] = di.employeeId;
                i++;
            }

            // Training needs at least two distinct employees.
            if (employeeRepo.GetAllRecords().Count() > 1)
            {
                faceRecognition.Train(imagesDB, labels);
                faceRecognition.Save(pathImg + @"\" + "TrainingSet");
            }
            return Result.Saved.ToString();
        }
Code example #3
        /// <summary>
        /// Constructs the main window, configures the input source and UI skin, loads
        /// the stored facial-recognition image dimensions and — when not training —
        /// loads the trained recognizers, names and classifications from disk.
        /// </summary>
        /// <param name="train">Whether or not the machine is supposed to be training itself.</param>
        /// <param name="fit">The classification of the person being trained (FaceIdentity.FaceNone during normal execution).</param>
        /// <param name="nt">The name of the person being trained ("" during normal execution).</param>
        /// <param name="cn">The camera number used by the capture device.</param>
        /// <param name="fps">The frame rate of the camera (not used when processing video files).</param>
        /// <param name="crd">Whether or not the Machine should dispose of camera resources (could potentially be unstable).</param>
        /// <param name="mdt">The maxDistanceThreshold to use when classifying faces.</param>
        /// <param name="misk">The input source from which the Machine will gather its input.</param>
        /// <param name="t">The title of this window.</param>
        /// <param name="msp">A reference to the main machine window to control.</param>
        /// <param name="uk">Which UI skin (Machine or Samaritan) to load focus images for.</param>
        public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt,
            MachineInputSourceKind misk, String t, MachineStartPage msp, UIKind uk)
        {
            machineStartPage = msp;
            cameraResourceDisposal = crd;
            maxDistanceThreshold = mdt;
            cameraNumber = cn;
            cameraFrameRate = fps;
            machineInputSourceKind = misk;
            // Speech recognition is only available when reading from a live webcam.
            if (misk == MachineInputSourceKind.SourceWebcam) {
                speechRecognitionOnline = true;
                userSpeech = "";
            }
            FileUtilities.DirectoryCreation();
            FileUtilities.TrainingDirectoryCreation();
            faceClassifier = new CascadeClassifier("assets\\haarcascade_frontalface_alt.xml");
            uikind = uk;
            // Pick the focus overlay bitmaps for the selected skin.
            if (uikind == UIKind.UIMachine) {
                admin_focus = new Bitmap("assets\\machine\\admin_focus.jpg");
                secondary_focus = new Bitmap("assets\\machine\\secondary_focus.jpg");
                threat_focus = new Bitmap("assets\\machine\\threat_focus.jpg");
            }
            else
            {
                admin_focus = new Bitmap("assets\\samaritan\\deviant_focus.jpg");
                secondary_focus = new Bitmap("assets\\samaritan\\irrelevant_focus.jpg");
                threat_focus = new Bitmap("assets\\samaritan\\threat_focus.jpg");
            }
            machineIsTraining = train;
            // Check if the CORE_IMAGE_DATA.dat file exists and read the predefined
            // width and height ("height|width" on the first line).
            if (File.Exists(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData))
            {
                String[] lines = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData);
                if (lines.Length != 0)
                {
                    String[] heightWidth = lines[0].Split('|');
                    facialRecognitionHeight = Int32.Parse(heightWidth[0]);
                    facialRecognitionWidth = Int32.Parse(heightWidth[1]);
                }
            }
            FileUtilities.CoreImageDataCreation();
            // Check if the Machine is being trained or not.
            if (!train && fit == FaceIdentity.FaceNone && nt == "")
            {
                String[] lines2 = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.AssetIndexData);
                List<FaceIdentity> faceIdentityList = new List<FaceIdentity>();
                List<String> nList = new List<String>();
                List<LBPHFaceRecognizer> lbphList = new List<LBPHFaceRecognizer>();
                // Load the trained recognizers, list of names, and classifications.
                // Each index line has the form "name^identifier^recognizerFile".
                foreach (String line in lines2)
                {
                    String[] innerSplit = line.Split('^');
                    String name = innerSplit[0];
                    String identifier = innerSplit[1];
                    String file = innerSplit[2];
                    if (identifier == adminIdentifier)
                        faceIdentityList.Add(FaceIdentity.FaceAdmin);
                    else if (identifier == auxAdminIdentifier)
                        faceIdentityList.Add(FaceIdentity.FaceAuxAdmin);
                    else if (identifier == assetIdentifier)
                        faceIdentityList.Add(FaceIdentity.FaceAsset);
                    else
                        PanicAndTerminateProgram();
                    nList.Add(name);
                    LBPHFaceRecognizer lbph = new LBPHFaceRecognizer();
                    lbph.Load(file);
                    lbphList.Add(lbph);
                }
                focusKindDatabase = faceIdentityList.ToArray();
                nameDatabase = nList.ToArray();
                lbphFaceRecognizerList = lbphList.ToArray();
                // Panic unless the recognizer, name and classification databases all
                // have the same length. (IDIOM FIX: the original used an accidental
                // empty statement, "if (...) ; else Panic...", inverted here.)
                if (focusKindDatabase.Length != nameDatabase.Length || nameDatabase.Length != lbphFaceRecognizerList.Length)
                    PanicAndTerminateProgram();
            }
            faceIdentityTraining = fit;
            nameTraining = nt;
            InitializeComponent();
            this.Title = t;

            // Closing this window terminates the whole process.
            // NOTE(review): exit code 1 conventionally signals failure — confirm intended.
            Closed += (object sender, EventArgs args) =>
            {
                Environment.Exit(1);
            };
        }