public JsonResult RecognizeByteArray(byte[] photo)
        {
            DataPath = Server.MapPath("~/App_Data/Faces");
            FaceRecognizer.Load(DataPath);

            // Wrap the raw bytes in a grayscale Emgu CV image for prediction.
            var depthImage = new Image<Gray, byte>(photo.Rank, photo.Length / photo.Rank)
            {
                Bytes = photo
            };

            // Predict the label and look up the matching person record.
            var label  = FaceRecognizer.Predict(depthImage).Label;
            var person = PhotoContext.People.FirstOrDefault(x => x.Id == label);

            return Json(person, JsonRequestBehavior.AllowGet);
        }
Example #2
        public void Load(string filename)
        {
            recognizer.Load(filename);

            // The label names live in a Labels.xml file next to the trained data.
            string path = Path.GetDirectoryName(filename);

            names.Clear();

            using (FileStream labels = File.OpenRead(Path.Combine(path, "Labels.xml")))
            using (XmlReader reader = XmlReader.Create(labels))
            {
                while (reader.Read())
                {
                    if (reader.IsStartElement())
                    {
                        switch (reader.Name)
                        {
                        case "name":
                            names.Add(reader.GetAttribute("name"));
                            break;
                        }
                    }
                }
            }

            IsTrained = true;
        }
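
The Labels.xml file itself isn't included with this example. Judging from the reader loop above, which collects the name attribute of every <name> element, a hypothetical file might look like the sketch below (the root element name is an assumption; the loop only inspects <name> elements):

        <?xml version="1.0" encoding="utf-8"?>
        <Labels>
          <name name="Alice" />
          <name name="Bob" />
        </Labels>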
Example #3
        private void button2_Click(object sender, EventArgs e)
        {
            if (comboBoxAlgorithm.Text == "EigenFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataEigen.dat";
                    eigenFaceRecognizer = new EigenFaceRecognizer(eigenTrainedImageCounter, 3000);
                    eigenFaceRecognizer.Load(dataDirectory);
                    richTextBox1.Text += "Trained Database Loaded.";
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }

            else if (comboBoxAlgorithm.Text == "FisherFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataFisher.dat";

                    fisherFaceRecognizer = new FisherFaceRecognizer(fisherTrainedImageCounter, 3000);
                    fisherFaceRecognizer.Load(dataDirectory);
                    richTextBox1.Text += "Trained Database Loaded.";
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }

            else if (comboBoxAlgorithm.Text == "LBPHFaces")
            {
                try
                {
                    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces\\trainedDataLBPH.dat";
                    lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 400);
                    lbphFaceRecognizer.Load(dataDirectory);
                    richTextBox1.Text += "Trained Database Loaded.";
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                    MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
            }
        }
Example #4
        public MainWindow()
        {
            InitializeComponent();

            // Initialize detector and recognizer
            detector   = new CascadeClassifier(cascadePath);
            recognizer = new LBPHFaceRecognizer();
            recognizer.Load(trainedData);

            // Load the labels
            names = new Dictionary<int, string>();
            var labels = System.IO.File.ReadAllLines(labelPath);

            foreach (var line in labels)
            {
                var content = line.Split(',');
                names.Add(int.Parse(content[0]), content[1]);
            }
        }
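
The label file at labelPath isn't shown either. Based on the parsing above (one person per line, split on a comma into an integer id and a display name), a hypothetical file could contain:

        1,Alice
        2,Bob
        3,Carol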
Example #5
 public void LoadRecognizerData()
 {
     _LBPHFaceRecognizer.Load(_recognizerFilePath);
     //_faceRecognizer.Load(_recognizerFilePath);
 }
        /***
         *  Function: public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt,
         *                              MachineInputSourceKind misk, String t, MachineStartPage msp, UIKind uk)
         *  Parameter(s): bool train
         *      Whether or not the machine is supposed to be training itself.
         *                FaceIdentity fit
         *      The classification of the person that the Machine is training itself to recognize (supposed to be equal to FaceIdentity.FaceNone during
         *      normal execution).
         *                String nt
         *      The name of the person that the Machine is training itself to recognize (supposed to be equal to "" during normal execution).
         *                int cn
         *      The camera number to be used by the 'capture' variable.
         *                int fps
         *      The frame rate of the camera (not used when processing video files).
         *                bool crd
         *      Whether or not the Machine should dispose of camera resources (could potentially be unstable).
         *                int mdt
         *      The maxDistanceThreshold to use when classifying faces.
         *                MachineInputSourceKind misk
         *      The input source from which the Machine will gather its input.
         *                String t
         *      The title of this window.
         *                MachineStartPage msp
         *      A reference to the main machine window to control.
         *                UIKind uk
         *      Which asset set (Machine or Samaritan) the window should use for its UI.
         *  Return Value: N/A (Constructor)
         ***/
        public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt,
                          MachineInputSourceKind misk, String t, MachineStartPage msp, UIKind uk)
        {
            machineStartPage       = msp;
            cameraResourceDisposal = crd;
            maxDistanceThreshold   = mdt;
            cameraNumber           = cn;
            cameraFrameRate        = fps;
            machineInputSourceKind = misk;
            // Both branches of the original check enabled speech recognition, so it is always on
            // regardless of input source or training mode.
            speechRecognitionOnline = true;
            userSpeech = "";
            FileUtilities.DirectoryCreation();
            FileUtilities.TrainingDirectoryCreation();
            faceClassifier = new CascadeClassifier("assets\\haarcascade_frontalface_alt.xml");
            uikind         = uk;
            if (uikind == UIKind.UIMachine)
            {
                admin_focus     = new Bitmap("assets\\machine\\admin_focus.jpg");
                secondary_focus = new Bitmap("assets\\machine\\secondary_focus.jpg");
                threat_focus    = new Bitmap("assets\\machine\\threat_focus.jpg");
            }
            else
            {
                admin_focus     = new Bitmap("assets\\samaritan\\deviant_focus.jpg");
                secondary_focus = new Bitmap("assets\\samaritan\\irrelevant_focus.jpg");
                threat_focus    = new Bitmap("assets\\samaritan\\threat_focus.jpg");
            }
            machineIsTraining = train;
            //Check if the CORE_IMAGE_DATA.dat file exists and read the predefined width and height.
            if (File.Exists(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData))
            {
                String[] lines = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData);
                if (lines.Length != 0)
                {
                    String[] heightWidth = lines[0].Split('|');
                    facialRecognitionHeight = Int32.Parse(heightWidth[0]);
                    facialRecognitionWidth  = Int32.Parse(heightWidth[1]);
                }
            }
            FileUtilities.CoreImageDataCreation();
            // Check if the Machine is being trained or not.
            if (!train && fit == FaceIdentity.FaceNone && nt == "")
            {
                String[]                  lines2           = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.AssetIndexData);
                List <FaceIdentity>       faceIdentityList = new List <FaceIdentity>();
                List <String>             nList            = new List <String>();
                List <LBPHFaceRecognizer> lbphList         = new List <LBPHFaceRecognizer>();
                // Load the trained recognizers, list of names, and classifications.
                foreach (String line in lines2)
                {
                    String[] innerSplit = line.Split('^');
                    String   name       = innerSplit[0];
                    String   identifier = innerSplit[1];
                    String   file       = innerSplit[2];
                    if (identifier == adminIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAdmin);
                    }
                    else if (identifier == auxAdminIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAuxAdmin);
                    }
                    else if (identifier == assetIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAsset);
                    }
                    else
                    {
                        PanicAndTerminateProgram();
                    }
                    nList.Add(name);
                    LBPHFaceRecognizer lbph = new LBPHFaceRecognizer();
                    lbph.Load(file);
                    lbphList.Add(lbph);
                }
                focusKindDatabase      = faceIdentityList.ToArray();
                nameDatabase           = nList.ToArray();
                lbphFaceRecognizerList = lbphList.ToArray();
                // Make sure the recognizer, name, and classification databases all have the same length.
                if (focusKindDatabase.Length != nameDatabase.Length || nameDatabase.Length != lbphFaceRecognizerList.Length)
                {
                    PanicAndTerminateProgram();
                }
            }
            faceIdentityTraining = fit;
            nameTraining         = nt;
            InitializeComponent();
            this.Title = t;

            Closed += (object sender, EventArgs args) =>
            {
                Environment.Exit(1);
            };
        }
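
A minimal usage sketch for the constructor documented above, assuming normal (non-training) execution over a video file. The concrete argument values and the startPage variable are illustrative assumptions, not part of the original example:

        // Hypothetical call site: run the Machine UI over a prerecorded video file.
        // MachineInputSourceKind.SourceFile, FaceIdentity.FaceNone, and UIKind.UIMachine
        // are taken from the example above; the numeric values are placeholders.
        MachineStartPage startPage = /* the controlling start page instance */ null;
        var window = new MainWindow(
            train: false,                              // normal execution, not training
            fit:   FaceIdentity.FaceNone,              // no training target
            nt:    "",                                 // no training name
            cn:    0,                                  // camera number (unused for file input)
            fps:   30,                                 // camera frame rate (unused for video files)
            crd:   false,                              // do not dispose camera resources
            mdt:   80,                                 // maxDistanceThreshold for face classification
            misk:  MachineInputSourceKind.SourceFile,  // gather input from a video file
            t:     "The Machine",                      // window title
            msp:   startPage,                          // MachineStartPage to control
            uk:    UIKind.UIMachine);                  // use the Machine asset set
        window.Show();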