コード例 #1
0
        /// <summary>
        /// Builds the face detector. When no pre-trained model file is supplied,
        /// the recognizer is trained from the local training data and saved under
        /// "Default"; otherwise the given model file is loaded.
        /// </summary>
        /// <param name="preloadedTraining">Path of a previously saved recognizer model, or "" to train anew.</param>
        public EmguFaceDetector(String preloadedTraining = "")
        {
            // Alternative algorithms kept for reference:
            // model = new LBPHFaceRecognizer();
            // model = new EigenFaceRecognizer();

            // Fisher recognizer: 2 components, distance threshold 3000.
            model = new FisherFaceRecognizer(2, 3000);

            if (preloadedTraining == "")
            {
                images = new List <Image <Gray, byte> >();
                labels = new List <int>();
                prepareTrainingData();
                model.Train(images.ToArray(), labels.ToArray());
                model.Save("Default");
            }
            else
            {
                // The "Default" model and a user-supplied model are loaded the same
                // way, so the former if/else with identical branches was collapsed.
                model.Load(preloadedTraining);
            }

            cascadeFace = new CascadeClassifier(AppDomain.CurrentDomain.BaseDirectory + "haarcascades\\haarcascade_frontalface_default.xml");
            cascadeEyes = new CascadeClassifier(AppDomain.CurrentDomain.BaseDirectory + "haarcascades\\haarcascade_eye.xml");
        }
コード例 #2
0
 /// <summary>
 /// (Re)creates the LBPH face recognizer from the user settings and either
 /// loads an existing training file or prepares an empty one for later use.
 /// </summary>
 public static void InitializeFaceRecognizer()
 {
     var settings = Properties.Settings.Default;

     // Replace any previous recognizer instance before building a new one.
     _faceRecognizer?.Dispose();
     _faceRecognizer = new LBPHFaceRecognizer(settings.RecognitionRadius, settings.RecognitionNeighbours, 8, 8, settings.RecognitionThreshold);

     string trainFile = settings.RecognitionTrainFile;
     if (File.Exists(trainFile))
     {
         try
         {
             _faceRecognizer.Load(trainFile);
             _trained = true;
         }
         catch (Exception ex)
         {
             Debug.WriteLine("Could not load recognition file: " + ex);
         }
     }
     else
     {
         try
         {
             // Make sure the directory exists, then create an empty training file.
             Directory.CreateDirectory(Path.GetDirectoryName(trainFile));
             File.Create(trainFile).Close();
         }
         catch (Exception ex)
         {
             Debug.WriteLine("Could not create recognition file: " + ex);
         }
     }
 }
コード例 #3
0
        /// <summary>
        /// Recognizes the user shown on the given grayscale image.
        /// </summary>
        /// <param name="userImage">Grayscale face image; resized to 100x100 before prediction.</param>
        /// <returns>The label of the best matching trained face.</returns>
        public int RecognizeUser(Image <Gray, byte> userImage)
        {
            // NOTE(review): reloading the trained model on every call is costly;
            // consider loading it once at startup.
            faceRecognizer.Load(recognizerPath);

            // The input already is Image<Gray, byte>, so the former redundant
            // Convert<Gray, byte>() copy was removed.
            var res = faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));

            return(res.Label);
        }
コード例 #4
0
        /// <summary>
        /// Routine to train face recognizer with sample images
        /// </summary>

        /*private void TrainRecognizer(string root)
         * {
         *      // This one was actually used to train the recognizer. I didn't push much effort and satisfied once it
         *      // distinguished all detected faces on the sample image, for the real-world application you might want to
         *      // refer to the following documentation:
         *      // OpenCV documentation and samples: http://docs.opencv.org/3.0-beta/modules/face/doc/facerec/tutorial/facerec_video_recognition.html
         *      // Training sets overview: https://www.kairos.com/blog/60-facial-recognition-databases
         *      // Another OpenCV doc: http://docs.opencv.org/2.4/modules/contrib/doc/facerec/facerec_tutorial.html#face-database
         *
         *      int id = 0;
         *      var ids = new List<int>();
         *      var mats = new List<Mat>();
         *      var namesList = new List<string>();
         *
         *      foreach (string dir in Directory.GetDirectories(root))
         *      {
         *              string name = System.IO.Path.GetFileNameWithoutExtension(dir);
         *              if (name.StartsWith("-"))
         *                      continue;
         *
         *              namesList.Add(name);
         *              UnityEngine.Debug.LogFormat("{0} = {1}", id, name);
         *
         *              foreach (string file in Directory.GetFiles(dir))
         *              {
         *                      var bytes = File.ReadAllBytes(file);
         *                      var texture = new UnityEngine.Texture2D(2, 2);
         *                      texture.LoadImage(bytes); // <--- this one has changed in Unity 2017 API and on that version must be changed
         *
         *                      ids.Add(id);
         *
         *                      // each loaded texture is converted to OpenCV Mat, turned to grayscale (assuming we have RGB source) and resized
         *                      var mat = Unity.TextureToMat(texture);
         *                      mat = mat.CvtColor(ColorConversionCodes.BGR2GRAY);
         *                      if (requiredSize.Width > 0 && requiredSize.Height > 0)
         *                              mat = mat.Resize(requiredSize);
         *                      mats.Add(mat);
         *              }
         *              id++;
         *      }
         *
         *      names = namesList.ToArray();
         *
         *      // train recognizer and save result for the future re-use, while this isn't quite necessary on small training sets, on a bigger set it should
         *      // give serious performance boost
         *      recognizer.Train(mats, ids);
         *      recognizer.Save(root + "/face-recognizer.xml");
         * }*/
        #endregion

        /// <summary>
        /// Initializes the scene: loads the Haar face cascade and the pre-trained
        /// Fisher face recognizer from in-memory XML assets and sets up the label
        /// names for the recognized persons.
        /// </summary>
        protected virtual void Awake()
        {
            // classifier: read the cascade from the in-memory XML text asset.
            // FileStorage is IDisposable; the former code leaked both instances.
            using (FileStorage storageFaces = new FileStorage(faces.text, FileStorage.Mode.Read | FileStorage.Mode.Memory))
            {
                cascadeFaces = new CascadeClassifier();
                if (!cascadeFaces.Read(storageFaces.GetFirstTopLevelNode()))
                {
                    throw new System.Exception("FaceProcessor.Initialize: Failed to load faces cascade classifier");
                }
            }

            // recognizer
            // There are three available face recognition algorithms in current version of the OpenCV library (please, refer to the OpenCV documentation for details)
            // Our particular training set was trained and saved with FisherFaceRecognizer() and should not work with others; refer to the "TrainRecognizer"
            // method defined above for instructions and sample code regarding training your own recognizer from scratch.
            //recognizer = FaceRecognizer.CreateLBPHFaceRecognizer();
            //recognizer = FaceRecognizer.CreateEigenFaceRecognizer();
            recognizer = FaceRecognizer.CreateFisherFaceRecognizer();

            // This pre-trained set was quite tiny and contained only the 5 persons detected on the sample image; a real-world
            // application needs far more sample data per person (see the OpenCV documentation links in "TrainRecognizer").
            using (var storageRecognizer = new FileStorage(recognizerXml.text, FileStorage.Mode.Read | FileStorage.Mode.Memory))
            {
                recognizer.Load(storageRecognizer);
            }

            // label names matching the recognizer's training labels
            names = new string[] { "Cooper", "DeGeneres", "Nyongo", "Pitt", "Roberts", "Spacey" };
        }
コード例 #5
0
ファイル: RecognizerEngine.cs プロジェクト: nainauy/FRDesktop
        /// <summary>
        /// Method which recognizes the user from the given image.
        /// </summary>
        /// <param name="userImage">Grayscale face image to identify.</param>
        /// <returns>The recognizer's prediction (label and distance).</returns>
        public FaceRecognizer.PredictionResult RecognizeUser(Image <Gray, byte> userImage)
        {
            _faceRecognizer.Load(_recognizerFilePath);

            ////normalize brightness
            //userImage._EqualizeHist();

            // Normalize to the 100x100 size used during training, then predict.
            var scaled = userImage.Resize(100, 100, Inter.Cubic);

            return _faceRecognizer.Predict(scaled);
        }
コード例 #6
0
 /// <summary>
 /// Creates a fresh LBPH recognizer and loads previously trained data from
 /// "trainedData.xml" when it exists; otherwise asks the user to train first.
 /// </summary>
 private void BtnLoad_Click(object sender, System.EventArgs e)
 {
     faceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100.0);

     if (!File.Exists("trainedData.xml"))
     {
         MessageBox.Show("you didnt train the model. Press train button!");
         return;
     }

     modeltrained = true;
     faceRecognizer.Load("trainedData.xml");
 }
コード例 #7
0
        /// <summary>
        /// Predicts the identity of the face on the given grayscale image.
        /// </summary>
        /// <param name="userImage">Grayscale face image; resized to 100x100 before prediction.</param>
        /// <returns>The matched label, or 0 when the match distance exceeds 150.</returns>
        public int Recognize(Image <Gray, byte> userImage)
        {
            faceRecognizer_.Load(recognizerFilePath_);

            var prediction = faceRecognizer_.Predict(userImage.Resize(100, 100, Inter.Cubic));

            // Distances above 150 are treated as "unknown" (label 0).
            return prediction.Distance <= 150 ? prediction.Label : 0;
        }
コード例 #8
0
        /// <summary>
        /// Loads the three pre-trained recognizer files for the given course code
        /// from the directory configured under "TrainingFilesPath".
        /// </summary>
        /// <param name="crsCode">Course code used to build the training file names.</param>
        public void loadTrainingFileOfCourseCode(string crsCode)
        {
            // Removed the unused "settings" local; the path is read directly.
            var    configFile       = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None);
            string trainingFilePath = configFile.AppSettings.Settings["TrainingFilesPath"].Value;

            // One training file per recognizer, suffixed -1 / -2 / -3.
            String trainFile1 = trainingFilePath + crsCode + "-1";
            String trainFile2 = trainingFilePath + crsCode + "-2";
            String trainFile3 = trainingFilePath + crsCode + "-3";

            fr1.Load(trainFile1); //loading the training data
            fr2.Load(trainFile2); //loading the training data
            fr3.Load(trainFile3); //loading the training data
        }
コード例 #9
0
        // Builds the main form: sets up the LBPH recognizer, the CPU and GPU Haar
        // cascade classifiers and the drawing font, then either loads an already
        // trained model from "traningdata.xml" or trains a new one from the
        // images in the "Traning Faces" folder.
        // NOTE(review): a freshly trained model is never saved back to
        // "traningdata.xml", so training repeats on every start — confirm intended.
        public Form1()
        {
            InitializeComponent();
            // LBPH parameters: radius 1, 8 neighbours, 8x9 grid, threshold 65.
            recognizer = new LBPHFaceRecognizer(1, 8, 8, 9, 65);

            classifier     = new CascadeClassifier(haarcascade);
            GPU_classifier = new GpuCascadeClassifier(haarcascade_cuda);

            font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5, 0.5);
            if (File.Exists(@"traningdata.xml"))
            {
                recognizer.Load(@"traningdata.xml");
            }
            else
            {
                foreach (var file in Directory.GetFiles(Application.StartupPath + @"\Traning Faces\"))
                {
                    // Skip files that cannot be loaded as grayscale images.
                    try { temp = new Image <Gray, Byte>(file); }
                    catch { continue; }
                    temp._EqualizeHist();

                    // Skip images with no detectable face; otherwise use the first detection.
                    var detectedFaces = classifier.DetectMultiScale(temp, 1.1, 15, new Size(24, 24), Size.Empty);
                    if (detectedFaces.Length == 0)
                    {
                        continue;
                    }

                    // Crop to the detected face and normalize to 100x100.
                    temp.ROI = detectedFaces[0];
                    temp     = temp.Copy();
                    temp     = temp.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    imagesList.Add(temp);
                    imagesLabels.Add(Path.GetFileNameWithoutExtension(file));
                }
                // Training uses sequential integer labels; imagesLabels keeps the
                // matching file names — presumably mapped back to names elsewhere.
                for (int i = 0; i < imagesList.Count; i++)
                {
                    imagesLabels_indices.Add(i);
                }

                try { recognizer.Train(imagesList.ToArray(), imagesLabels_indices.ToArray()); }
                catch (Exception ex)
                {
                    // NOTE(review): exits with code 0 on a training failure — confirm intended.
                    MessageBox.Show(ex.Message);
                    Environment.Exit(0);
                }
            }
        }
コード例 #10
0
        // Attempt to recognize the user using the parameterized image & the images in the database.
        // Returns the matched database user (with Distance filled in), or a
        // placeholder "unrecognized" user when the match distance is 87 or more.
        public User RecognizeUser(Image <Gray, byte> userImage)
        {
            _faceRecognizer.Load(_recognizerFilePath);
            var faceData = _faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));

            // Threshold value set to 87 after thorough testing & evaluation with a database of 50+ images from the internet.
            if (faceData.Distance < 87)
            {
                var user = _dataStoreAccess.GetUserByFaceId(faceData.Label);
                user.Distance = faceData.Distance;
                return(user);
            }
            else
            {
                // Fixed the mangled return statement: the initializer's closing
                // brace and the semicolon were previously misplaced ("}\n};").
                return new User {
                           UserName = Config.UnrecognizedUserName, Distance = faceData.Distance
                };
            }
        }
    }
コード例 #11
0
 /// <summary>
 /// Loads a previously trained recognizer model and the list of names that
 /// accompany it; warns the user when no training data exists yet.
 /// </summary>
 private void BtnLoad_Click(object sender, System.EventArgs e)
 {
     if (faceRecognizer == null)
     {
         MessageBox.Show("FaceRecognizer is null???");
         return;
     }
     if (File.Exists(trainingFilePath))
     {
         Modeltrained = true;
         faceRecognizer.Load(trainingFilePath);

         // "using" guarantees the reader is disposed even if reading throws
         // (the former explicit Close() would be skipped on an exception).
         using (StreamReader streamReader = new StreamReader(namesPath))
         {
             while (!streamReader.EndOfStream)
             {
                 listOfFileNames.Add(streamReader.ReadLine());
             }
         }
     }
     else
     {
         MessageBox.Show("you didnt train the model. Press train button!");
     }
 }
コード例 #12
0
    // Loads a previously saved training set from the given file path, choosing
    // the recognition algorithm from the file extension, then loads the person
    // names from Labels.xml located next to the training file.
    public void FnkEgitimDosyasiniYukle(string filename)    // path of the trainedfaces folder
    {
        // The file extension decides which recognizer implementation to build.
        string dosyauzantisi = Path.GetExtension(filename);

        switch (dosyauzantisi)
        {
        case (".LBPH"):
            TanimaTuru = "EMGU.CV.LBPHFaceRecognizer";
            recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);    //50
            break;

        case (".FFR"):
            TanimaTuru = "EMGU.CV.FisherFaceRecognizer";
            recognizer = new FisherFaceRecognizer(0, 3500);    //4000
            break;

        case (".EFR"):
            TanimaTuru = "EMGU.CV.EigenFaceRecognizer";
            recognizer = new EigenFaceRecognizer(80, double.PositiveInfinity);
            break;
        }

        // Load the selected training file.
        // NOTE(review): an unknown extension falls through the switch and leaves
        // "recognizer" unchanged (possibly null) — confirm that is acceptable.
        recognizer.Load(filename);

        // Now load the labels stored next to the training file.
        string filepath = Path.GetDirectoryName(filename);

        AdSoyadList.Clear();
        if (File.Exists(filepath + "/Labels.xml"))
        {
            // File.ReadAllBytes replaces the former manual FileStream read: it
            // releases the handle even on error and never does a short read.
            byte[] xmlBytes = File.ReadAllBytes(filepath + "/Labels.xml");

            // Parse the XML from memory and collect every <NAME> element value.
            using (MemoryStream xmlStream = new MemoryStream(xmlBytes))
            using (XmlReader xmlreader = XmlTextReader.Create(xmlStream))
            {
                while (xmlreader.Read())
                {
                    if (xmlreader.IsStartElement())
                    {
                        switch (xmlreader.Name)
                        {
                        case "NAME":
                            if (xmlreader.Read())
                            {
                                AdSoyadList.Add(xmlreader.Value.Trim());    // add each name read from the XML file
                            }
                            break;
                        }
                    }
                }
            }
            ContTrain = NumLabels;
        }
        _DizinKontrol = true;
    }
コード例 #13
0
ファイル: Form1.cs プロジェクト: asamirel/GraduationProject
        /// <summary>
        /// Trains the three face recognizers (fr1/fr2/fr3) on up to 40 subjects x
        /// 10 images read from "E:\faces\s{i}", saves and reloads each trained
        /// model, then re-tests the recognizers against a face set and shows the
        /// per-recognizer hit counters as a rough accuracy figure.
        /// </summary>
        public void Train()
        {
            List <Image <Gray, byte> > images = new List <Image <Gray, byte> >();
            //List<int> ids = new List<int>();
            List <int> idsTrainned = new List <int>();

            // Phase 1: collect up to 10 BMP images per subject, resized to 64x64.
            for (int i = 1; i <= 40; i++)
            {
                // *************************   //
                String        extFacesPath = "E:\\faces\\s" + i;
                DirectoryInfo dInfo        = new DirectoryInfo(extFacesPath);
                System.IO.Directory.CreateDirectory("c:\\img\\s" + i);
                var allImages = dInfo.GetFiles("*.bmp"); //get from this directory all files contain ".bmp"

                int j = 1;
                foreach (var image in allImages)
                {
                    if (j > 10)
                    {
                        break;
                    }
                    MessageBox.Show("here" + j);
                    // FileInfo is concatenated directly; its ToString() yields the file name.
                    string             photoPath = extFacesPath + "\\" + image;
                    Image <Gray, byte> img       = new Image <Gray, byte>(photoPath).Resize(64, 64, Inter.Cubic);
                    img.ToBitmap().Save("C:\\img\\s" + i + "\\" + j + ".bmp");
                    //img._EqualizeHist();
                    // NOTE(review): this overwrites the source image with its 64x64 version.
                    img.Save(photoPath);
                    images.Add(img);
                    idsTrainned.Add(i);

                    j++;
                }
            }

            var configFile = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None);
            var settings   = configFile.AppSettings.Settings;

            // Output paths for the three recognizers' training data.
            string h1Path = configFile.AppSettings.Settings["TrainingFilesPath"].Value + "\\" + "h1_";
            string h2Path = configFile.AppSettings.Settings["TrainingFilesPath"].Value + "\\" + "h2_";
            string h3Path = configFile.AppSettings.Settings["TrainingFilesPath"].Value + "\\" + "h3_";

            if (images.Count > 0)
            {
                // Phase 2: train each recognizer, persist it, and reload it.
                fr1.Train(images.ToArray(), idsTrainned.ToArray());     //this line is self explanatory
                fr1.Save(h1Path);                                       //saving the trainig

                fr2.Train(images.ToArray(), idsTrainned.ToArray());     //this line is self explanatory
                fr2.Save(h2Path);                                       //saving the trainig

                fr3.Train(images.ToArray(), idsTrainned.ToArray());
                fr3.Save(h3Path);   //saving the trainig

                fr1.Load(h1Path);   // Loading the training data of file 1
                fr2.Load(h2Path);   // Loading the training data of file 2
                fr3.Load(h3Path);   // Loading the training data of file 3
                MessageBox.Show("dsdsds");
                // Phase 3: predict each image with all three recognizers and count
                // correct labels under per-recognizer distance thresholds.
                for (int i = 1; i <= 40; i++)
                {
                    // *************************   //
                    // NOTE(review): this path differs from the training path above
                    // ("E:\courses gp\faces" vs "E:\faces") — confirm intentional.
                    String        extFacesPath = "E:\\courses gp\\faces\\s" + i;
                    DirectoryInfo dInfo        = new DirectoryInfo(extFacesPath);

                    var allImages = dInfo.GetFiles("*.bmp"); //get from this directory all files contain ".bmp"

                    int j = 1;
                    foreach (var face in allImages)
                    {
                        if (j <= 10)
                        {
                            string             photoPath = extFacesPath + "\\" + face;
                            // NOTE(review): test images are 200x200 while training used 64x64 — confirm intended.
                            Image <Gray, byte> img       = new Image <Gray, byte>(photoPath).Resize(200, 200, Inter.Cubic);
                            //This is used to get the result from testing
                            FaceRecognizer.PredictionResult result = new FaceRecognizer.PredictionResult();
                            result = fr1.Predict(img); //receiving the result
                            if (result.Distance <= 8000)
                            {
                                int testResult = result.Label;
                                if (i == testResult)
                                {
                                    counter1++;
                                }
                            }

                            result = fr2.Predict(img); //receiving the result
                            if (result.Distance <= 3100)
                            {
                                int testResult = result.Label;
                                if (i == testResult)
                                {
                                    counter2++;
                                }
                            }

                            result = fr3.Predict(img); //receiving the result
                            if (result.Distance <= 100)
                            {
                                int testResult = result.Label;
                                if (i == testResult)
                                {
                                    counter3++;
                                }
                            }
                        }
                        j++;
                    }
                }
            }
            MessageBox.Show("dsdsds");
            int accurracy1, accurracy2, accurracy3;

            // NOTE(review): integer division truncates; with up to 400 samples this
            // yields hits-per-subject, not a percentage — confirm the intended metric.
            accurracy1 = counter1 / 40;
            accurracy2 = counter2 / 40;
            accurracy3 = counter3 / 40;
            MessageBox.Show(accurracy1 + "      " + accurracy2 + "   " + accurracy3);


            //FinishAddStudent fas = new FinishAddStudent();
            //fas.Tag = this;
            //fas.Show(this);
            Hide();
        }
コード例 #14
0
        /*
         * Generates the next quiz question: picks a random entry from QnA, shows
         * its sample image, runs face detection + Fisher face recognition on it,
         * and marks the answer option matching the recognized name as correct.
         * Calls GameOver() once no questions remain.
         */
        void GenerateQuestion()
        {
            if (QnA.Count > 0)
            {
                currentQuestion   = Random.Range(0, QnA.Count);
                QuestionText.text = QnA[currentQuestion].Questiion;
                Image.texture     = QnA[currentQuestion].sample;

                Mat image = Unity.TextureToMat(QnA[currentQuestion].sample);

                // Prepare a contrast-normalized grayscale copy for detection.
                var gray = image.CvtColor(ColorConversionCodes.BGR2GRAY);
                Cv2.EqualizeHist(gray, gray);
                // detect matching regions (face bounding boxes)

                // NOTE(review): the cascade and recognizer are re-created on every
                // question; consider building them once at startup instead.
                FileStorage storageFaces = new FileStorage(Faces.text, FileStorage.Mode.Read | FileStorage.Mode.Memory);
                cascadeFaces = new CascadeClassifier();
                if (!cascadeFaces.Read(storageFaces.GetFirstTopLevelNode()))
                {
                    throw new System.Exception("FaceProcessor.Initialize: Failed to load Faces cascade classifier");
                }

                recognizer = FaceRecognizer.CreateFisherFaceRecognizer();
                recognizer.Load(new FileStorage(RecognizerXml.text, FileStorage.Mode.Read | FileStorage.Mode.Memory));
                // label names matching the recognizer's training labels
                names = new string[] { "Cooper", "DeGeneres", "Nyongo", "Pitt", "Roberts", "Spacey" };

                OpenCvSharp.Rect[] rawFaces = cascadeFaces.DetectMultiScale(gray, 1.1, 6);

                foreach (var faceRect in rawFaces)
                {
                    var grayFace = new Mat(gray, faceRect);
                    if (requiredSize.Width > 0 && requiredSize.Height > 0)
                    {
                        grayFace = grayFace.Resize(requiredSize);
                    }

                    int label = -1;

                    /*
                     * Now try to recognize the face. "Confidence" is actually a
                     * misguide: it is the distance from the sample to the closest
                     * known face, 0 being an "ideal match".
                     */

                    double confidence = 0.0;
                    recognizer.Predict(grayFace, out label, out confidence);
                    faceName = names[label];

                    // Compute a caption box under the face rectangle
                    // (computed here but not drawn — only the name is logged).
                    int          line        = 0;
                    const int    textPadding = 2;
                    const double textScale   = 2.0;
                    string       messge      = String.Format("{0}", names[label], (int)confidence);
                    var          textSize    = Cv2.GetTextSize(messge, HersheyFonts.HersheyPlain, textScale, 1, out line);
                    var          textBox     = new OpenCvSharp.Rect(
                        faceRect.X + (faceRect.Width - textSize.Width) / 2 - textPadding,
                        faceRect.Bottom,
                        textSize.Width + textPadding * 2,
                        textSize.Height + textPadding * 2
                        );
                    faceName = names[label];
                    Debug.Log(faceName);
                }
                // Assign the processed image to the texture of the Image component
                // on the scene.
                var texture  = Unity.MatToTexture(image);
                var rawImage = Image;
                rawImage.texture = texture;

                var transform = Image.GetComponent <RectTransform>();
                transform.sizeDelta = new Vector2(image.Width, image.Height);

                // Fill the answer buttons and flag the one matching the recognized face.
                for (int i = 0; i < Options.Length; i++)
                {
                    Options[i].transform.GetChild(0).GetComponent <Text>().text = QnA[currentQuestion].Answers[i];
                    if (faceName == Options[i].transform.GetChild(0).GetComponent <Text>().text)
                    {
                        Options[i].GetComponent <AnswerScript>().isCorrect = true;
                    }
                    else
                    {
                        Options[i].GetComponent <AnswerScript>().isCorrect = false;
                    }
                }
            }
            else
            {
                GameOver();
            }
        }
コード例 #15
0
 /// <summary>
 /// Loads previously saved recognizer state from the given file.
 /// </summary>
 /// <param name="filePath">Path of the saved recognizer data.</param>
 public void Load(string filePath) => _recognizer.Load(filePath);
コード例 #16
0
 /// <summary>
 /// Menu handler: loads the trained recognizer state from "traningdata.xml".
 /// </summary>
 private void loadToolStripMenuItem_Click(object sender, EventArgs e) => recognizer.Load("traningdata.xml");
コード例 #17
0
 /// <summary>
 /// Loads the recognizer's trained data from the configured file path.
 /// </summary>
 public void LoadRecognizerData() => _faceRecognizer.Load(_recognizerFilePath);
コード例 #18
0
 /// <summary>
 /// Loads the recognizer's trained data from the configured file path.
 /// </summary>
 public void LoadRecognizerData() => faceRecognizer.Load(recognizeFilePath);
コード例 #19
0
        /// <summary>
        /// Loads the trained recognizer from the specified location, choosing the
        /// algorithm from the file extension (.LBPH/.FFR/.EFR), then loads the
        /// accompanying Labels.xml name list from the same directory.
        /// </summary>
        /// <param name="filename">Path of the saved recognizer training data.</param>
        public void Load_Eigen_Recogniser(string filename)
        {
            //Lets get the recogniser type from the file extension
            string ext = Path.GetExtension(filename);

            switch (ext)
            {
            case (".LBPH"):
                Recognizer_Type = "EMGU.CV.LBPHFaceRecognizer";
                recognizer      = new LBPHFaceRecognizer(1, 8, 8, 8, 100);//50
                break;

            case (".FFR"):
                Recognizer_Type = "EMGU.CV.FisherFaceRecognizer";
                recognizer      = new FisherFaceRecognizer(0, 3500);//4000
                break;

            case (".EFR"):
                Recognizer_Type = "EMGU.CV.EigenFaceRecognizer";
                recognizer      = new EigenFaceRecognizer(80, double.PositiveInfinity);
                break;
            }

            //introduce error checking
            // NOTE(review): an unknown extension falls through the switch and
            // leaves "recognizer" unchanged (possibly null) — confirm acceptable.
            recognizer.Load(filename);

            //Now load the labels
            string direct = Path.GetDirectoryName(filename);

            Names_List.Clear();
            if (File.Exists(direct + "/Labels.xml"))
            {
                // File.ReadAllBytes replaces the former manual FileStream read: it
                // releases the handle even on error and never does a short read.
                byte[] xmlBytes = File.ReadAllBytes(direct + "/Labels.xml");

                // Parse the XML from memory and collect every <NAME> element value.
                using (MemoryStream xmlStream = new MemoryStream(xmlBytes))
                using (XmlReader xmlreader = XmlTextReader.Create(xmlStream))
                {
                    while (xmlreader.Read())
                    {
                        if (xmlreader.IsStartElement())
                        {
                            switch (xmlreader.Name)
                            {
                            case "NAME":
                                if (xmlreader.Read())
                                {
                                    Names_List.Add(xmlreader.Value.Trim());
                                }
                                break;
                            }
                        }
                    }
                }
                ContTrain = NumLabels;
            }
            _IsTrained = true;
        }
コード例 #20
0
    /// <summary>
    /// Loads the trained recognizer from the specified location, choosing the
    /// algorithm (with its configured parameters) from the file extension
    /// (.LBPH/.FFR/.EFR), then loads the accompanying label names.
    /// </summary>
    /// <param name="filename">Path of the saved recognizer training data.</param>
    public void Load_Eigen_Recogniser(string filename)
    {
        //Lets get the recogniser type from the file extension
        string ext = Path.GetExtension(filename);

        switch (ext)
        {
        case (".LBPH"):
            trainParameters.RecognizerType = "EMGU.CV.LBPHFaceRecognizer";
            recognizer = new LBPHFaceRecognizer(
                trainParameters.LBPH.Radius,
                trainParameters.LBPH.Neighbors,
                trainParameters.LBPH.GridX,
                trainParameters.LBPH.GridY,
                trainParameters.LBPH.Treshold);     // 1,8,8,8,100
            break;

        case (".FFR"):
            trainParameters.RecognizerType = "EMGU.CV.FisherFaceRecognizer";
            recognizer = new FisherFaceRecognizer(
                trainParameters.Fisher.Components,
                trainParameters.Fisher.Treshold);     // 0,3500
            break;

        case (".EFR"):
            trainParameters.RecognizerType = "EMGU.CV.EigenFaceRecognizer";
            recognizer = new EigenFaceRecognizer(
                trainParameters.Eigen.Components,
                trainParameters.Eigen.Treshold);     // 80,double.PositiveInfinity
            break;
        }

        //introduce error checking
        // NOTE(review): an unknown extension falls through the switch and leaves
        // "recognizer" unchanged (possibly null) — confirm acceptable.
        recognizer.Load(filename);

        //Now load the labels
        string direct = Path.GetDirectoryName(filename);

        Names_List.Clear();
        if (File.Exists(direct + LABELSXML))
        {
            // File.ReadAllBytes replaces the former manual FileStream read: it
            // releases the handle even on error and never does a short read.
            byte[] xmlBytes = File.ReadAllBytes(direct + LABELSXML);

            // Parse the XML from memory and collect every <NAME> element value.
            using (MemoryStream xmlStream = new MemoryStream(xmlBytes))
            using (XmlReader xmlreader = XmlTextReader.Create(xmlStream))
            {
                while (xmlreader.Read())
                {
                    if (xmlreader.IsStartElement())
                    {
                        switch (xmlreader.Name)
                        {
                        case "NAME":
                            if (xmlreader.Read())
                            {
                                Names_List.Add(xmlreader.Value.Trim());
                            }
                            break;
                        }
                    }
                }
            }
            contTrain = numLabels;
        }
        _IsTrained = true;
    }