//-------------------------------------------------------------------------------------//
        //<<<<<----------------FUNCTIONS USED TO TRAIN RECOGNIZER ON TRAINING SET----------->>>>
        //-------------------------------------------------------------------------------------//
        /// <summary>
        /// Trains recognizer on fetched face-label pairs and saves the trained data to recognition variables
        /// </summary>
        public void TrainRecognizer()
        {
            MCvTermCriteria termCrit = new MCvTermCriteria(iMaxItereations, dEPS);
            ImageInDatabase dbTrainingSet = new ImageInDatabase();

            // This will fill the training images array AND labels array
            dbTrainingSet.LoadCompleteTrainingSet();
            recognizer = new EigenObjectRecognizer(dbTrainingSet.m_trainingImages, dbTrainingSet.m_TrainingLabels, dDistTreshHold, ref termCrit);
        }
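Once TrainRecognizer() has populated the recognizer field, new face crops can be classified against the training set. The sketch below is a hypothetical companion method, assuming the recognizer field above and crops pre-sized to match the training images; note that older Emgu CV 2.x builds return the label string directly from Recognize, while later 2.4.x builds return a RecognitionResult whose Label property holds the name.

        /// <summary>
        /// Hypothetical helper: classifies a face crop with the trained recognizer.
        /// Assumes TrainRecognizer() has already run and the crop matches the
        /// dimensions of the training images.
        /// </summary>
        public string RecognizeFace(Image<Gray, byte> faceCrop)
        {
            // Returns the matched label, or an empty string when the eigen
            // distance exceeds dDistTreshHold (no confident match).
            return recognizer.Recognize(faceCrop);
        }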
    private void InitEigenObjectRecognizer() {
      if (trainedImages.Count <= 0) { return; }
      initEigen = true; // flag that (re)initialization is in progress

      // Termination criteria: use the number of trained images as the max iteration count
      termCrit = new MCvTermCriteria(trainedImages.Count, 0.001);

      // Eigenface recognizer with an eigen-distance threshold of 5000
      recognizer = new EigenObjectRecognizer(trainedImages.ToArray(), trainedLabels.ToArray(), 5000, ref termCrit);

      initEigen = false;
    }
        public EigenCardDetector(string foldername)
        {
            List<FileInfo> files = new List<FileInfo>(new DirectoryInfo(foldername).GetFiles());
            List<Image<Gray, byte>> images = new List<Image<Gray, byte>>(files.Count);
            foreach (FileInfo info in files)
            {
                Bitmap bit = new Bitmap(info.FullName);
                images.Add(new Image<Gray, byte>(bit));
            }

            MCvTermCriteria crit = new MCvTermCriteria(0.05);
            recog = new EigenObjectRecognizer(images.ToArray(), ref crit);
        }
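A hedged usage sketch for the card detector above; Identify is an illustrative method name, and the note about default labels follows from the label-less EigenObjectRecognizer constructor (see the TestEigenObjects example later in this listing, where index strings come back as labels).

        // Hypothetical companion method for EigenCardDetector.
        public string Identify(Bitmap candidate)
        {
            using (Image<Gray, byte> query = new Image<Gray, byte>(candidate))
            {
                // With the constructor that takes no labels, each training image
                // is labeled by its index ("0", "1", ...), so the result maps
                // back to the file order read in the constructor.
                return recog.Recognize(query);
            }
        }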
Example #4
        public FacialInfo()
        {
            // NOTE: CVConfig.txt is expected beside the executable; a missing file
            // throws here before the defaults in the catch block can apply.
            StreamReader SR = new StreamReader("CVConfig.txt");
            gFacedetection = new HaarCascade(@"haarcascade_frontalface_alt.xml");
            gHanddetection = new HaarCascade(@"haarcascade_hand.xml");

            gCompareBoxes = new List<Image<Gray, byte>>();
            gRecognitionBoxes = new List<Image<Gray, byte>>();


            Image<Gray, byte> Blank = new Image<Gray, byte>(128, 120, new Gray(0.5));
            for (int x = 0; x < 6; x++)
            {
                gCompareBoxes.Add(Blank);
                gRecognitionBoxes.Add(Blank);
            }

            try
            {
                ImageWidth = int.Parse(SR.ReadLine().Split(':')[1]);
                ImageHeight = int.Parse(SR.ReadLine().Split(':')[1]);
                Threshold = int.Parse(SR.ReadLine().Split(':')[1]);
                termcrit = double.Parse(SR.ReadLine().Split(':')[1]);
            }
            catch 
            {
                //default values
                ImageWidth = 128;
                ImageHeight = 120;
                termcrit = 0.001;
                Threshold = 4500;
            }
            SR.Close();

            gUP = new UserProfile();

            MCvTermCriteria termCrit = new MCvTermCriteria(gUP.LoadedUsers.Count, termcrit);
            gEOR = new EigenObjectRecognizer(
               gUP.getImages(),
               gUP.getNumbers(),
                10000,//4500
               ref termCrit);
        }
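The try block above implies a simple colon-separated layout for CVConfig.txt, read line by line in a fixed order. A plausible file is sketched below; the key names are illustrative, since the parser only looks at the value after the first ':' on each line, and the values shown are the defaults from the catch block.

            ImageWidth:128
            ImageHeight:120
            Threshold:4500
            TermCrit:0.001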
        private void btnDetect_Click(object sender, EventArgs e)
        {
            current = new Image<Bgr, byte>(filePath).Resize(300, 250, INTER.CV_INTER_CUBIC);
            Image<Gray, byte> grayScale = current.Convert<Gray, byte>();

            MCvAvgComp[][] detected = grayScale.DetectHaarCascade(face, scale, minNeighbors, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
            foreach (MCvAvgComp d in detected[0])
            {
                current.Draw(d.rect, new Bgr(Color.LawnGreen), 2);
                if (trainingImgs.Count > 0)
                {
                    Image<Gray, byte> dFace = current.Copy(d.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    MCvTermCriteria criteria = new MCvTermCriteria(trainingImgs.Count, epsilon);  // max iterations, epsilon
                    EigenObjectRecognizer recognize = new EigenObjectRecognizer(trainingImgs.ToArray(), trainingNames.ToArray(), 0, ref criteria);
                    MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 1, 1);
                    string name = recognize.Recognize(dFace);
                    current.Draw(name, ref font, new Point(d.rect.X - 2, d.rect.Y - 20), new Bgr(Color.Red));
                }
            }
            picWebCam.Image = current.ToBitmap();
        }
Example #6
        /// <summary>
        /// The only constructor. Reads faces from the database and initializes the internal object responsible for face recognition.
        /// </summary>
        public Recognizer()
        {
            readFiles();//read faces from the database

            //set the labels and the accuracy with which recognition should be performed
            MCvTermCriteria criteria = new MCvTermCriteria(labels.Length, 0.001);

            //create a new face recognition object
            //it computes eigenvectors for every face in the database
            //and for every face being checked, then compares the values computed
            //for the checked face against the values computed for the
            //faces from the database
            eor = new EigenObjectRecognizer(
                faces,//array of faces
                labels,//labels corresponding to the faces
                3000,//threshold level between individual eigenvectors
                ref criteria//termination criterion
                );

            //create the cascade used for face detection
            haar = new HaarCascade("haarcascade_frontalface_default.xml");
        }
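A sketch of how this Recognizer class might expose recognition, assuming the eor and haar fields initialized in the constructor above; the method name, the 100x100 crop size, and the detector parameters are illustrative and mirror the other examples in this listing.

        // Hypothetical method: detect faces in a frame and label the first one.
        public string RecognizeFirstFace(Image<Bgr, byte> frame)
        {
            Image<Gray, byte> gray = frame.Convert<Gray, byte>();
            MCvAvgComp[][] detected = gray.DetectHaarCascade(
                haar, 1.2, 10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            foreach (MCvAvgComp f in detected[0])
            {
                // crop and resize to the size assumed for the training faces
                Image<Gray, byte> candidate = gray.Copy(f.rect).Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // empty string when no stored face is within the 3000 threshold
                return eor.Recognize(candidate);
            }
            return "";
        }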
        public static EigenObjectRecognizer CreateRecognizerFromFotosInFolder(String folder,  String pattern, double accuracy, int eigenDistanceThreshold)
        {
            List<Image<Gray, byte>> trainedImages = new List<Image<Gray, byte>>();
            List<String> labels = new List<string>();

             string[] subdirEntries = Directory.GetDirectories(folder);
             foreach (var directory in subdirEntries)
             {
                 string[] fileEntries = Directory.GetFiles(directory);
                 var label = directory.Remove(0, directory.LastIndexOf("\\")+1);
                 foreach (var file in fileEntries.Where(x=>x.Contains(pattern)))
                 {
                     Image<Gray, byte> image = new Image<Gray, byte>(file);
                     trainedImages.Add(image);
                     labels.Add(label);
                 }
             }

             MCvTermCriteria termCrit = new MCvTermCriteria(40, accuracy);

             EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                 trainedImages.ToArray(),
                 labels.ToArray(),
                 eigenDistanceThreshold,
                 ref termCrit);

             //int i = 1;

             //Image<Gray, float>[] eigenfaces;
             //Image<Gray, float> avg;
             //EigenObjectRecognizer.CalcEigenObjects(trainedImages.ToArray(),ref termCrit,out eigenfaces, out avg);

            //foreach(var eigenface in eigenfaces)
            // {
            //     eigenface.Bitmap.Save(@"e:\data\phototest\eigen" + i + ".bmp");
            //     i++;
            // }
             return recognizer;
        }
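A call-site sketch for the factory method above. The folder layout (one subdirectory per person, subfolder name = label) follows from how the method builds its label list; the concrete path and parameter values are illustrative.

             // Hypothetical call site: subfolder names become labels.
             EigenObjectRecognizer rec = CreateRecognizerFromFotosInFolder(
                 @"e:\data\phototest", // root folder with one subfolder per person
                 ".bmp",               // only files whose names contain this pattern are loaded
                 0.001,                // accuracy (eps) for the termination criteria
                 3000);                // eigen distance threshold; 0 would always return the closest label
             string who = rec.Recognize(probeFace); // probeFace: Image<Gray, byte> sized like the training photos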
Example #8
        public CommandService()
        {
            userInfoManager = new UserInfoManager();
            userInfoManager.connect();

            foreach(var user in userInfoManager.ListUsers())
            {
                foreach (var photo in userInfoManager.ListUserPhotoData(user))
                {
                    labels.Add(user.Name);
                    images.Add(photo.Image);

                    Console.WriteLine("Added image");
                }
            }

            MCvTermCriteria crit = new MCvTermCriteria(1.0);

            this.recognizer = new EigenObjectRecognizer(images.ToArray(),
                                                        labels.ToArray(),
                                                        2000,
                                                        ref crit);
        }
        public static void TrainRecognizer()
        {
            var trainingCases = GetTrainingCases();

            if (trainingCases != null
                && trainingCases.Count > 0)
            {
                Image<Gray, byte>[] images = trainingCases.Select(item => item.Image).ToArray();
                string[] labels = trainingCases.Select(item => item.Label).ToArray();

                Recognizer = new EigenObjectRecognizer(images, labels, ref _criteria);
            }
        }
Example #10
        private string recognizerall(MCvAvgComp f)
        {
            string[] ten = new string[5];
            ten[0] = "";

            if (trainingImages.ToArray().Length != 0)
            {

                // Termination criteria: number of trained images (tong) as max iterations; smaller eps => higher accuracy
                MCvTermCriteria termCrit = new MCvTermCriteria(tong, 0.6);
                MCvTermCriteria termCritn = new MCvTermCriteria(tong, 0.7);
                MCvTermCriteria termCritm = new MCvTermCriteria(tong, 0.7);
                MCvTermCriteria termCriteL = new MCvTermCriteria(tong, 0.7);
                MCvTermCriteria termCriteR = new MCvTermCriteria(tong, 0.7);
                //Eigenface recognizer

                EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                   trainingImages.ToArray(),
                   labels.ToArray(),
                   2000,
                   ref termCrit);

                ten[0] = recognizer.Recognize(resultface);
                /*

                 /////////////////////////////////////////////////// check the nose
                 if (resultnose != null)
                 {
                     EigenObjectRecognizer recognizernose = new EigenObjectRecognizer(
                        trainingImagenose.ToArray(),
                        labels.ToArray(),
                        1000,
                        ref termCritn);

                     ten[1] = recognizernose.Recognize(resultnose);
                     currentFrame.Draw("nose: "+ten[1], ref font, new Point(f.rect.X - 2, f.rect.Y - 15), new Bgr(Color.DarkBlue));

                 }
                 //////////////////////////////////////////////////////////

                 if (resultmouth != null)
                 {
                        EigenObjectRecognizer recognizermouth = new EigenObjectRecognizer(
                        trainingImagemouth.ToArray(),
                        labels.ToArray(),
                        1000,
                        ref termCritm);

                     ten[2] = recognizermouth.Recognize(resultmouth);
                     currentFrame.Draw("mouth: "+ten[2], ref font, new Point(f.rect.X - 2, f.rect.Y - 30), new Bgr(Color.LightGreen));
                 }

                 if (resulteyeL != null)
                 {
                     EigenObjectRecognizer recognizereyeL = new EigenObjectRecognizer(
                     trainingImageneyeL.ToArray(),
                     labels.ToArray(),
                     1000,
                     ref termCriteL);

                     ten[3] = recognizereyeL.Recognize(resulteyeL);
                     currentFrame.Draw("eyes: "+ten[3], ref font, new Point(f.rect.X - 45, f.rect.Y - 45), new Bgr(Color.LightGreen));
                 }
                 if (resulteyeR != null)
                 {
                     EigenObjectRecognizer recognizereyeR = new EigenObjectRecognizer(
                     trainingImageneyeR.ToArray(),
                     labels.ToArray(),
                     600,
                     ref termCriteR);

                    ten[4] = recognizereyeR.Recognize(resulteyeR);
                    currentFrame.Draw(ten[4], ref font, new Point(f.rect.X +65, f.rect.Y - 45), new Bgr(Color.LightGreen));
                 }

             }

             int tam = 0;
             string name="";
             for (int i = 1; i < 5; i++)
             {
                 if (ten[0] == ten[i]) tam++;
                 if (tam > 2&&ten[0]!=null) { name = ten[0]; break; } else name = "";
             }
                 */
            }
            return ten[0];
        }
Example #11
        public void FrameGrabber(object sender, EventArgs e)
        {
            _lastInfo = new List<HeadInformation>();

            CountOfFacesLabel.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a red rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria: number of trained images as the max iteration count
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Red));

                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                CountOfFacesLabel.Text = facesDetected[0].Length.ToString();

                //Set the region of interest on the faces

                gray.ROI = f.rect;
                MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                   eye,
                   1.9,
                   5,
                   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                   new Size(20, 20));
                gray.ROI = Rectangle.Empty;

                foreach (MCvAvgComp ey in eyesDetected[0])
                {
                    Rectangle eyeRect = ey.rect;
                    eyeRect.Inflate(-7, -7);
                    eyeRect.Offset(f.rect.X, f.rect.Y);
                    currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                }

                //gray.ROI = f.rect;
                //MCvAvgComp[][] mouthDetected = gray.DetectHaarCascade(
                //   mouth,
                //   1.1,
                //   37,
                //   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                //   new Size(20, 20));
                //gray.ROI = Rectangle.Empty;

                //foreach (MCvAvgComp ey in mouthDetected[0])
                //{
                //    Rectangle mouthRect = ey.rect;
                //    mouthRect.Offset(f.rect.X, f.rect.Y);
                //    currentFrame.Draw(mouthRect, new Bgr(Color.Black), 2);
                //}

                gray.ROI = f.rect;
                MCvAvgComp[][] smileDetected = gray.DetectHaarCascade(
                   smile,
                   2,
                   20,
                   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                   new Size(20, 20));
                gray.ROI = Rectangle.Empty;

                HeadInformation hi = new HeadInformation();
                hi.IsSmile = false;

                foreach (MCvAvgComp ey in smileDetected[0])
                {
                    Rectangle smileRect = ey.rect;
                    smileRect.Offset(f.rect.X, f.rect.Y);
                    currentFrame.Draw(smileRect, new Bgr(Color.Black), 2);
                    currentFrame.Draw("smile", ref font, new Point(smileRect.X, smileRect.Y), new Bgr(Color.Red));
                    hi.IsSmile = true;
                }

                hi.Head = f.rect;
                if (eyesDetected[0] != null && eyesDetected[0].Length > 0)
                {
                    if (eyesDetected[0].Length == 1)
                    {
                        hi.Eye1 = eyesDetected[0][0].rect;
                    }
                    if (eyesDetected[0].Length == 2)
                    {
                        hi.Eye1 = eyesDetected[0][0].rect;
                        hi.Eye2 = eyesDetected[0][1].rect;
                    }
                }

                hi.Name = name;

                _lastInfo.Add(hi);
            }

            t = 0;

            //Names concatenation of persons recognized
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame.Resize(800, 600, INTER.CV_INTER_CUBIC);
            ListOfUserLabel.Text = names;
            names = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
Example #12
        /// <summary>
        /// Loads the training data given a (string) folder location
        /// </summary>
        /// <param name="Folder_loacation"></param>
        /// <returns></returns>
        private bool LoadTrainingData(string Folder_loacation)
        {
            Names_List.Clear();
            trainingImages.Clear();

            //We should have 5 sets of training data
            for (int i = 0; i < 5; ++i)
            {
                int id = i + 1;
                for (int j = 0; j < 5; ++j)
                {
                    if (File.Exists(Folder_loacation + "\\hand_" + i + "_" + j + ".png"))
                    {
                        //labels are 1-based, so use id = i + 1
                        Names_List.Add("" + id);
                        Console.WriteLine("Opening data " + i + " for training");
                        trainingImages.Add(new Image<Gray, byte>(Folder_loacation + "\\hand_" + i + "_" + j + ".png")); //path now matches the File.Exists check above

                    }
                    else return false;
                }
            }

            ContTrain = NumLabels;

            if (trainingImages.ToArray().Length != 0)
            {
                //Eigen face recognizer
                recognizer = new EigenObjectRecognizer(trainingImages.ToArray(),
                Names_List.ToArray(), 5000, ref termCrit); //5000 default
                return true;
            }
            else return false;
        }
        private void button_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog openFileDialog = new OpenFileDialog();
            openFileDialog.ShowDialog();
              var filePath =   openFileDialog.FileName;
            Image<Bgr, Byte> image = new Image<Bgr, byte>(filePath); //Read the files as an 8-bit Bgr image
            List<System.Drawing.Rectangle> faces = new List<System.Drawing.Rectangle>();
            List<System.Drawing.Rectangle> eyes = new List<System.Drawing.Rectangle>();

            Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes);

            foreach (System.Drawing.Rectangle face in faces)
                image.Draw(face, new Bgr(System.Drawing.Color.Red), 2);
            foreach (System.Drawing.Rectangle eye in eyes)
                image.Draw(eye, new Bgr(System.Drawing.Color.Blue), 2);

            ImageViewer.Show(image);
            File.WriteAllBytes("test.jpg", image.ToJpegData());

            Image<Gray, Byte> smileImage = new Image<Gray, byte>("happy.jpg"); //Read the file as an 8-bit grayscale image
            Image<Gray, Byte> sadImage = new Image<Gray, byte>("sad.jpg"); //Read the file as an 8-bit grayscale image

            List<Image<Gray, Byte>> trainingList = new List<Image<Gray, byte>>();
            trainingList.Add(smileImage);
            trainingList.Add(sadImage);

            List<string> labelList = new List<string>();
            labelList.Add("happy");
            labelList.Add("sad");
            // labelList.Add(2);

            MCvTermCriteria termCrit = new MCvTermCriteria(10, 0.001);

            //Eigenface recognizer
            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                trainingList.ToArray(),
                labelList.ToArray(),
                5000,
                ref termCrit);

            Image<Gray, Byte> inputImage = new Image<Gray, byte>(filePath); //Read the file as an 8-bit grayscale image
            var resizedImage = inputImage.Resize(smileImage.Width, smileImage.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            var name = recognizer.Recognize(resizedImage).Label;

            List<int> temp = new List<int>();
            temp.Add(1);
            temp.Add(2);

            EigenFaceRecognizer recognizer2 = new EigenFaceRecognizer(80, double.PositiveInfinity);
            recognizer2.Train(trainingList.ToArray(), temp.ToArray());
            var dd = recognizer2.Predict(resizedImage);

            ImageViewer.Show(resizedImage);
        }
Example #14
        /// <summary>
        /// Loads the eigenvalues into an EigenValueTags instance
        /// </summary>
        /// <param name="eigenRec">
        /// A <see cref="EigenObjectRecognizer"/>
        /// </param>
        /// <returns>
        /// A <see cref="EigenValueTags"/>
        /// </returns>
        public EigenValueTags RecordEigenValue(EigenObjectRecognizer eigenRec)
        {
            //DELETEME
            //FaceClassifier.WriteEigenValueFile(eigenRec,"","eigenvalue");

            EigenValueTags eigenValueTags = new EigenValueTags();
            const int MAX_EIGEN_LENGTH = 30;
            int nums_train = eigenRec.Labels.Length;

            float[][] eigenMatrix = new float[nums_train][];

            int max_eigenvalueLength = Math.Min(MAX_EIGEN_LENGTH, 4 + nums_train/5);
            if(nums_train < 5)
                max_eigenvalueLength = nums_train;

            for(int i=0;i<nums_train;i++){

                Emgu.CV.Matrix<float> eigenValue = eigenRec.EigenValues[i];

                float[] temp = new float[max_eigenvalueLength];

                for(int k=0; k<max_eigenvalueLength; k++){
                    temp[k] = eigenValue.Data[k,0];
                }
                eigenValueTags.Add(new VTag(temp, eigenRec.Labels[i]));

            }
            Log.Debug("eigenVTags Length = "+ eigenValueTags.eigenTaglist.Count);
            return eigenValueTags;
        }
    /// <summary>
    /// Loads the training data given a (string) folder location
    /// </summary>
    /// <param name="Folder_loacation"></param>
    /// <returns></returns>
    private bool LoadTrainingData(string Folder_loacation)
    {
        if (File.Exists(Folder_loacation +"\\TrainedLabels.xml"))
        {
            try
            {
                //message_bar.Text = "";
                Names_List.Clear();
                trainingImages.Clear();
                FileStream filestream = File.OpenRead(Folder_loacation + "\\TrainedLabels.xml");
                long filelength = filestream.Length;
                byte[] xmlBytes = new byte[filelength];
                filestream.Read(xmlBytes, 0, (int)filelength);
                filestream.Close();

                MemoryStream xmlStream = new MemoryStream(xmlBytes);

                using (XmlReader xmlreader = XmlTextReader.Create(xmlStream))
                {
                    while (xmlreader.Read())
                    {
                        if (xmlreader.IsStartElement())
                        {
                            switch (xmlreader.Name)
                            {
                                case "NAME":
                                    if (xmlreader.Read())
                                    {
                                        Names_List.Add(xmlreader.Value.Trim());
                                        NumLabels += 1;
                                    }
                                    break;
                                case "FILE":
                                    if (xmlreader.Read())
                                    {
                                        //PROBLEM HERE IF TRAINING FOLDER IS MOVED
                                        trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "\\TrainedFaces\\" + xmlreader.Value.Trim()));
                                    }
                                    break;
                            }
                        }
                    }
                }
                ContTrain = NumLabels;

                if (trainingImages.ToArray().Length != 0)
                {
                    //Eigen face recognizer
                    recognizer = new EigenObjectRecognizer(trainingImages.ToArray(),
                    Names_List.ToArray(), 5000, ref termCrit); //5000 default
                    return true;
                }
                else return false;
            }
            catch (Exception ex)
            {
                Error = ex.ToString();
                return false;
            }
        }
        else return false;
    }
        private void CompositionTarget_Rendering(object sender, EventArgs e)
        {
            _status.Fill = _rd;

            #region Recognition
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            gray = currentFrame.Convert<Gray, Byte>();

            var size = new System.Drawing.Size(20, 20);
            var window = new System.Drawing.Size(grabber.Width, grabber.Height);

            _rects = _faceClassifier.DetectMultiScale(gray, 1.2, 10, size, window);

            foreach (var f in _rects)
            {
                result = currentFrame.Copy(f).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                _status.Fill = new SolidColorBrush(Colors.Green);
                currentFrame.Draw(f, new Bgr(System.Drawing.Color.Red), 2);

                //if we have already trained
                if (CommonData.TrainingImages.Count > 0)
                {
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
                    //Eigen face recognizer
                    recognizer = new EigenObjectRecognizer(
                       CommonData.TrainingImages.ToArray(),
                       CommonData.Names.ToArray(),
                       3000,
                       ref termCrit);

                    string name = recognizer.Recognize(result);
                    currentFrame.Draw(name, ref font, new System.Drawing.Point(f.X - 2, f.Y - 2),
                        new Bgr(System.Drawing.Color.LightGreen));
                }

                //finally draw the source
                _imgCamera.Source = ImageHelper.ToBitmapSource(currentFrame);
            }
            #endregion
        }
        public void UpdateRecognizer()
        {
            //Termination criteria: number of trained images as the max iteration count
            MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

            //Eigen face recognizer
            recognizer = new EigenObjectRecognizer(
               trainingImages.ToArray(),
               labels.ToArray(),
               threashold,
               ref termCrit);
        }
 private bool LoadTrainingData()
 {
     mydb = new DBConn();
     allname = mydb.getAllImageID();
     string[] allname_st = allname.Select(x => x.ToString()).ToArray();
     trainingImages = mydb.getTrainedImageList();
     
     //trainingImages = mydb.getRawTrainedImageList();
     if (mydb.getImageCount() > 0)
     {
         if (trainingImages.Length != 0)
         {
             //set the iteration count and convergence accuracy
             //termCrit = new MCvTermCriteria(mydb.getImageCount(), 0.001);
             termCrit = new MCvTermCriteria(5000, 0.0001);

             //Eigenface recognizer
             recognizer = new EigenObjectRecognizer(trainingImages, allname_st, maxRecognizeTreshold, ref termCrit);

             return true;
         }
         else
         {
             return false;
         }
     }
     else
     {
         return false;
     }
 }
 public void Dispose()
 {
     recognizer = null;
     trainingImages = null;
     allname = null;
     Error = null;
     GC.Collect();
 }
        void FrameGrabber(object sender, EventArgs e)
        {
            currentFrame = grabber.QueryFrame();
            if (currentFrame != null)
            {
                if (counter >= 1)
                {
                    label5.Text     = "Selected character found in frame";
                    labcounter.Text = ": " + counter + " times.";
                }

                //label3.Text = "0";
                //label4.Text = "";
                //NamePersons.Add("");
                try
                {
                    //Get the current frame from the capture device
                    currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                }
                catch (Exception ex)
                {
                    Application.Idle += new EventHandler(checkoutput);
                    MessageBox.Show("Video has ended", "Frame Query Exit", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                //Convert it to Grayscale
                gray = currentFrame.Convert <Gray, Byte>();

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.1,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));
                pro = 0;

                //Action for each element detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t      = t + 1;
                    result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    //draw a red rectangle around the detected face
                    currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                    imageBox2.Image = result;
                    if (FaceImgaes.ToArray().Length != 0)
                    {
                        //Termination criteria: number of trained images as the max iteration count
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                        //Eigen face recognizer

                        Emgu.CV.EigenObjectRecognizer recognizer = new Emgu.CV.EigenObjectRecognizer(
                            FaceImgaes.ToArray(),
                            labels.ToArray(),
                            2000,
                            ref termCrit);

                        name = recognizer.Recognize(result);
                        if (pro == 0)
                        {
                            if (name != "")
                            {
                                counter++;
                                currentFrame.Draw(f.rect, new Bgr(Color.Green), 2);
                                pro++;
                            }



                            //Draw the label for each face detected and recognized
                            currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                        }
                    }
                    else
                    {
                        imageBox2.Image = currentFrame;
                    }



                    //Set the number of faces detected on the scene
                    label4.Text = facesDetected[0].Length.ToString();

                    /*
                     *
                     * //Set the region of interest on the faces
                     *
                     * gray.ROI = f.rect;
                     * MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                     * eye,
                     * 1.1,
                     * 10,
                     * Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                     * new Size(20, 20));
                     * gray.ROI = Rectangle.Empty;
                     *
                     * foreach (MCvAvgComp ey in eyesDetected[0])
                     * {
                     *  Rectangle eyeRect = ey.rect;
                     *  eyeRect.Offset(f.rect.X, f.rect.Y);
                     *  currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                     * }
                     */
                }
                t = 0;



                //Show the faces processed and recognized
                imageBoxFrameGrabber.Image = currentFrame;
                names = "";
                //Clear the list(vector) of names
                NamePersons.Clear();
            }
            else
            {
                btn_decrec.Enabled = true;
                label16.Text       = "";
            }
        }
Example #21
        internal string getFaceTag(Bitmap sourceBmp)
        {
            //Get the current frame from the capture device
            currentFrame = new Image<Bgr, byte>(sourceBmp).Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);

            if (currentFrame != null)
            {
                gray_frame = currentFrame.Convert<Gray, Byte>();

                //Face Detector
                MCvAvgComp[][] facesDetected = gray_frame.DetectHaarCascade(
                    Face,
                    1.2,
                    1,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new System.Drawing.Size(20, 20));

                foreach (MCvAvgComp f in facesDetected[0])
                {
                    t = t + 1;
                    result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                    //draw a rectangle around the detected face (drawing disabled below)
                    //currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                    if (trainingImages.ToArray().Length != 0)
                    {
                        //Termination criteria: number of trained images as the max iteration count
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                        //Eigen face recognizer
                        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                           trainingImages.ToArray(),
                           labels.ToArray(),
                           3000,
                           ref termCrit);

                        name = recognizer.Recognize(result);
                        if (!string.IsNullOrEmpty(name)) //null-safe: the original checked name != null only after calling name.Equals
                        {
                            return name;
                        }
                    }
                }
            }
            return "Sanmeet" ;
        }
Example #22
        public void TestEigenObjectRecognizer()
        {
            Image<Gray, Byte>[] images = new Image<Gray, byte>[20];
            for (int i = 0; i < images.Length; i++)
            {
                images[i] = new Image<Gray, byte>(200, 200);
                images[i].SetRandUniform(new MCvScalar(0), new MCvScalar(255));
            }
            MCvTermCriteria termCrit = new MCvTermCriteria(10, 1.0e-6);

            EigenObjectRecognizer rec = new EigenObjectRecognizer(images, ref termCrit);
            foreach (Image<Gray, Byte> img in images)
            {
                rec.Recognize(img);
                //Trace.WriteLine(rec.Recognize(img));
            }
        }
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //grab the current frame from the capture device
            DateTime StarTime = DateTime.Now;
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //convert to grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //detected faces
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //action for each detected element
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //number of faces detected in the scene
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;

            //concatenate the names of the persons recognized
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //display in imageBoxFrameGrabber
            imageBoxFrameGrabber.Image = currentFrame;
            DateTime endTime = DateTime.Now;
            textBox2.Text = (endTime - StarTime).ToString();
            label4.Text = names;
            names = "";
            NamePersons.Clear();
        }
Example #24
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a red rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                if (trainingImages.ToArray().Length != 0)
                {
                    //Termination criteria: number of trained images as the max iteration count
                    MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                    //Eigen face recognizer
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                       trainingImages.ToArray(),
                       labels.ToArray(),
                       3000,
                       ref termCrit);

                    name = recognizer.Recognize(result);

                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));

                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");

                //Set the number of faces detected on the scene
                label3.Text = facesDetected[0].Length.ToString();

            }
            t = 0;

            //Names concatenation of persons recognized
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = currentFrame;
            label4.Text = names;

            names = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
        }
 /// <summary>
 /// Disposes held resources and invokes the garbage collector
 /// </summary>
 public void Dispose()
 {
     recognizer = null;
     trainingImages = null;
     Names_List = null;
     Error = null;
     GC.Collect();
 }
Example #26
        public static void Train()
        {
            List < TrainingItem > items = TrainingItem.Load(trainingSetCSVFile);

            Image<Gray, Byte>[] trainingImages = new Image<Gray, Byte>[items.Count];
            String[] labels = new String[items.Count];

            for (int index = 0; index < items.Count; index++)
            {
                trainingImages[index] = new Image<Gray, byte>(trainingSetDir + @"\" + items[index].FileName);
                labels[index] = Path.GetFileNameWithoutExtension(items[index].Label);
            }

            MCvTermCriteria termCrit = new MCvTermCriteria(maxEigenFaces, 0.001);

            // Initialize Eigen Recognizer Object
            recognizer = new EigenObjectRecognizer(
                trainingImages,
                labels,
                eigenDistanceThreshold,
                ref termCrit);

            // Remove all old eigen images
            DirectoryInfo eigenDir = new DirectoryInfo(eigenImageDir);
            FileInfo[] eigenFiles = eigenDir.GetFiles();
            foreach (FileInfo file in eigenFiles)
                file.Delete();

            // Save the average Image
            recognizer.AverageImage.Save(averageImageName);

            // Save all eigen images
            for (int index = 0; index < recognizer.EigenImages.Length; index++)
            {
                String fileName = eigenImageName + (index + 1) + imageExt;
                recognizer.EigenImages[index].ToBitmap().Save(fileName);
            }

            // Print eigen values
            Console.WriteLine(String.Format("Num eigen values: {0}", recognizer.EigenValues.Length));
            Console.WriteLine(String.Format("Num rows in eigen values: {0}", recognizer.EigenValues[0].Rows));
            Console.WriteLine(String.Format("Num cols in eigen values: {0}", recognizer.EigenValues[0].Cols));
        }
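Once Train() has run, the static recognizer field can classify probes directly; a minimal sketch, where Classify and its parameter are illustrative names and the probe is assumed to match the training image dimensions.

        // Hypothetical companion to Train(): classify one probe image.
        public static string Classify(string imagePath)
        {
            using (Image<Gray, byte> probe = new Image<Gray, byte>(imagePath))
            {
                // Returns the label of the nearest training face in eigenspace,
                // or an empty string when eigenDistanceThreshold is exceeded.
                return recognizer.Recognize(probe);
            }
        }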
Example #27
        public void TestEigenObjects()
        {
            String[] fileNames = new string[] { "stuff.jpg", "squares.gif", "lena.jpg" };

            int width = 100, height = 100;
            MCvTermCriteria termCrit = new MCvTermCriteria(3, 0.001);

            #region using batch method
            Image<Gray, Byte>[] imgs = Array.ConvertAll<String, Image<Gray, Byte>>(fileNames,
            delegate(String file)
            {
                return new Image<Gray, Byte>(file).Resize(width, height, CvEnum.INTER.CV_INTER_LINEAR);
            });

            EigenObjectRecognizer imgRecognizer1 = new EigenObjectRecognizer(imgs, ref termCrit);
            for (int i = 0; i < imgs.Length; i++)
            {
                Assert.AreEqual(i.ToString(), imgRecognizer1.Recognize(imgs[i]));
            }

            XmlDocument xDoc = Toolbox.XmlSerialize<EigenObjectRecognizer>(imgRecognizer1);
            EigenObjectRecognizer imgRecognizer2 = Toolbox.XmlDeserialize<EigenObjectRecognizer>(xDoc);

            for (int i = 0; i < imgs.Length; i++)
            {
                Assert.AreEqual(i.ToString(), imgRecognizer2.Recognize(imgs[i]));
            }

            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
            formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();

            Byte[] bytes;
            using (MemoryStream ms = new MemoryStream())
            {
                formatter.Serialize(ms, imgRecognizer1);
                bytes = ms.GetBuffer();
            }
            using (MemoryStream ms2 = new MemoryStream(bytes))
            {
                EigenObjectRecognizer imgRecognizer3 = (EigenObjectRecognizer)formatter.Deserialize(ms2);
                for (int i = 0; i < imgs.Length; i++)
                {
                    Assert.AreEqual(i.ToString(), imgRecognizer3.Recognize(imgs[i]));
                }
            }
            #endregion
        }
        public User RecognizeFace(Image<Gray, byte> face)
        {
            using (var context = new FaceRecognitionContext())
            {
                var faces = new List<Image<Gray, byte>>();
                var ids = new List<string>();

                foreach (var user in context.Users)
                {
                    var recognizedFace = new Image<Gray, byte>(user.Face.GetBitmap());
                    var id = user.Id.ToString(CultureInfo.InvariantCulture);

                    faces.Add(recognizedFace);
                    ids.Add(id);
                }

                if (ids.Any())
                {
                    var termCrit = new MCvTermCriteria(ids.Count(), 0.001);
                    var recognizedFaces = new EigenObjectRecognizer(faces.ToArray(), ids.ToArray(), 2500, ref termCrit);

                    var label = recognizedFaces.Recognize(face);

                    if (!String.IsNullOrEmpty(label))
                    {
                        var id = int.Parse(label);
                        return context.Users.SingleOrDefault(x => x.Id == id);
                    }
                }

                return null;
            }
        }
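A hedged call-site sketch for RecognizeFace above; the crop-and-resize step mirrors the frame-grabber examples in this listing, and faceRect and the 100x100 size are illustrative assumptions.

                // Hypothetical call site: pass a normalized grayscale crop.
                Image<Gray, byte> crop = currentFrame.Copy(faceRect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                User match = RecognizeFace(crop);
                string display = match != null ? match.Name : "unknown";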
Example #29
        /// <summary>
        /// Runs PCA over the tagged faces and saves a serialized recognizer as eigen.dat
        /// </summary>
        /// <param name="faces">
        /// A <see cref="Face"/> array
        /// </param>
        /// <returns>
        /// A <see cref="EigenValueTags"/>
        /// </returns>
        //private void ProcessPCA(List<FaceTag> faces){
        public EigenValueTags ProcessPCA(Face[] faces)
        {
            Log.Debug("ProcessPCA Started...");
            int MINFACES = 3;
            int numsFaces = faces.Length;

            List<Image<Gray, Byte>> train_imagesList = new List<Image<Gray, byte>>();
            List<string> train_labelsList = new List<string>();

            // load faces from detected data
            List<int> banList = new List<int>();

            // exclude people who have fewer than MINFACES tagged faces
            for(int i=0;i<faces.Length;i++){
                uint cnt = 0;
                for(int j=0;j<faces.Length;j++){
                    if(i==j || faces[i].Tag.Name.Equals(faces[j].Tag.Name)){
                        cnt++;
                    }
                }
                if(cnt < MINFACES)
                    banList.Add(i);
            }

            for(int k=0;k<faces.Length;k++){
                if(faces[k].Tag == null || banList.Contains(k)){
                    //Log.Debug("null Tag :: id = {0}, name = {0}",faces[k].Id,faces[k].Name);
                    continue;
                }

                train_labelsList.Add(faces[k].Tag.Name);
                train_imagesList.Add(ImageTypeConverter.ConvertPixbufToGrayCVImage(faces[k].iconPixbuf));
            }

            //FIXME
            for(int k=0; k<train_imagesList.Count;k++){
                train_imagesList[k] = train_imagesList[k].Resize(100,100);
            }

            string[] train_labels = train_labelsList.ToArray();
            Image<Gray, Byte>[] train_images = train_imagesList.ToArray();

            MCvTermCriteria crit = new MCvTermCriteria(0.0001);
            EigenObjectRecognizer eigenRec = new EigenObjectRecognizer(train_images,train_labels,5000,ref crit);

            string path = Path.Combine (FSpot.Global.BaseDirectory, "eigen.dat");
            // Serialize
            SerializeUtil.Serialize(path, eigenRec);

            // save recognized data into file of eigen value and into EigenValueTags class
            EigenValueTags eigenVtags = RecordEigenValue(eigenRec);

            Log.Debug("ProcessPCA ended...");

            return eigenVtags;
        }
Example #30
        void FrameGrabber(object sender, EventArgs e)
        {
            label3.Text = "0";
            //label4.Text = "";
            NamePersons.Add("");

            //Get the current frame from the capture device
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                    //Convert it to Grayscale
                    gray = currentFrame.Convert<Gray, Byte>();

                    //Face Detector
                    MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                  face,
                  1.2,
                  10,
                  Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                  new Size(20, 20));

                    //Action for each element detected
                    foreach (MCvAvgComp f in facesDetected[0])
                    {
                        t = t + 1;
                        result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                        //draw a red rectangle around the detected face
                        currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

                        if (trainingImages.ToArray().Length != 0)
                        {
                            //Termination criteria: number of trained images as the max iteration count
                        MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

                        //Eigen face recognizer
                        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                           trainingImages.ToArray(),
                           labels.ToArray(),
                           3000,
                           ref termCrit);

                        name = recognizer.Recognize(result);
                        if (!emailSent)
                        {
                            emailSent = true;
                            System.Diagnostics.Debug.WriteLine("sending Email");
                            email_send(currentFrame);
                        }
            //************* THIS WORKS!! *************************//
                        if (name == "" && intruderFrameCount < 20 && sentSMS == false)
                        {
                            intruderFrameCount++;
                        }
                        else if (intruderFrameCount == 20 && sentSMS == false)
                        {
                            sentSMS = true;
                            System.Diagnostics.Debug.WriteLine("sending Email");
                            email_send(currentFrame);
              //                          var request = (HttpWebRequest)WebRequest.Create("http://gb4.site40.net/HeardFood/sendSms.php?body=intruder&to=4105043967");
              //                          var response = (HttpWebResponse)request.GetResponse();
                        }
            //                        intruderFrameCount = name == "" ? intruderFrameCount + 1 : 0;
                        if (f.rect.X > 180 || f.rect.X < 80)
                        {
                            currentFrame.Draw(name + " is distracted", ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Red));
                        }
                        else
                        {

                            currentFrame.Draw(name + " is alert", ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Cyan));
                            //Draw the label for each face detected and recognized

                        }
                        }

                            NamePersons[t-1] = name;
                            NamePersons.Add("");

                        //Set the number of faces detected on the scene
                        label3.Text = facesDetected[0].Length.ToString();

                        /*
                        //Set the region of interest on the faces

                        gray.ROI = f.rect;
                        MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
                           eye,
                           1.1,
                           10,
                           Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                           new Size(20, 20));
                        gray.ROI = Rectangle.Empty;

                        foreach (MCvAvgComp ey in eyesDetected[0])
                        {
                            Rectangle eyeRect = ey.rect;
                            eyeRect.Offset(f.rect.X, f.rect.Y);
                            currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
                        }
                         */

                    }
                        t = 0;

                        //Names concatenation of persons recognized
                    for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
                    {
                        names = names + NamePersons[nnn] + ", ";
                    }
                    //Show the faces processed and recognized
                    imageBoxFrameGrabber.Image = currentFrame;
                    label4.Text = names;
                    names = "";
                    //Clear the list(vector) of names
                    NamePersons.Clear();
        }
Example #31
        /// <summary>
        /// Given savepath and filename, creates a csv file containing the set of eigenvalues.
        /// The csv is formatted for the WEKA classifier.
        /// </summary>
        /// <param name="eigenRec">
        /// A <see cref="EigenObjectRecognizer"/>
        /// </param>
        /// <param name="savepath">
        /// A <see cref="System.String"/>
        /// </param>
        /// <param name="filename">
        /// A <see cref="System.String"/>
        /// </param>
        public static void WriteEigenValueFile(EigenObjectRecognizer eigenRec, string savepath, string filename)
        {
            // don't store more eigenvalues than this number
            const int MAX_EIGEN_LENGTH = 30;

            int nums_train = eigenRec.Labels.Length;

            float[] eigenvalue_float = new float[nums_train];
            float[][] eigenMatrix = new float[nums_train][];

            TextWriter tw = new StreamWriter(savepath+filename+".csv");

            int max_eigenvalueLength = Math.Min(MAX_EIGEN_LENGTH, 4 + nums_train/5);
            if(nums_train < 5)
                max_eigenvalueLength = nums_train;

            // write header
            for(int i=0;i<max_eigenvalueLength;i++){
                tw.Write("a"+i+",");
            }
            tw.WriteLine("class");

            for(int i=0;i<nums_train;i++){
                Emgu.CV.Matrix<float> eigenValue = eigenRec.EigenValues[i];

                for(int k=0; k<max_eigenvalueLength; k++)
                    tw.Write(eigenValue.Data[k,0]+",");

                tw.WriteLine(eigenRec.Labels[i]);
            }
            tw.Close();
        }