Fisher face recognizer
Inherits: FaceRecognizer
        private bool LoadTrainingData()
        {
            // Pull the label ids, label names and training images from the database.
            mydb = new DBConn();
            imagelabel = mydb.getLabelNumList().ToArray();
            imageStringlabel = mydb.getLabelList().ToArray();
            trainingImages = mydb.getTrainedImageList();
            Itrainingimage = trainingImages;

            // Nothing stored or nothing loaded -> report failure to the caller.
            if (mydb.getImageCount() <= 0 || trainingImages.Length == 0)
            {
                return false;
            }

            // 0 components lets OpenCV choose automatically; 123.0 is the
            // prediction distance threshold.
            f_recognize = new FisherFaceRecognizer(0, 123.0);
            f_recognize.Train(Itrainingimage, imagelabel);
            return true;
        }
Example #2
0
        static void train(string trainingSheetPath, string outMatrixPath)
        {
            // Trains the Fisher recognizer from a CSV-like training sheet
            // ("<relativePath>;<label>" per line) and saves the model to outMatrixPath.
            try
            {
                string genderFilesdirectory = Path.GetDirectoryName(trainingSheetPath);
                int imageSize = 64; // all samples are normalized to 64x64
                using (StreamReader sr = new StreamReader(trainingSheetPath))
                {
                    string line = sr.ReadLine();
                    while (line != null)
                    {
                        string[] data = line.Split(';');
                        // NOTE(review): plain concatenation assumes data[0] already
                        // starts with a directory separator — confirm the sheet format.
                        string filePath = genderFilesdirectory + data[0];
                        imageList.Add(new Image<Gray, byte>(filePath).Resize(imageSize, imageSize, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        labelList.Add(int.Parse(data[1]));
                        line = sr.ReadLine();
                    }
                }

                faceRecognizer = new FisherFaceRecognizer(imageList.Count, imageList.Count + 75);
                faceRecognizer.Train(imageList.ToArray(), labelList.ToArray());
                faceRecognizer.Save(outMatrixPath);
            }
            catch (Exception)
            {
                // "throw;" preserves the original stack trace ("throw ex;" reset it).
                throw;
            }
        }
        public override int FaceRecognition()
        {
            // Runs face detection on the user image and on every database image,
            // then estimates how similar the user's face is to the trained set.
            FaceDetect(this.userImage);

            foreach (CImage tempImage in lDatabaseImages)
            {
                FaceDetect(tempImage);
            }

            if (this.lDatabaseImages.Count != 0)
            {
                // Train once, outside the per-face loop — the model does not
                // change between detected faces (the original retrained per face).
                FisherFaceRecognizer modelRecognition = new FisherFaceRecognizer(0, 3500);
                modelRecognition.Train(this.lDatabaseImages.Select(w => w.grayImage).ToArray(), lIntDatabaseImagesLabels.ToArray());

                // Empirical mapping from prediction distance to similarity:
                // 10 -> 99%; 100 -> 90%; 750 -> 50%; 10000 -> 1% (linear scale).
                float threshold = 750;              // distance at ~50% similarity (empirical)
                float thresholdMismatch = 10000;    // distance at ~1% similarity (empirical)

                foreach (MCvAvgComp f in aDetectedFaces[0])
                {
                    FaceRecognizer.PredictionResult resultRecognition = modelRecognition.Predict(userImage.grayImage);
                    if (resultRecognition.Distance < threshold)
                        similarityDegree = (100 - (resultRecognition.Distance * 50.0 / threshold)).ToString();
                    else
                        similarityDegree = (50 - (resultRecognition.Distance * 50 / thresholdMismatch)).ToString();
                }
            }

            return 0;
        }
Example #4
0
        private void faceAnalysis()
        {
            // Classifies the emotion of every captured frame with a Fisher face
            // recognizer trained on a fixed fallback data set (used while the
            // real database is not ready), then renders the result as a pie chart.

            // Build the 70 training file names data-driven instead of 70 literal
            // assignments (same files, same order as before).
            string[] fileNames = new string[70];
            fileNames[0] = "netural1.jpg";
            fileNames[1] = "sad1.jpg";
            fileNames[2] = "angry1.jpg";
            for (int i = 0; i < 7; i++)                 // indices 3..9  -> happy1..happy7
                fileNames[3 + i] = "happy" + (i + 1) + ".jpg";
            for (int i = 0; i < 22; i++)                // indices 10..31 -> 375..396
                fileNames[10 + i] = (375 + i) + ".jpg";
            for (int i = 0; i < 4; i++)                 // indices 32..35 -> 545..548
                fileNames[32 + i] = (545 + i) + ".jpg";
            for (int i = 0; i < 12; i++)                // indices 36..47 -> 579..590
                fileNames[36 + i] = (579 + i) + ".jpg";
            for (int i = 0; i < 22; i++)                // indices 48..69 -> 557..578
                fileNames[48 + i] = (557 + i) + ".jpg";

            Image<Gray, byte>[] imageList = new Image<Gray, byte>[fileNames.Length];
            for (int i = 0; i < fileNames.Length; i++)
                imageList[i] = new Image<Gray, byte>(@"..\..\emotionData\" + fileNames[i]);

            // Emotion name per training image: 0 neutral, 1 sad, 2 angry,
            // 3..9 smile, everything else angry (same mapping as before).
            String[] emoList = new String[70];
            emoList[0] = "Netural";
            emoList[1] = "Sad";
            emoList[2] = "Angry";
            for (int i = 3; i <= 9; i++)
                emoList[i] = "Smile";
            for (int i = 10; i < emoList.Length; i++)
                emoList[i] = "Angry";

            // One distinct label per training image (0..69).
            int[] label = Enumerable.Range(0, 70).ToArray();

            // Per-frame emotion counters.
            int smile = 0;
            int angry = 0;
            int netural = 0;
            int sad = 0;
            int noDetect = 0;
            totalFrame = grayFrameList.Count;

            // Crop and normalize every detected face from the training images.
            List<Image<Gray, byte>> trainfaceList = new List<Image<Gray, byte>>();
            for (int i = 0; i < imageList.Length; i++)
            {
                Rectangle[] trainFace = faceHaar.DetectMultiScale(imageList[i], 1.1, 20, new Size(20, 20), Size.Empty);
                foreach (Rectangle f in trainFace)
                {
                    trainfaceList.Add(imageList[i].Copy(f).Resize(64, 64, INTER.CV_INTER_CUBIC));
                }
            }
            // NOTE(review): if detection finds zero or multiple faces in a training
            // image, trainfaceList.Count no longer matches label.Length and Train()
            // receives mismatched data — confirm each image yields exactly one face.

            double maxDistance = 1000; // prediction distance threshold (range ~1000..5000)
            FisherFaceRecognizer fisher = new FisherFaceRecognizer(imageList.Length, maxDistance);
            fisher.Train(trainfaceList.ToArray(), label);

            // Predict the emotion of each frame and tally the results.
            for (int i = 0; i < totalFrame; i++)
            {
                FaceRecognizer.PredictionResult result = fisher.Predict(grayFrameList.ElementAt(i));
                lbmsg.Text = totalFrame.ToString();
                int num = result.Label;

                if (num == -1)
                {
                    faceEmotions.Add("No Detect");
                    noDetect++;
                }
                else
                {
                    string emotion = emoList[num];
                    faceEmotions.Add(emotion);
                    switch (emotion)
                    {
                        case "Smile": smile++; break;
                        case "Sad": sad++; break;
                        case "Angry": angry++; break;
                        case "Netural": netural++; break;
                    }
                }
            }

            if (angry > smile && angry > sad && angry > netural)
            {
                lbEmotionConclusion2.Text = "There is a risk of child abuse";
            }

            // Convert the counts to percentages of the total frame count.
            double smilePer = (Convert.ToDouble(smile) / totalFrame) * 100;
            double sadPer = (Convert.ToDouble(sad) / totalFrame) * 100;
            double angryPer = (Convert.ToDouble(angry) / totalFrame) * 100;
            double neturalPer = (Convert.ToDouble(netural) / totalFrame) * 100;
            double noDetectPer = (Convert.ToDouble(noDetect) / totalFrame) * 100;

            lbmsg.Text = "Smile Count:" + smile.ToString() + " ,Sad Count:" + sad.ToString() + " ,Angry Count:" + angry.ToString() + " ,Netural Count:" + netural.ToString() + " ,No Detect Count:" + noDetect.ToString();
            String[] emotionList = { "Smile", "Sad", "Angry", "Netural", "No Detect" };
            double[] emotionPer = { smilePer, sadPer, angryPer, neturalPer, noDetectPer };

            // Pie chart setup.
            emotionChart.Series.Add("Emotion");
            emotionChart.Series["Emotion"].ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.Pie;
            emotionChart.Series["Emotion"].ChartArea = "ChartArea1";

            // Add one slice per emotion category.
            for (int i = 0; i < emotionList.Length; i++)
            {
                emotionChart.Series["Emotion"].Points.AddXY(emotionList[i], emotionPer[i]);
            }

            emotionChart.Series["Emotion"].BorderWidth = 1;
            emotionChart.Series["Emotion"].BorderColor = System.Drawing.Color.FromArgb(26, 59, 105);
            emotionChart.Series["Emotion"].Points[0].Color = System.Drawing.Color.LightGreen;
            emotionChart.Series["Emotion"].Points[1].Color = System.Drawing.Color.Blue;
            emotionChart.Series["Emotion"].Points[2].Color = System.Drawing.Color.Red;
            emotionChart.Series["Emotion"].Points[3].Color = System.Drawing.Color.Gray;
            emotionChart.Series["Emotion"].Points[4].Color = System.Drawing.Color.Black;

            emotionChart.Titles.Add("Anaylsis of Face Expression (Frame By Frame)");
            emotionChart.Series["Emotion"]["PieLabelStyle"] = "Disabled";

            emotionChart.Legends.Add("Legend1");
            emotionChart.Legends["Legend1"].Enabled = true;
            emotionChart.Legends["Legend1"].Docking = Docking.Bottom;
            emotionChart.Legends["Legend1"].Alignment = System.Drawing.StringAlignment.Center;
            emotionChart.Series["Emotion"].LegendText = "#VALX (#PERCENT{P2}) ";
            emotionChart.DataManipulator.Sort(PointSortOrder.Ascending, emotionChart.Series["Emotion"]);
        }
 public void Dispose()
 {
     // Release the recognizer and the cached training data. Disposing each
     // disposable resource frees its unmanaged buffers deterministically
     // instead of nulling the fields and forcing a full GC (GC.Collect()
     // in production code is an anti-pattern).
     IDisposable recognizerDisposable = f_recognize as IDisposable;
     if (recognizerDisposable != null)
     {
         recognizerDisposable.Dispose();
     }
     f_recognize = null;
     if (trainingImages != null)
     {
         foreach (var img in trainingImages)
         {
             IDisposable imgDisposable = img as IDisposable;
             if (imgDisposable != null)
             {
                 imgDisposable.Dispose();
             }
         }
     }
     trainingImages = null;
     imagelabel = null;
     imageStringlabel = null;
 }
Example #6
0
        public string saveEmployee(Image newImage, string name, string middleName, string lastName, string email, FaceRecognizerMethode faceRecognizerMethode)
        {
            // Registers (or finds) an employee by email, stores the detected face
            // image under a GUID file name and retrains the selected recognizer.
            // Returns a Result enum name describing the outcome.
            unitOfWork = new GenericUnitOfWork();
            GenericRepository<Employee> employeeRepo = unitOfWork.GetRepoInstance<Employee>();

            // The email is unique. FirstOrDefault avoids using an exception for
            // ordinary control flow (the original caught First()'s throw).
            Employee employee = employeeRepo.GetAllRecords().Where<Employee>(e => e.email == email).FirstOrDefault<Employee>();

            // Add the employee if it does not exist yet.
            if (employee == null)
            {
                Debug.WriteLine("Nuevo usuario");
                employee = new Employee { name = name, middleName = middleName, lastName = lastName, email = email };
                employeeRepo.Add(employee);
                unitOfWork.SaveChanges();
            }

            // The face image is saved with a GUID as its name.
            GenericRepository<DistanceResult> distanceResultRepo = unitOfWork.GetRepoInstance<DistanceResult>();
            Guid guid = Guid.NewGuid();

            var inputImage = new Image<Bgr, Byte>(new Bitmap(newImage));
            Rectangle[] rectangleFace = detection(inputImage, pathXMLHaarcascade);

            // detection(..) can find N faces; exactly one is required here.
            if (rectangleFace.Length <= 0)
            {
                return Result.NoDetected.ToString();
            }
            if (rectangleFace.Length > 1)
            {
                return Result.MultipleFacesDetected.ToString();
            }

            Image<Gray, byte> grayFrame = toGrayEqualizeFrame(inputImage);
            Image<Gray, Byte> faceEMGUCV = formatRectangleFaces(grayFrame.ToBitmap(), rectangleFace[0]);
            faceEMGUCV._EqualizeHist();
            faceEMGUCV.Save(pathImg + @"\" + guid.ToString() + ".Jpeg");

            // Choose the recognizer implementation requested by the caller.
            FaceRecognizer faceRecognition;
            switch (faceRecognizerMethode.ToString())
            {
                case "EigenFaceRecognizerMethode":
                    faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
                    break;
                case "FisherFaceRecognizerMethode":
                    faceRecognition = new FisherFaceRecognizer(numComponentsFisher, thresholdFisher);
                    break;
                case "LBPHFaceRecognizerMethode":
                    faceRecognition = new LBPHFaceRecognizer(radiusLBPH, neighborsLBPH, gridXLBPH, gridYLBPH, thresholdLBPH);
                    break;
                default:
                    return null;
            }

            double distance = 2; // placeholder distance stored with the new sample

            // Save the registration record.
            DistanceResult dist = new DistanceResult();
            // BUG FIX: record the algorithm actually requested — the original
            // always stored FisherFaceRecognizerMethode regardless of the parameter.
            dist.algorithm = faceRecognizerMethode.ToString();
            dist.employeeId = employee.employeeId;
            dist.photoName = guid.ToString();
            dist.distance = distance;
            distanceResultRepo.Add(dist);

            unitOfWork.SaveChanges();

            // Reload every stored face/label pair for retraining.
            int lengthArrays = distanceResultRepo.GetAllRecords().Count();
            imagesDB = new Image<Gray, Byte>[lengthArrays];
            labels = new int[lengthArrays];
            int i = 0;
            foreach (DistanceResult di in distanceResultRepo.GetAllRecords())
            {
                imagesDB[i] = new Image<Gray, Byte>(pathImg + @"\" + di.photoName + ".Jpeg");
                labels[i] = di.employeeId;
                i++;
            }

            // Training needs at least two employees (two classes).
            if (employeeRepo.GetAllRecords().Count() > 1)
            {
                faceRecognition.Train(imagesDB, labels);
                faceRecognition.Save(pathImg + @"\" + "TrainingSet");
            }
            return Result.Saved.ToString();
        }
Example #7
0
        public EmployeeStructure[] recognizeMultipleFaces(Image newImage, FaceRecognizerMethode faceRecognizerMethode)
        {
            // Detects every face on the image, predicts each one against the
            // saved training set and returns one EmployeeStructure per face.
            var inputImage = new Image<Bgr, Byte>(new Bitmap(newImage));
            Rectangle[] rectangleFace = detection(inputImage, this.pathXMLHaarcascade);
            EmployeeStructure[] employeeStructure;

            if (rectangleFace.Length <= 0)
            {
                // BUG FIX: the array needs one slot to report "NoDetected" — the
                // original allocated length 0 and then indexed [0] (IndexOutOfRange).
                // NOTE(review): assumes EmployeeStructure is a struct; if it is a
                // class, the element must be constructed before setting .result.
                employeeStructure = new EmployeeStructure[1];
                employeeStructure[0].result = Result.NoDetected.ToString();
                return employeeStructure;
            }

            Image<Gray, byte> grayFrame = toGrayEqualizeFrame(inputImage);
            employeeStructure = new EmployeeStructure[rectangleFace.Length];

            // Choose the recognizer implementation requested by the caller.
            FaceRecognizer faceRecognition;
            switch (faceRecognizerMethode.ToString())
            {
                case "EigenFaceRecognizerMethode":
                    faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
                    break;
                case "FisherFaceRecognizerMethode":
                    faceRecognition = new FisherFaceRecognizer(numComponentsFisher, thresholdFisher);
                    break;
                case "LBPHFaceRecognizerMethode":
                    faceRecognition = new LBPHFaceRecognizer(radiusLBPH, neighborsLBPH, gridXLBPH, gridYLBPH, thresholdLBPH);
                    break;
                default:
                    faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
                    break;
            }

            faceRecognition.Load(pathImg + @"\" + "TrainingSet");

            // NOTE(review): the shared unitOfWork/repository is used from parallel
            // iterations — confirm it is thread-safe or create one per iteration.
            Parallel.For(0, rectangleFace.Length, i =>
            {
                Image<Gray, byte> faceEMGUCV = formatRectangleFaces(grayFrame.ToBitmap(), rectangleFace[i]);
                FaceRecognizer.PredictionResult ER = faceRecognition.Predict(faceEMGUCV);

                if (ER.Label != -1)
                {
                    int label = ER.Label;

                    GenericRepository<Employee> emplyeeRepo = unitOfWork.GetRepoInstance<Employee>();
                    Employee em = emplyeeRepo.GetFirstOrDefault(label);

                    // BUG FIX: use face i's rectangle — the original always used face 0.
                    employeeStructure[i] = new EmployeeStructure(Result.Recognized.ToString(), em.name, em.middleName, em.lastName, em.email, rectangleFace[i].X, rectangleFace[i].Y, rectangleFace[i].Width, rectangleFace[i].Height);
                }
                else
                {
                    // BUG FIX: only mark Unknown when not recognized — the original
                    // unconditionally overwrote the Recognized result.
                    employeeStructure[i].result = Result.Unknown.ToString();
                }
            });

            return employeeStructure;
        }
Example #8
0
        private void faceAnalysis()
        {
            //Data for face emotion when data base is not ready.
            Image<Gray, byte>[] imageList = new Image<Gray, Byte>[70];
            imageList[0] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/netural1.jpg");
            imageList[1] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/sad1.jpg");
            imageList[2] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/angry1.jpg");
            imageList[3] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/happy1.jpg");
            imageList[4] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/happy2.jpg");
            imageList[5] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/happy3.jpg");
            imageList[6] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/happy4.jpg");
            imageList[7] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/happy5.jpg");
            imageList[8] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/happy6.jpg");
            imageList[9] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/happy7.jpg");
            imageList[10] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/375.jpg");
            imageList[11] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/376.jpg");
            imageList[12] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/377.jpg");
            imageList[13] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/378.jpg");
            imageList[14] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/379.jpg");
            imageList[15] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/380.jpg");
            imageList[16] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/381.jpg");
            imageList[17] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/382.jpg");
            imageList[18] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/383.jpg");
            imageList[19] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/384.jpg");
            imageList[20] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/385.jpg");
            imageList[21] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/386.jpg");
            imageList[22] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/387.jpg");
            imageList[23] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/388.jpg");
            imageList[24] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/389.jpg");
            imageList[25] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/390.jpg");
            imageList[26] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/391.jpg");
            imageList[27] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/392.jpg");
            imageList[28] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/393.jpg");
            imageList[29] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/394.jpg");
            imageList[30] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/395.jpg");
            imageList[31] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/396.jpg");
            imageList[32] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/545.jpg");
            imageList[33] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/546.jpg");
            imageList[34] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/547.jpg");
            imageList[35] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/548.jpg");
            imageList[36] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/579.jpg");
            imageList[37] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/580.jpg");
            imageList[38] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/581.jpg");
            imageList[39] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/582.jpg");
            imageList[40] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/583.jpg");
            imageList[41] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/584.jpg");
            imageList[42] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/585.jpg");
            imageList[43] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/586.jpg");
            imageList[44] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/587.jpg");
            imageList[45] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/588.jpg");
            // --- Training data: hard-coded image paths (indices 0-45 are loaded above this span) ---
            imageList[46] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/589.jpg");
            imageList[47] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/590.jpg");
            // Indices 48-69 map to the consecutive files 557.jpg .. 578.jpg (509 + i).
            for (int i = 48; i <= 69; i++)
            {
                imageList[i] = new Image<Gray, byte>("C:/Users/L33506/Desktop/emotionData/" + (509 + i) + ".jpg");
            }

            // Emotion name for each training image: emoList[i] labels imageList[i].
            // Layout: [0]=Netural, [1]=Sad, [3..9]=Smile, everything else from 2 up=Angry.
            // NOTE(review): "Netural" is a misspelling of "Neutral", but it is used
            // consistently as a runtime string key throughout this method, so it is kept.
            String[] emoList = new String[70];
            emoList[0] = "Netural";
            emoList[1] = "Sad";
            for (int i = 2; i < emoList.Length; i++)
            {
                emoList[i] = (i >= 3 && i <= 9) ? "Smile" : "Angry";
            }

            // Use the variable to detect the number of times these emotions appear in the video by frames.
            noDetect = 0;
            totalFrame = grayFrameList.Count;
            List<Image<Gray, byte>> trainfaceList = new List<Image<Gray, byte>>();
            // BUG FIX: the original code trained with a fixed 70-entry label array
            // { 0..69 }, but Haar detection below can return zero or several faces per
            // training image, so the face count and label count could diverge (Train
            // would throw or silently mislabel). Labels are now accumulated alongside
            // each detected face so the two arrays always stay in sync.
            List<int> trainLabelList = new List<int>();

            // Crop every detected face out of the training images and resize to the
            // common 64x64 training size.
            Rectangle[] trainFace;
            for (int i = 0; i < imageList.Length; i++)
            {
                trainFace = faceHaar.DetectMultiScale(imageList[i], 1.1, 20, new Size(20, 20), Size.Empty);
                foreach (Rectangle f in trainFace)
                {
                    trainfaceList.Add(imageList[i].Copy(f).Resize(64, 64, INTER.CV_INTER_CUBIC));
                    trainLabelList.Add(i); // label == index of the source training image
                }
            }

            // Detect the face in the frames and do emotion detection using the Fisher face recognizer.
            double maxDistance = 1000; //-> Range from 1000 to 5000; predictions beyond it come back as label -1.

            // First ctor argument is the number of Fisherface components; OpenCV clamps
            // it to (numClasses - 1) internally, so passing imageList.Length is harmless.
            FisherFaceRecognizer fisher = new FisherFaceRecognizer(imageList.Length, maxDistance);
            fisher.Train(trainfaceList.ToArray(), trainLabelList.ToArray());
            FaceRecognizer.PredictionResult result = new FaceRecognizer.PredictionResult();

            // NOTE(review): Fisherface Predict requires inputs the same size as the
            // 64x64 training crops — confirm grayFrameList holds pre-cropped/resized
            // face images, not raw frames.
            // NOTE(review): smile/sad/angry/neutral are fields that are not reset here —
            // confirm they start at zero before this method runs.
            for (int i = 0; i < totalFrame; i++)
            {
                result = fisher.Predict(grayFrameList.ElementAt(i));
                int num = result.Label;

                if (num == -1)
                {
                    // Distance exceeded maxDistance: treat as "no face / no match".
                    faceEmotions.Add("No Detect");
                    noDetect++;
                }
                else
                {
                    faceEmotions.Add(emoList[num]);

                    if (emoList[num] == "Smile")
                    {
                        smile++;
                    }
                    else if (emoList[num] == "Sad")
                    {
                        sad++;
                    }
                    else if (emoList[num] == "Angry")
                    {
                        angry++;
                    }
                    else if (emoList[num] == "Netural")
                    {
                        neutral++;
                    }
                    // emoList only ever holds the four names above, so no other branch
                    // is reachable.
                }
            }

            // Convert each emotion count into a percentage of total frames processed
            // (emotion / frames * 100).
            double smilePer = (Convert.ToDouble(smile) / totalFrame) * 100;
            double sadPer = (Convert.ToDouble(sad) / totalFrame) * 100;
            double angryPer = (Convert.ToDouble(angry) / totalFrame) * 100;
            double neturalPer = (Convert.ToDouble(neutral) / totalFrame) * 100;
            double noDetectPer = (Convert.ToDouble(noDetect) / totalFrame) * 100;

            //lbmsg.Text = "Smile Count:" + smile.ToString() + " ,Sad Count:" + sad.ToString() + " ,Angry Count:" + angry.ToString() + " ,Netural Count:" + netural.ToString() + " ,No Detect Count:" + noDetect.ToString();
            // NOTE(review): these two arrays are never consumed before the method
            // returns — presumably intended for a chart/summary control; confirm the
            // intended use or remove them.
            String[] emotionList = { "Smile", "Sad", "Angry", "Netural", "No Detect" };
            double[] emotionPer = { smilePer, sadPer, angryPer, neturalPer, noDetectPer };
        }