/// <summary>
        /// Predict the emotion based on facial distances and wrinkles
        /// </summary>
        /// <param name="bmp">Input image on which emotions are to be detected</param>
        /// <returns>Business model containing the images of the intermediate steps and the output result</returns>
        public EmotionBusinessModel DetectEmotionsDistanceAndWrinkledBased(Bitmap bmp)
        {
            // Wrap the bitmap in an Emgu CV image and delegate to the Mat-based
            // overload, passing along the detected face rectangles.
            Image<Bgr, Byte> img = new Image<Bgr, Byte>(bmp);

            Mat[] mats = new Mat[1];
            mats[0] = img.Mat;

            return DetectEmotionsDistanceAndWrinkledBased(mats, FDIPMAin.DetectFace(mats[0]));
        }
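A minimal usage sketch for this overload, assuming it lives on the same EDRIPMainDistanceAndWrinkledBased singleton called in the later examples (the sample path is hypothetical; outputResult is the field read in Code Example #3):

    // Hypothetical caller: run the distance-and-wrinkle detector on one photo.
    Bitmap photo = new Bitmap(@"C:\samples\face.jpg");   // assumed sample image
    EmotionBusinessModel result = EDRIPMainDistanceAndWrinkledBased.getInstance()
                                  .DetectEmotionsDistanceAndWrinkledBased(photo);
    Console.WriteLine(result.outputResult);              // e.g. "smile"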
Code Example #2
        /// <summary>
        /// Train the system from multiple files, then hand the samples to the ML layer
        /// </summary>
        /// <param name="files">List of file names to train the machine with</param>
        /// <returns>Returns a list of training models for the ML layer</returns>
        public List<EmotionTrainingMLModel> TrainSystemForEmotion(string[] files)
        {
            using (Logger logger = new Logger())
            {
                List<EmotionTrainingMLModel> emotionList = new List<EmotionTrainingMLModel>();
                int index = 0;
                foreach (string file in files)
                {
                    logger.LogIntoFile(Log.logType.INFO, (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionTrainingMessage));
                    Mat image = new Mat(file);
                    List<Rectangle> faces = FDIPMAin.DetectFace(image);

                    if (faces.Count > 0)
                    {
                        EDRFeatureExtraction featureExtracter = new EDRFeatureExtraction();

                        // Crop to the first detected face and normalise its size.
                        Mat shortImage = new Mat(image, faces[0]);
                        CvInvoke.Resize(shortImage, shortImage, new Size(320, 240), 0, 0, Inter.Linear);

                        // The crop now is the whole image, so the face rectangle spans the full frame.
                        faces = new List<Rectangle> { new Rectangle(0, 0, shortImage.Width, shortImage.Height) };
                        List<double> distances = featureExtracter.findDistances(shortImage, faces, index);

                        EmotionTrainingMLModel dataModel = new EmotionTrainingMLModel();
                        // Copy the 14 facial distances onto the model's properties by name.
                        for (int i = 0; i < 14; i++)
                        {
                            PropertyInfo propertyInfo = dataModel.GetType().GetProperty(listOfDistances[i]);
                            propertyInfo.SetValue(dataModel, distances.ElementAt(i), null);
                        }
                        dataModel.noseWrinkles        = featureExtracter.findWrinkles(shortImage, featureExtracter.getOnNose());
                        dataModel.betweenEyesWrinkles = featureExtracter.findWrinkles(shortImage, featureExtracter.getBetweenEyes());
                        // The parent directory name doubles as the emotion label.
                        dataModel.Label = Path.GetFileName(Path.GetDirectoryName(file));
                        emotionList.Add(dataModel);
                    }
                    index++;
                }
                return emotionList;
            }
        }
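A sketch of how the trainer might be driven. The folder layout and the trainer variable are assumptions; the method above derives each sample's Label from the parent directory of the file, so one sub-folder per emotion is implied:

    // Hypothetical layout: E:\training2\smile\*.jpg, E:\training2\anger\*.jpg, ...
    string[] trainingFiles = Directory.GetFiles(@"E:\training2", "*.jpg",
                                                SearchOption.AllDirectories);
    List<EmotionTrainingMLModel> samples = trainer.TrainSystemForEmotion(trainingFiles);
    // samples can now be handed to the ML layer for training.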
Code Example #3
        private void ProcessFrame(object sender, EventArgs arg)
        {
            #region Face Detection Region
            if (captureInProgress)
            {
                Mat ImageMat = capt.QueryFrame();
                imageList.Images.Add(ImageMat.ToImage<Bgr, byte>().ToBitmap());
                // Add one thumbnail entry for the frame just captured; re-adding an
                // item per stored image on every frame would duplicate the list.
                ListViewItem item = new ListViewItem();
                item.ImageIndex = imageList.Images.Count - 1;
                imageListView.Items.Add(item);
                string emotions = "";
                if (Methodology.SelectedIndex == 1)
                {
                    emotions = EDRIPMainDistanceAndWrinkledBased.getInstance().DetectEmotionsDistanceAndWrinkledBased(new Mat[] { ImageMat }, FDIPMAin.DetectFace(ImageMat)).outputResult;
                }
                else if (Methodology.SelectedIndex == 0)
                {
                    emotions = EDRIPMainDistanceBased.getInstance().DetectEmotionsDistanceBased(new Mat[] { ImageMat }).outputResult;
                }
                if (!string.IsNullOrEmpty(emotions))
                {
                    // Per-emotion confidence scores produced by the detector.
                    List<double> dfs = distributionsEmotions.dists;

                    foreach (var series in chart2.Series)
                    {
                        series.Points.Clear();
                    }
                    chart2.Series["Conf"].Points.AddXY("anger", dfs[0]);
                    chart2.Series["Conf"].Points.AddXY("smile", dfs[1]);
                    chart2.Series["Conf"].Points.AddXY("sad", dfs[2]);
                    chart2.Series["Conf"].Points.AddXY("surprise", dfs[3]);
                    chart2.Series["Conf"].Points.AddXY("neutral", dfs[4]);
                    chart2.Series["Conf"].Points.AddXY("fear", dfs[5]);
                    chart2.Series["Conf"].Points.AddXY("disgust", dfs[6]);
                }


                // Crop to the first detected face (fall back to the whole frame) and normalise its size.
                List<Rectangle> faces = FDIPMAin.DetectFace(ImageMat);
                Mat shortImage = faces.Count > 0 ? new Mat(ImageMat, faces[0]) : ImageMat;
                CvInvoke.Resize(shortImage, shortImage, new Size(320, 240), 0, 0, Inter.Linear);
                faces = new List<Rectangle> { new Rectangle(0, 0, shortImage.Width, shortImage.Height) };
                Image<Bgr, byte> I2 = shortImage.ToImage<Bgr, byte>();
                GC.Collect();
                using (Logger log = new Logger())
                {
                    log.LogIntoFile(Log.logType.INFO, "message to be written");
                }
                //I2.Draw(emotions, new Point(10, 30), FontFace.HersheyPlain, 2.0, new Bgr(10, 0, 255));
                imageBox1.Image = I2;
                i++;
            }
            else
            {
                imageBox1.Image = capt.QueryFrame();
            }
            #endregion
            if (FrameCount <= TotalFrames && Video.Checked)
            {
                trackBar1.Value = FrameCount;
                FrameCount++;
            }
            else if (Video.Checked)
            {
                FrameCount        = 0;
                trackBar1.Value   = FrameCount;
                Play.Text         = "Play";
                Application.Idle -= ProcessFrame;
            }
        }
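ProcessFrame is written for the WinForms idle loop: it unsubscribes itself with Application.Idle -= ProcessFrame when the video ends. A hypothetical Play button handler that starts the loop could look like this (capt, captureInProgress and Play are the fields used above; VideoCapture is the Emgu CV capture class, named Capture in older versions):

    private void Play_Click(object sender, EventArgs e)
    {
        capt = new VideoCapture(0);        // default webcam; a video file path also works
        captureInProgress = true;
        Application.Idle += ProcessFrame;  // process one frame whenever the UI is idle
        Play.Text = "Pause";
    }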
Code Example #4
        private void Upload_Click(object sender, EventArgs e)
        {
            UploadImage.Multiselect = true;
            if (UploadImage.ShowDialog() == DialogResult.OK)
            {
                FileInfo file     = new FileInfo(UploadImage.FileName);
                Mat      ImageMat = new Mat(file.FullName);

                EmotionBusinessModel mod = new EmotionBusinessModel();
                if (Methodology.SelectedIndex == 1)
                {
                    mod = EDRIPMainDistanceAndWrinkledBased.getInstance().DetectEmotionsDistanceAndWrinkledBased(new Mat[] { ImageMat }, FDIPMAin.DetectFace(ImageMat));
                }
                else if (Methodology.SelectedIndex == 2)
                {
                    mod = EDRIPMainReducedDimensionsBased.getInstance().DetectEmotionsReducedDimentionsBased(new Mat[] { ImageMat });
                }
                else if (Methodology.SelectedIndex == 3)
                {
                    // The optical-flow view renders its own output, so there is no model to display here.
                    OpticalflowView(UploadImage);
                    OpticalFlow.Visible = true;
                    return;
                }
                else if (Methodology.SelectedIndex == 4)
                {
                    EDRIPHybridMethodologyBasedMain.getInstance().testSystemForEmotionHybrid(UploadImage.FileNames);
                    return;
                }
                string emotions = mod.outputMessage;
                CvInvoke.Resize(ImageMat, ImageMat, new Size(320, 240), 0, 0, Inter.Linear);
                Image<Bgr, byte> I2 = mod.images[0].image.ToImage<Bgr, byte>();
                GC.Collect();
                using (Logger log = new Logger())
                {
                    log.LogIntoFile(Log.logType.INFO, "message to be written");
                }

                //  I2.Draw(emotions, new Point(10, 30), FontFace.HersheyPlain, 2.0, new Bgr(10, 0, 255));
                imageBox1.Image = I2;
            }
        }
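The hybrid branch above reads UploadImage.FileNames, which only returns multiple paths when Multiselect is enabled, as the handler does before showing the dialog. A minimal setup sketch for the dialog component (the filter string is an assumption):

    // Hypothetical initialisation of the UploadImage OpenFileDialog.
    UploadImage.Filter = "Image files|*.jpg;*.jpeg;*.png;*.bmp";
    UploadImage.Title  = "Select image(s) for emotion detection";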