Example #1
        private void train_Click(object sender, EventArgs e)
        {
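            // Trains the emotion-recognition pipeline selected in the Methodology combo box
            // on every file found (recursively) under a folder chosen by the user.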
            //string line;
            //DirectoryInfo dir = new DirectoryInfo("E:/libbs");
            //dir.Create();
            //StreamReader file = new StreamReader("SystemLibPath.txt");
            //while ((line = file.ReadLine()) != null)
            //{
            //    Console.WriteLine(line);
            //    FileInfo f = new FileInfo(line);

            //    File.Copy(line, dir.FullName + "/" + f.Name);

            //}
            EDRIPMain.getInstance().loadPredictor();
            FolderBrowserDialog fbd    = new FolderBrowserDialog();
            DialogResult        result = fbd.ShowDialog();

            // Nothing to train on if the user cancels the folder selection.
            if (result != DialogResult.OK)
            {
                return;
            }

            string[] files = Directory.GetFiles(fbd.SelectedPath, "*", SearchOption.AllDirectories);

            // The Methodology index selects which training pipeline to run.
            if (Methodology.SelectedIndex == 0)
            {
                EDRIPMainDistanceBased.getInstance().TrainSystemForEmotionDistancedBased(files);
            }
            else if (Methodology.SelectedIndex == 1)
            {
                EDRIPMainDistanceAndWrinkledBased.getInstance().TrainSystemForEmotionDistanceAndWrinkledBased(files);
            }
            else if (Methodology.SelectedIndex == 2)
            {
                EDRIPMainReducedDimensionsBased.getInstance().TrainSystemForEmotionReducedDimentionsBased(files);
            }
            else if (Methodology.SelectedIndex == 3)
            {
                OpticalflowViewTrain(files);
            }
            else if (Methodology.SelectedIndex == 4)
            {
                EDRIPHybridMethodologyBasedMain.getInstance().TrainSystemForEmotionHybrid(files);
            }
        }
Example #2
        private void Upload_Click(object sender, EventArgs e)
        {
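            // Lets the user pick an image file, runs the selected emotion-detection
            // methodology on it, and shows the resulting image in imageBox1.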
            Stream fileStream = null;

            UploadImage.Multiselect = true;
            if ((UploadImage.ShowDialog() == DialogResult.OK) && (fileStream = UploadImage.OpenFile()) != null)
            {
                // The stream is only opened to verify the file is readable; release it again.
                fileStream.Dispose();

                string   fileName = UploadImage.FileName;
                FileInfo file     = new FileInfo(fileName);
                Mat      ImageMat = new Mat(file.FullName);

                string emotions          = "";
                EmotionBusinessModel mod = new EmotionBusinessModel();
                if (Methodology.SelectedIndex == 0)
                {
                    // Assumption: DetectEmotionsDistanceBased returns an EmotionBusinessModel
                    // like the other detectors; the original snippet did not handle index 0 here.
                    mod = EDRIPMainDistanceBased.getInstance().DetectEmotionsDistanceBased(new Mat[] { ImageMat });
                }
                else if (Methodology.SelectedIndex == 1)
                {
                    mod = EDRIPMainDistanceAndWrinkledBased.getInstance().DetectEmotionsDistanceAndWrinkledBased(new Mat[] { ImageMat }, FDIPMAin.DetectFace(ImageMat));
                }
                else if (Methodology.SelectedIndex == 2)
                {
                    mod = EDRIPMainReducedDimensionsBased.getInstance().DetectEmotionsReducedDimentionsBased(new Mat[] { ImageMat });
                    // emotions = EDRIPMainReducedDimensionsBased.getInstance().DetectEmotionsReducedDimentionsBased(new Mat[] { ImageMat }).outputResult;
                }
                else if (Methodology.SelectedIndex == 3)
                {
                    OpticalflowView(UploadImage);
                    OpticalFlow.Visible = true;
                }
                else if (Methodology.SelectedIndex == 4)
                {
                    EDRIPHybridMethodologyBasedMain.getInstance().testSystemForEmotionHybrid(UploadImage.FileNames);
                }
                // The optical-flow and hybrid branches render their own output and leave the
                // model empty, so only dereference mod when it actually carries detection results.
                if (mod.images != null)
                {
                    emotions = mod.outputMessage;
                    CvInvoke.Resize(ImageMat, ImageMat, new Size(320, 240), 0, 0, Inter.Linear);
                    Image <Bgr, byte> I2 = mod.images[0].image.ToImage <Bgr, byte>();
                    GC.Collect();
                    Logger log = new Logger();
                    log.LogIntoFile(Log.logType.INFO, "message to be written");

                    //  I2.Draw(emotions, new Point(10, 30), FontFace.HersheyPlain, 2.0, new Bgr(10, 0, 255));
                    imageBox1.Image = I2;
                }
            }
        }
Example #3
        private void ProcessFrame(object sender, EventArgs arg)
        {
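            // Application.Idle handler: grabs the next frame from the capture device, runs the
            // selected emotion detector, updates the confidence chart, and refreshes the preview.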
            #region Face Detection Region
            if (captureInProgress)
            {
                //FileInfo file = new FileInfo("E:\\training2\\1.jpg");
                // Mat ImageMat = new Mat(file.FullName);


                Mat ImageMat = capt.QueryFrame();
                imageList.Images.Add(ImageMat.ToImage <Bgr, byte>().ToBitmap());
                // Add one thumbnail entry for the frame that was just captured; re-adding an item
                // for every stored image on each tick would duplicate list view entries.
                ListViewItem item = new ListViewItem();
                item.ImageIndex = imageList.Images.Count - 1;
                imageListView.Items.Add(item);
                string emotions = "";
                if (Methodology.SelectedIndex == 1)
                {
                    emotions = EDRIPMainDistanceAndWrinkledBased.getInstance().DetectEmotionsDistanceAndWrinkledBased(new Mat[] { ImageMat }, FDIPMAin.DetectFace(ImageMat)).outputResult;
                }
                else if (Methodology.SelectedIndex == 0)
                {
                    emotions = EDRIPMainDistanceBased.getInstance().DetectEmotionsDistanceBased(new Mat[] { ImageMat }).outputResult;
                }
                if (!string.IsNullOrEmpty(emotions))
                {
                    // distributionsEmotions.dists holds one confidence value per emotion class,
                    // in the order plotted below.
                    List <double> dfs = distributionsEmotions.dists;

                    foreach (var series in chart2.Series)
                    {
                        series.Points.Clear();
                    }
                    chart2.Series["Conf"].Points.AddXY("anger", dfs[0]);
                    chart2.Series["Conf"].Points.AddXY("smile", dfs[1]);
                    chart2.Series["Conf"].Points.AddXY("sad", dfs[2]);
                    chart2.Series["Conf"].Points.AddXY("surprise", dfs[3]);
                    chart2.Series["Conf"].Points.AddXY("neutral", dfs[4]);
                    chart2.Series["Conf"].Points.AddXY("fear", dfs[5]);
                    chart2.Series["Conf"].Points.AddXY("disgust", dfs[6]);
                }


                // Crop to the first detected face (if any) and normalise its size; the snippet
                // then falls back to displaying the full frame rather than the cropped face.
                List <Rectangle> faces = FDIPMAin.DetectFace(ImageMat);
                Mat shortImage = faces.Count > 0 ? new Mat(ImageMat, faces[0]) : ImageMat;
                CvInvoke.Resize(shortImage, shortImage, new Size(320, 240), 0, 0, Inter.Linear);
                shortImage = ImageMat;
                //if(faces.Count>0&&i%3==0)
                //{

                //    ImageMat.Save("E:\\dataset\\smile\\" + i/3 + ".jpg");
                //}
                //CvInvoke.Imshow("Orignal Face", shortImage);
                faces = new List <Rectangle>();
                faces.Add(new Rectangle(0, 0, shortImage.Width, shortImage.Height));
                Image <Bgr, byte> I2 = shortImage.ToImage <Bgr, byte>();
                GC.Collect();
                Logger log = new Logger();
                log.LogIntoFile(Log.logType.INFO, "message to be written");
                //I2.Draw(emotions, new Point(10, 30), FontFace.HersheyPlain, 2.0, new Bgr(10, 0, 255));
                imageBox1.Image = I2;
                i++;
            }

            else
            {
                imageBox1.Image = capt.QueryFrame();
            }
            #endregion
            if (FrameCount <= TotalFrames && Video.Checked)
            {
                trackBar1.Value = FrameCount;
                FrameCount++;
            }
            else if (Video.Checked)
            {
                FrameCount        = 0;
                trackBar1.Value   = FrameCount;
                Play.Text         = "Play";
                Application.Idle -= ProcessFrame;
            }
        }
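
ProcessFrame is an Application.Idle handler (it detaches itself once the video finishes). A minimal sketch of how it could be attached when playback starts is shown below; the Play_Click handler name and the toggle on the button text are assumptions, not part of the original example:

        // Hypothetical wiring sketch (not from the original example): ProcessFrame runs once per
        // idle tick, so playback starts by attaching it and stops by detaching it.
        private void Play_Click(object sender, EventArgs e)
        {
            if (Play.Text == "Play")
            {
                Application.Idle += ProcessFrame;
                Play.Text         = "Pause";
            }
            else
            {
                Application.Idle -= ProcessFrame;
                Play.Text         = "Play";
            }
        }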