/// <summary>
        /// Predict emotion on the basis of wrinkles and distances.
        /// </summary>
        /// <param name="ImageMats">Input images; only the first image is analysed.</param>
        /// <param name="facess">Face rectangles previously detected on the first image.</param>
        /// <returns>Business model containing images of different steps and output result.
        /// On failure, <c>success</c> is false, <c>outputResult</c> is "neutral" and
        /// <c>outputMessage</c> carries the formatted exception text.</returns>
        public EmotionBusinessModel DetectEmotionsDistanceAndWrinkledBased(Mat[] ImageMats, List <Rectangle> facess)
        {
            EmotionBusinessModel output = new EmotionBusinessModel();

            EmotionBusinessModel.outputmsgs = new List <string>();
            try
            {
                using (Logger logger = new Logger())
                {
                    // Colour legend used when annotating recognised emotions.
                    keyPairs.Add(new KeyValuePair <string, Color>("smile", Color.Yellow));
                    keyPairs.Add(new KeyValuePair <string, Color>("sad", Color.Blue));
                    keyPairs.Add(new KeyValuePair <string, Color>("surprise", Color.SlateGray));
                    keyPairs.Add(new KeyValuePair <string, Color>("anger", Color.Red));
                    keyPairs.Add(new KeyValuePair <string, Color>("disgust", Color.Purple));
                    keyPairs.Add(new KeyValuePair <string, Color>("fear", Color.Black));
                    keyPairs.Add(new KeyValuePair <string, Color>("neutral", Color.Green));

                    Mat ImageMat = ImageMats[0];
                    logger.LogIntoFile(Log.logType.INFO, (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionDetectionMessage));

                    // Use the caller-supplied faces directly (the original allocated a
                    // fresh list here and immediately overwrote the reference).
                    List <Rectangle> faces = facess;
                    output.faceRect = new List <Rectangle>();
                    foreach (Rectangle face in faces)
                    {
                        // Each face is classified individually; outputResult ends up
                        // holding the emotion of the LAST face processed.
                        List <Rectangle> fcs = new List <Rectangle> { face };
                        output.outputResult = EDRIPDistanceAndWrinklesBasedFeatures.getInstance().FindEmotionDistanceAndWrinkleBased(ImageMat, fcs, 0);
                        EmotionBusinessModel.outputmsgs.Add(output.outputResult);
                        output.faceRect.Add(face);
                    }

                    imageModel img = new imageModel();
                    if (faces.Count > 0)
                    {
                        // Annotate only the first detected face on the result image.
                        img.image = CvmlUtility.getInstance().annotate(ImageMat, faces[0], output.outputResult, Color.Blue);
                    }

                    img.label = "Final emotion detected image with rectangle on face";
                    output.images.Add(img);
                    output.outputMessage = "Emotion detected successfully in given image using wrinkled and distance based approach";
                    output.success       = true;
                }
            }
            catch (Exception e)
            {
                // BUG FIX: the formatted exception text was computed and then discarded;
                // surface it to the caller via outputMessage instead of losing it.
                output.outputMessage = ExceptionHandle.GetExceptionMessage(e);
                output.success       = false;
                output.outputResult  = "neutral";
                output.e             = e;
            }
            return(output);
        }
// Example #2 — stray scraper artifact ("Пример #2" / vote count) between methods; commented out so the file stays parseable.
        /// <summary>
        /// Handles the Upload button: lets the user pick image file(s), runs the emotion
        /// detection methodology selected in the Methodology combo box, and shows the
        /// annotated result in imageBox1.
        /// </summary>
        /// <param name="sender">Event source (the Upload button).</param>
        /// <param name="e">Event arguments.</param>
        private void Upload_Click(object sender, EventArgs e)
        {
            UploadImage.Multiselect = true;
            if (UploadImage.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            // BUG FIX: the probe stream used to verify the file opens was never
            // disposed (handle leak); dispose it immediately after the check.
            using (Stream fileStream = UploadImage.OpenFile())
            {
                if (fileStream == null)
                {
                    return;
                }
            }

            FileInfo file     = new FileInfo(UploadImage.FileName);
            Mat      ImageMat = new Mat(file.FullName);

            EmotionBusinessModel mod;
            if (Methodology.SelectedIndex == 1)
            {
                mod = EDRIPMainDistanceAndWrinkledBased.getInstance().DetectEmotionsDistanceAndWrinkledBased(new Mat[] { ImageMat }, FDIPMAin.DetectFace(ImageMat));
            }
            else if (Methodology.SelectedIndex == 2)
            {
                mod = EDRIPMainReducedDimensionsBased.getInstance().DetectEmotionsReducedDimentionsBased(new Mat[] { ImageMat });
            }
            else if (Methodology.SelectedIndex == 3)
            {
                // Optical-flow methodology renders into its own view; nothing to show here.
                // BUG FIX: previously fell through to mod.images[0] on an empty model and crashed.
                OpticalflowView(UploadImage);
                OpticalFlow.Visible = true;
                return;
            }
            else if (Methodology.SelectedIndex == 4)
            {
                // Hybrid methodology drives its own test flow; same fall-through crash fixed.
                EDRIPHybridMethodologyBasedMain.getInstance().testSystemForEmotionHybrid(UploadImage.FileNames);
                return;
            }
            else
            {
                // No methodology selected: bail out instead of dereferencing an empty model.
                return;
            }

            CvInvoke.Resize(ImageMat, ImageMat, new Size(320, 240), 0, 0, Inter.Linear);
            Image <Bgr, byte> I2 = mod.images[0].image.ToImage <Bgr, byte>();

            // Logger is IDisposable (the sibling method wraps it in using); do the same here.
            using (Logger log = new Logger())
            {
                log.LogIntoFile(Log.logType.INFO, "message to be written");
            }

            imageBox1.Image = I2;
        }