/// <summary>
/// Predicts the emotion in an image based on wrinkle and distance features.
/// </summary>
/// <param name="ImageMats">Input images; the first one is used for detection.</param>
/// <param name="facess">Face rectangles previously detected in the image.</param>
/// <returns>Business model containing the images of each step and the output result.</returns>
public EmotionBusinessModel DetectEmotionsDistanceAndWrinkledBased(Mat[] ImageMats, List<Rectangle> facess)
{
    EmotionBusinessModel output = new EmotionBusinessModel();
    EmotionBusinessModel.outputmsgs = new List<string>();
    try
    {
        using (Logger logger = new Logger())
        {
            // Map each emotion label to the colour used when annotating it.
            keyPairs.Add(new KeyValuePair<string, Color>("smile", Color.Yellow));
            keyPairs.Add(new KeyValuePair<string, Color>("sad", Color.Blue));
            keyPairs.Add(new KeyValuePair<string, Color>("surprise", Color.SlateGray));
            keyPairs.Add(new KeyValuePair<string, Color>("anger", Color.Red));
            keyPairs.Add(new KeyValuePair<string, Color>("disgust", Color.Purple));
            keyPairs.Add(new KeyValuePair<string, Color>("fear", Color.Black));
            keyPairs.Add(new KeyValuePair<string, Color>("neutral", Color.Green));

            Mat ImageMat = ImageMats[0];
            logger.LogIntoFile(Log.logType.INFO,
                (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionDetectionMessage));

            List<Rectangle> faces = facess;
            //faces = FDIPMAin.DetectFace(ImageMat);
            output.faceRect = new List<Rectangle>();

            // Classify each detected face individually.
            foreach (Rectangle face in faces)
            {
                List<Rectangle> fcs = new List<Rectangle> { face };
                output.outputResult = EDRIPDistanceAndWrinklesBasedFeatures.getInstance()
                    .FindEmotionDistanceAndWrinkleBased(ImageMat, fcs, 0);
                EmotionBusinessModel.outputmsgs.Add(output.outputResult);
                output.faceRect.Add(face);
            }

            imageModel img = new imageModel();
            if (faces.Count > 0)
            {
                // Note: only the first face is annotated, using the most recent result.
                img.image = CvmlUtility.getInstance().annotate(ImageMat, faces[0], output.outputResult, Color.Blue);
            }
            //img.image = drawRect(ImageMat.ToImage<Bgr, byte>(), faces[0], output.outputResult);
            img.label = "Final emotion detected image with rectangle on face";
            output.images.Add(img);
            output.outputMessage = "Emotion detected successfully in the given image using the wrinkle and distance based approach";
            output.success = true;
        }
    }
    catch (Exception e)
    {
        ExceptionHandle.GetExceptionMessage(e);
        output.success = false;
        output.outputResult = "neutral";
        output.e = e;
    }
    return output;
}
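// Usage sketch (illustrative; not part of the original sample): how this method might be
// called end-to-end. It assumes Emgu CV's CvInvoke.Imread for loading the image and the
// FDIPMAin.DetectFace call referenced in the commented-out line above for face detection;
// "face.jpg" and the method name DetectEmotionExample are placeholders.
public void DetectEmotionExample()
{
    Mat input = CvInvoke.Imread("face.jpg", ImreadModes.Color);
    List<Rectangle> faces = FDIPMAin.DetectFace(input);
    EmotionBusinessModel result = DetectEmotionsDistanceAndWrinkledBased(new[] { input }, faces);
    if (result.success)
    {
        // outputResult holds the label of the last classified face, e.g. "smile".
        Console.WriteLine("Detected emotion: " + result.outputResult);
    }
}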
public Form2()
{
    InitializeComponent();
    CvmlUtility.getInstance().setDllSearchDirectory(
        (string)ConstantsLoader.getInstance().getValue(EnumConstant.libsPath));
    EDRIPMain.getInstance().loadPredictor();

    // Get the set of DirectShow devices that are video inputs.
    DsDevice[] captureDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    cameraList.DisplayMember = "Text";
    cameraList.ValueMember = "Value";
    for (int idx = 0; idx < captureDevices.Length; idx++)
    {
        // List each camera as "index:name" so the index can be recovered on selection.
        cameraList.Items.Add(new { Text = idx + ":" + captureDevices[idx].Name, Value = idx });
    }
}
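// Usage sketch (illustrative; not from the original sample): reading the camera index back
// from the cameraList selection and opening it with Emgu CV's VideoCapture. The handler
// name startButton_Click and the use of `dynamic` to read the anonymous-type item added
// above are assumptions.
private void startButton_Click(object sender, EventArgs e)
{
    if (cameraList.SelectedItem == null)
        return;
    dynamic selected = cameraList.SelectedItem; // anonymous type { Text, Value } added above
    int deviceIndex = selected.Value;           // index into the DirectShow device list
    var capture = new VideoCapture(deviceIndex);
    capture.ImageGrabbed += (s, args) =>
    {
        using (Mat frame = new Mat())
        {
            capture.Retrieve(frame);            // pull the grabbed frame
            // frame can now be passed to the emotion-detection pipeline
        }
    };
    capture.Start();
}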