/// <summary>
/// Predict emotion on the basis of wrinkles and distances
/// </summary>
/// <param name="ImageMats">Input images on which emotions need to be detected</param>
/// <param name="facess">Face rectangles already detected in the first image</param>
/// <returns>Business model containing images of the different steps and the output result</returns>
public EmotionBusinessModel DetectEmotionsDistanceAndWrinkledBased(Mat[] ImageMats, List<Rectangle> facess)
{
    EmotionBusinessModel output = new EmotionBusinessModel();
    EmotionBusinessModel.outputmsgs = new List<string>();
    try
    {
        using (Logger logger = new Logger())
        {
            // Annotation colour for each emotion class.
            keyPairs.Add(new KeyValuePair<string, Color>("smile", Color.Yellow));
            keyPairs.Add(new KeyValuePair<string, Color>("sad", Color.Blue));
            keyPairs.Add(new KeyValuePair<string, Color>("surprise", Color.SlateGray));
            keyPairs.Add(new KeyValuePair<string, Color>("anger", Color.Red));
            keyPairs.Add(new KeyValuePair<string, Color>("disgust", Color.Purple));
            keyPairs.Add(new KeyValuePair<string, Color>("fear", Color.Black));
            keyPairs.Add(new KeyValuePair<string, Color>("neutral", Color.Green));

            Mat ImageMat = ImageMats[0];
            logger.LogIntoFile(Log.logType.INFO,
                (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionDetectionMessage));

            List<Rectangle> faces = facess;
            output.faceRect = new List<Rectangle>();

            // Classify each detected face individually.
            foreach (Rectangle face in faces)
            {
                List<Rectangle> fcs = new List<Rectangle> { face };
                output.outputResult = EDRIPDistanceAndWrinklesBasedFeatures.getInstance()
                    .FindEmotionDistanceAndWrinkleBased(ImageMat, fcs, 0);
                EmotionBusinessModel.outputmsgs.Add(output.outputResult);
                output.faceRect.Add(face);
            }

            // Annotate the first face with the predicted label.
            imageModel img = new imageModel();
            if (faces.Count > 0)
            {
                img.image = CvmlUtility.getInstance().annotate(ImageMat, faces[0], output.outputResult, Color.Blue);
            }
            img.label = "Final emotion detected image with rectangle on face";
            output.images.Add(img);
            output.outputMessage = "Emotion detected successfully in given image using wrinkle and distance based approach";
            output.success = true;
        }
    }
    catch (Exception e)
    {
        // Surface the exception message instead of discarding it, and fall back to "neutral".
        output.outputMessage = ExceptionHandle.GetExceptionMessage(e);
        output.success = false;
        output.outputResult = "neutral";
        output.e = e;
    }
    return output;
}
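A minimal usage sketch follows (assumptions: this method lives on the EDRIPMain singleton alongside loadPredictor, and FDIPMAin.DetectFace supplies the face rectangles, as elsewhere in this codebase; "test.jpg" is illustrative):

// Hypothetical driver: load an image, find faces, then classify the emotion.
Mat frame = new Mat("test.jpg");                     // Emgu CV: load the image from disk
List<Rectangle> faces = FDIPMAin.DetectFace(frame);  // face detection from this project

EmotionBusinessModel result = EDRIPMain.getInstance()
    .DetectEmotionsDistanceAndWrinkledBased(new Mat[] { frame }, faces);
if (result.success)
{
    Console.WriteLine("Detected emotion: " + result.outputResult);
}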
/// <summary>
/// Train the machine based on wrinkles and distances
/// </summary>
/// <param name="emotionDistances">Distance and wrinkle features to train emotions on</param>
public void trainSystemForEmotionDistanceAndWrinkleBased(List<EmotionTrainingMLModel> emotionDistances)
{
    // Persist the new samples, reload the full training set, regenerate the ARFF file,
    // and retrain the Weka classifier.
    db.saveToDbDistanceAndWrinkleBased(EDRMLUtil.getInstance().ConvertToDataModels(emotionDistances));
    List<EmotionDataModel> emotionsDistances = db.getDataFromDatabaseDistanceAndWrinkleBased();
    createWekaFileDistanceAndWrinkleBased(emotionsDistances);
    MLMain.getInstance().trainMachineForEmotionUsingWeka(
        (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionMethod2Arff),
        (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionMethod2Model));
}
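A hedged end-to-end training sketch (the data path and folder layout are assumptions; TrainSystemForEmotion, shown further below, derives each sample's label from its parent directory name):

// Hypothetical training driver; expects per-emotion subfolders,
// e.g. C:\data\emotions\smile\img1.jpg. Requires System.IO.
string[] files = Directory.GetFiles(@"C:\data\emotions", "*.jpg", SearchOption.AllDirectories);
List<EmotionTrainingMLModel> samples = TrainSystemForEmotion(files);  // feature extraction (defined below)
trainSystemForEmotionDistanceAndWrinkleBased(samples);                // persist, build ARFF, retrain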
/// <summary>
/// Load a serialized Weka MultilayerPerceptron model from the models directory
/// </summary>
private static weka.classifiers.functions.MultilayerPerceptron loadModel(String name, java.io.File path)
{
    // Java object deserialization via IKVM; note the "path" parameter is currently unused.
    weka.classifiers.functions.MultilayerPerceptron classifier;
    FileInputStream fis = new FileInputStream(
        (string)ConstantsLoader.getInstance().getValue(EnumConstant.libsPath) + "\\models\\" + "models" + name + ".model");
    ObjectInputStream ois = new ObjectInputStream(fis);
    classifier = (weka.classifiers.functions.MultilayerPerceptron)ois.readObject();
    ois.close();
    return classifier;
}
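For symmetry, a sketch of the save side this loader expects (assumptions: models are written with the same IKVM java.io serialization, and saveModel is a hypothetical helper, not part of the original code):

// Hypothetical counterpart to loadModel: Java object serialization through IKVM streams.
private static void saveModel(weka.classifiers.functions.MultilayerPerceptron classifier, String name)
{
    FileOutputStream fos = new FileOutputStream(
        (string)ConstantsLoader.getInstance().getValue(EnumConstant.libsPath) + "\\models\\" + "models" + name + ".model");
    ObjectOutputStream oos = new ObjectOutputStream(fos);
    oos.writeObject(classifier);
    oos.flush();
    oos.close();
}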
public Form2()
{
    InitializeComponent();
    CvmlUtility.getInstance().setDllSearchDirectory((string)ConstantsLoader.getInstance().getValue(EnumConstant.libsPath));
    EDRIPMain.getInstance().loadPredictor();

    // Get the set of DirectShow devices that are video inputs and list them in the combo box.
    DsDevice[] captureDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    cameraList.DisplayMember = "Text";
    cameraList.ValueMember = "Value";
    for (int idx = 0; idx < captureDevices.Length; idx++)
    {
        cameraList.Items.Add(new { Text = idx + ":" + captureDevices.ElementAt(idx).Name, Value = idx });
    }
}
/// <summary>
/// Train the system using multiple image files, then call the ML algorithm to train
/// </summary>
/// <param name="files">List of file names to train the machine with; each file's parent directory name is used as its emotion label</param>
/// <returns>Returns a list of ML-layer training models</returns>
public List<EmotionTrainingMLModel> TrainSystemForEmotion(string[] files)
{
    using (Logger logger = new Logger())
    {
        List<EmotionTrainingMLModel> emotionList = new List<EmotionTrainingMLModel>();
        int index = 0;
        foreach (string file in files)
        {
            logger.LogIntoFile(Log.logType.INFO,
                (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionTrainingMessage));
            Mat image = new Mat(file);
            List<Rectangle> faces = FDIPMAin.DetectFace(image);
            if (faces.Count > 0)
            {
                EDRFeatureExtraction featureExtracter = new EDRFeatureExtraction();

                // Crop to the first detected face and normalize its size.
                Mat shortImage = new Mat(image, faces[0]);
                CvInvoke.Resize(shortImage, shortImage, new Size(320, 240), 0, 0, Inter.Linear);
                faces = new List<Rectangle> { new Rectangle(0, 0, shortImage.Width, shortImage.Height) };

                // Copy the 14 distance features into the model via reflection.
                List<double> distances = featureExtracter.findDistances(shortImage, faces, index);
                EmotionTrainingMLModel dataModel = new EmotionTrainingMLModel();
                for (int i = 0; i < 14; i++)
                {
                    PropertyInfo propertyInfo = dataModel.GetType().GetProperty(listOfDistances[i]);
                    propertyInfo.SetValue(dataModel, distances.ElementAt(i), null);
                }
                dataModel.noseWrinkles = featureExtracter.findWrinkles(shortImage, featureExtracter.getOnNose());
                dataModel.betweenEyesWrinkles = featureExtracter.findWrinkles(shortImage, featureExtracter.getBetweenEyes());

                // The emotion label is the name of the directory containing the file.
                dataModel.Label = Path.GetFileName(Path.GetDirectoryName(file));
                emotionList.Add(dataModel);
            }
            index++;
        }
        return emotionList;
    }
}
/// <summary>
/// Create the Weka ARFF file of wrinkle and distance features
/// </summary>
/// <param name="emotionsDistances">Training samples to serialize into the ARFF file</param>
public void createWekaFileDistanceAndWrinkleBased(List<EmotionDataModel> emotionsDistances)
{
    // One comma-separated data row per training sample, ending with the class label.
    List<string> features = new List<string>();
    foreach (EmotionDataModel obj in emotionsDistances)
    {
        string wekaData = obj.LEB1_CPLE1 + "," + obj.LEB2_CPLE2 + "," + obj.REB1_CPRE1 + "," + obj.REB2_CPRE2 + ","
            + obj.OPEN_LE1 + "," + obj.OPEN_LE2 + "," + obj.OPEN_RE1 + "," + obj.OPEN_RE2 + ","
            + obj.EXP_MO + "," + obj.OPEN_MO1 + "," + obj.OPEN_MO2 + "," + obj.OPEN_MO3 + ","
            + obj.NS_LPLIP + "," + obj.NS_RPLIP + ","
            + obj.betweenEyesWrinkles + "," + obj.noseWrinkles + "," + obj.Label;
        features.Add(wekaData);
    }

    // Attribute declarations: 16 numeric features plus the nominal class attribute.
    string[] lOD =
    {
        "LEB1_CPLE1 numeric", "LEB2_CPLE2 numeric", "REB1_CPRE1 numeric", "REB2_CPRE2 numeric",
        "OPEN_LE1 numeric", "OPEN_LE2 numeric", "OPEN_RE1 numeric", "OPEN_RE2 numeric",
        "EXP_MO numeric", "OPEN_MO1 numeric", "OPEN_MO2 numeric", "OPEN_MO3 numeric",
        "NS_LPLIP numeric", "NS_RPLIP numeric", "betweenEyesWrinkles numeric", "noseWrinkles numeric"
    };
    List<string> attributes = lOD.ToList<string>();
    string classHeader = "label {anger, smile, sad, surprise, neutral, fear, disgust}";
    attributes.Add(classHeader);

    MLMain.getInstance().createWekaFile("%% weka file for methodology2 emotion", "Emotion", attributes, features,
        (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionMethod2Arff));
}
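Assuming MLMain.createWekaFile emits standard ARFF, the generated emotionMethod2Arff file would look roughly like this (the feature values in the data row are illustrative, not real data):

% weka file for methodology2 emotion
@relation Emotion
@attribute LEB1_CPLE1 numeric
@attribute LEB2_CPLE2 numeric
@attribute REB1_CPRE1 numeric
@attribute REB2_CPRE2 numeric
@attribute OPEN_LE1 numeric
@attribute OPEN_LE2 numeric
@attribute OPEN_RE1 numeric
@attribute OPEN_RE2 numeric
@attribute EXP_MO numeric
@attribute OPEN_MO1 numeric
@attribute OPEN_MO2 numeric
@attribute OPEN_MO3 numeric
@attribute NS_LPLIP numeric
@attribute NS_RPLIP numeric
@attribute betweenEyesWrinkles numeric
@attribute noseWrinkles numeric
@attribute label {anger, smile, sad, surprise, neutral, fear, disgust}
@data
0.42,0.38,0.41,0.37,0.12,0.11,0.12,0.10,0.55,0.20,0.18,0.22,0.31,0.30,14.2,9.7,smile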
/// <summary>
/// Get the calculated emotion probabilities after applying the machine learning algorithm based on wrinkles and distances
/// </summary>
/// <param name="emotionDistances">Distance and wrinkle features for which to test emotion probabilities</param>
/// <returns>Returns an array of emotion probabilities, one per class</returns>
public List<double> getCalculatedEmotionsProbilitiesDistanceAndWrinkleBased(EmotionTrainingMLModel emotionDistances)
{
    string[] attributeArray =
    {
        "LEB1_CPLE1", "LEB2_CPLE2", "REB1_CPRE1", "REB2_CPRE2",
        "OPEN_LE1", "OPEN_LE2", "OPEN_RE1", "OPEN_RE2",
        "EXP_MO", "OPEN_MO1", "OPEN_MO2", "OPEN_MO3",
        "NS_LPLIP", "NS_RPLIP", "betweenEyesWrinkles", "noseWrinkles"
    };
    string[] classNames = { "anger", "smile", "sad", "surprise", "neutral", "fear", "disgust" };
    double[] dataValues =
    {
        emotionDistances.LEB1_CPLE1, emotionDistances.LEB2_CPLE2, emotionDistances.REB1_CPRE1, emotionDistances.REB2_CPRE2,
        emotionDistances.OPEN_LE1, emotionDistances.OPEN_LE2, emotionDistances.OPEN_RE1, emotionDistances.OPEN_RE2,
        emotionDistances.EXP_MO, emotionDistances.OPEN_MO1, emotionDistances.OPEN_MO2, emotionDistances.OPEN_MO3,
        emotionDistances.NS_LPLIP, emotionDistances.NS_RPLIP, emotionDistances.betweenEyesWrinkles, emotionDistances.noseWrinkles
    };
    string classHeader = "label";
    string defaultclass = "neutral";
    return MLMain.getInstance().testMLPPredictionsUsingWeka(attributeArray, classNames, dataValues, classHeader, defaultclass,
        (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionMethod2Model));
}
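A minimal consumer sketch (assumption: the returned probabilities are in the same order as classNames above; "features" stands in for an already populated EmotionTrainingMLModel):

// Hypothetical caller: pick the most probable class from the returned distribution.
// Requires System.Linq for Max().
List<double> probs = getCalculatedEmotionsProbilitiesDistanceAndWrinkleBased(features);
string[] classNames = { "anger", "smile", "sad", "surprise", "neutral", "fear", "disgust" };
int best = probs.IndexOf(probs.Max());   // index of the highest probability (argmax)
Console.WriteLine("Predicted: " + classNames[best] + " (p=" + probs[best] + ")");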