/// <summary>
/// Extracts emotion features (14 facial distances plus two wrinkle measures) from the first detected face.
/// </summary>
/// <param name="ImageMat">Image on which the distances are calculated.</param>
/// <param name="faces">Rectangles of detected faces; only the first rectangle is used.</param>
/// <param name="index">Index of the image in case of multiple images (forwarded to findDistances).</param>
/// <returns>A model populated with the distance and wrinkle features and an empty label;
/// an unpopulated model when <paramref name="faces"/> is empty.</returns>
public EmotionTrainingMLModel FindEmotions(Mat ImageMat, List <Rectangle> faces, int index)
{
    EmotionTrainingMLModel dataModel = new EmotionTrainingMLModel();
    if (faces.Count > 0)
    {
        // Crop to the first face and normalize to a fixed size so distances are comparable across images.
        Mat shortImage = new Mat(ImageMat, faces[0]);
        CvInvoke.Resize(shortImage, shortImage, new Size(320, 240), 0, 0, Inter.Linear);

        // After cropping, the face occupies the whole image; use a local list
        // instead of reassigning the caller's parameter.
        List <Rectangle> croppedFace = new List <Rectangle>();
        croppedFace.Add(new Rectangle(0, 0, shortImage.Width, shortImage.Height));

        List <double> distances = findDistances(shortImage, croppedFace, index);
        double noseWrinkles = findWrinkles(shortImage, onNose);
        double betweenEyesWrinkles = findWrinkles(shortImage, betweenEyes);

        // Copy the 14 distance features into the model via reflection, using the
        // property names in listOfDistances (assumes it has at least 14 entries — matches usage elsewhere).
        for (int i = 0; i < 14; i++)
        {
            PropertyInfo propertyInfo = dataModel.GetType().GetProperty(listOfDistances[i]);
            propertyInfo.SetValue(dataModel, distances[i], null);
        }

        dataModel.noseWrinkles = noseWrinkles;
        dataModel.betweenEyesWrinkles = betweenEyesWrinkles;
        dataModel.Label = "";
    }
    return(dataModel);
}
/// <summary>
/// Finds distance- and wrinkle-based face features and classifies them into an emotion label.
/// </summary>
/// <param name="ImageMat">Input image for feature detection.</param>
/// <param name="faces">List of face rectangles detected in the image.</param>
/// <param name="index">Index of the image in case of multiple images.</param>
/// <returns>The predicted emotion based on distances and wrinkles.</returns>
public string FindEmotionDistanceAndWrinkleBased(Mat ImageMat, List <Rectangle> faces, int index)
{
    // Extract the features, then hand them to the trained distance/wrinkle classifier.
    // (The original allocated a model and immediately overwrote it — dead allocation removed.)
    EDRFeatureExtraction obj = new EDRFeatureExtraction();
    EmotionTrainingMLModel dataModel = obj.FindEmotions(ImageMat, faces, index);
    return(EDRMLMainDistanceAndWrinkleBased.getInstance().getCalculatedEmotionsDistanceAndWrinkleBased(dataModel));
}
/// <summary>
/// Runs the trained MLP (Weka) over the distance and wrinkle features and returns
/// the per-class emotion probabilities.
/// </summary>
/// <param name="emotionDistances">Distance and wrinkle features to score.</param>
/// <returns>Array of emotion probabilities, one per class name.</returns>
public List <double> getCalculatedEmotionsProbilitiesDistanceAndWrinkleBased(EmotionTrainingMLModel emotionDistances)
{
    // Feature names and values must stay in the same order — the classifier
    // matches them positionally.
    string[] featureNames =
    {
        "LEB1_CPLE1", "LEB2_CPLE2", "REB1_CPRE1", "REB2_CPRE2",
        "OPEN_LE1", "OPEN_LE2", "OPEN_RE1", "OPEN_RE2",
        "EXP_MO", "OPEN_MO1", "OPEN_MO2", "OPEN_MO3",
        "NS_LPLIP", "NS_RPLIP", "betweenEyesWrinkles", "noseWrinkles"
    };
    double[] featureValues =
    {
        emotionDistances.LEB1_CPLE1, emotionDistances.LEB2_CPLE2,
        emotionDistances.REB1_CPRE1, emotionDistances.REB2_CPRE2,
        emotionDistances.OPEN_LE1, emotionDistances.OPEN_LE2,
        emotionDistances.OPEN_RE1, emotionDistances.OPEN_RE2,
        emotionDistances.EXP_MO, emotionDistances.OPEN_MO1,
        emotionDistances.OPEN_MO2, emotionDistances.OPEN_MO3,
        emotionDistances.NS_LPLIP, emotionDistances.NS_RPLIP,
        emotionDistances.betweenEyesWrinkles, emotionDistances.noseWrinkles
    };
    string[] emotionClasses = { "anger", "smile", "sad", "surprise", "neutral", "fear", "disgust" };

    string labelHeader = "label";
    string fallbackClass = "neutral";
    string modelPath = (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionMethod2Model);

    return(MLMain.getInstance().testMLPPredictionsUsingWeka(featureNames, emotionClasses, featureValues, labelHeader, fallbackClass, modelPath));
}
/// <summary>
/// Builds training samples from a set of image files: for each file with a detectable face,
/// extracts the 14 distance features and two wrinkle features and labels the sample with
/// the name of the file's parent directory.
/// </summary>
/// <param name="files">List of image file paths to train from.</param>
/// <returns>One populated <see cref="EmotionTrainingMLModel"/> per file in which a face was found.</returns>
public List <EmotionTrainingMLModel> TrainSystemForEmotion(string[] files)
{
    using (Logger logger = new Logger())
    {
        List <EmotionTrainingMLModel> emotionList = new List <EmotionTrainingMLModel>();
        int index = 0;
        foreach (string file in files)
        {
            logger.LogIntoFile(Log.logType.INFO, (string)ConstantsLoader.getInstance().getValue(EnumConstant.emotionTrainingMessage));
            Mat image = new Mat(file);
            // Original allocated a list and immediately overwrote it — dead allocation removed.
            List <Rectangle> faces = FDIPMAin.DetectFace(image);
            if (faces.Count > 0)
            {
                EDRFeatureExtraction featureExtracter = new EDRFeatureExtraction();

                // Crop to the first face and normalize size, matching FindEmotions.
                Mat shortImage = new Mat(image, faces[0]);
                CvInvoke.Resize(shortImage, shortImage, new Size(320, 240), 0, 0, Inter.Linear);
                faces = new List <Rectangle>();
                faces.Add(new Rectangle(0, 0, shortImage.Width, shortImage.Height));

                List <double> distances = featureExtracter.findDistances(shortImage, faces, index);

                // Copy the 14 distance features into the model via reflection.
                EmotionTrainingMLModel dataModel = new EmotionTrainingMLModel();
                for (int i = 0; i < 14; i++)
                {
                    PropertyInfo propertyInfo = dataModel.GetType().GetProperty(listOfDistances[i]);
                    propertyInfo.SetValue(dataModel, distances[i], null);
                }
                dataModel.noseWrinkles = featureExtracter.findWrinkles(shortImage, featureExtracter.getOnNose());
                dataModel.betweenEyesWrinkles = featureExtracter.findWrinkles(shortImage, featureExtracter.getBetweenEyes());

                // Training label convention: the parent directory name is the emotion class.
                dataModel.Label = Path.GetFileName(Path.GetDirectoryName(file));
                emotionList.Add(dataModel);
            }
            index++;
        }
        return(emotionList);
    }
}
/// <summary>
/// Maps an IP-layer training model to its ML-layer equivalent, field by field.
/// </summary>
/// <param name="emotion">IP-layer model to convert.</param>
/// <returns>ML-layer model carrying the same feature values and label.</returns>
public EmotionTrainingMLModel ConvertFromBusinessObject(EmotionTrainingDataModel emotion)
{
    // NOTE(review): argument order here places OPEN_MO1..3 before EXP_MO, unlike the
    // feature ordering used elsewhere — presumably it matches the constructor's
    // parameter list; verify against EmotionTrainingMLModel before reordering.
    return(new EmotionTrainingMLModel(
        emotion.LEB1_CPLE1, emotion.LEB2_CPLE2,
        emotion.REB1_CPRE1, emotion.REB2_CPRE2,
        emotion.OPEN_LE1, emotion.OPEN_LE2,
        emotion.OPEN_RE1, emotion.OPEN_RE2,
        emotion.OPEN_MO1, emotion.OPEN_MO2, emotion.OPEN_MO3,
        emotion.EXP_MO,
        emotion.NS_LPLIP, emotion.NS_RPLIP,
        emotion.noseWrinkles, emotion.betweenEyesWrinkles,
        emotion.Label));
}