Example #1
        /// <summary>
        /// Callback for when the <see cref="PredictEmotionButton"/> is clicked. Launches a background task that calls <see cref="MachineLearning.PredictEmotion"/> with the
        /// <see cref="currentImageFileInfo"/> and <see cref="selectedFaceDataType"/>; the results are then used to update <see cref="PredictedEmotionMainLabel"/> and
        /// <see cref="PredictedEmotionsListBox"/>.
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">The event data.</param>
        private async void PredictEmotionButton_Click(object sender, RoutedEventArgs e)
        {
            var originalLabel = PredictEmotionButton.Content;

            PredictedEmotionMainLabel.Content = "Predicting...";
            PredictedEmotionsListBox.Items.Clear();
            PredictEmotionButton.Content = $"{originalLabel} (Predicting...)";
            PredictEmotionButton.IsEnabled = false;

            FaceOutput predictedEmotion = null;
            Dictionary<string, float> predictedEmotionsWithAllLabels = null;

            try
            {
                MachineLearning.DrawPointsOfLandmarks(currentImageFileInfo);

                // Run the prediction on a thread-pool thread so the UI stays responsive.
                await Task.Run(() =>
                {
                    MachineLearning.PredictEmotion(currentImageFileInfo, selectedFaceDataType, out var prediction, out var predictionWithLabels);
                    predictedEmotion = prediction;
                    predictedEmotionsWithAllLabels = predictionWithLabels;
                });

                PredictedEmotionMainLabel.Content = $"{FirstLetterToUpper(predictedEmotion.PredictedEmotion)}" +
                                                    $" ({FloatToPercent(predictedEmotionsWithAllLabels[predictedEmotion.PredictedEmotion])})";

                // List the emotions from highest to lowest score. Iterating the ordered sequence
                // directly avoids relying on Dictionary preserving insertion order.
                foreach (var labelAndScore in predictedEmotionsWithAllLabels.OrderByDescending(c => c.Value))
                {
                    PredictedEmotionsListBox.Items.Add(
                        $"{FirstLetterToUpper(labelAndScore.Key)}: {FloatToPercent(labelAndScore.Value)}");
                }

                // The top-scoring emotion is already shown in the main label, so drop it from the list.
                PredictedEmotionsListBox.Items.RemoveAt(0);
            }
            finally
            {
                // Restore the button even if the prediction throws.
                PredictEmotionButton.Content = originalLabel;
                PredictEmotionButton.IsEnabled = true;
            }
        }
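
For reference, the same call can be exercised outside the UI. A minimal console sketch, assuming the MachineLearning class and FaceData1 type above are available (the class name and file name here are hypothetical placeholders):

    using System;
    using System.IO;
    using System.Linq;

    internal static class PredictionDemo
    {
        private static void Main()
        {
            var image = new FileInfo("sample_face.jpg"); // hypothetical sample image

            MachineLearning.PredictEmotion(image, typeof(FaceData1),
                                           out var prediction, out var allScores);

            Console.WriteLine($"Top emotion: {prediction.PredictedEmotion}");
            foreach (var labelAndScore in allScores.OrderByDescending(p => p.Value))
            {
                Console.WriteLine($"{labelAndScore.Key}: {labelAndScore.Value:P1}");
            }
        }
    }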
    /// <summary>
    /// Predict the emotion of an image.
    /// </summary>
    /// <param name="imageFileInfo"><see cref="FileInfo"/> of the image file.</param>
    /// <param name="TFaceData">Type of face data that the parameters should be used.</param>
    /// <param name="predictedEmotion">The emotion that was predicted.</param>
    /// <param name="predictedEmotionWithAllLabels">All the other emotions with their scores appended after.</param>
    public static void PredictEmotion(FileInfo imageFileInfo, Type TFaceData, out FaceOutput predictedEmotion, out Dictionary<string, float> predictedEmotionWithAllLabels)
    {
        // Lazily create the MLContext and load the trained model on first use.
        // Note: the cached model is not reloaded if TFaceData changes between calls.
        if (mlContext == null)
        {
            mlContext = new MLContext();
        }

        if (model == null)
        {
            model = mlContext.Model.Load(GetModelZipFileName(TFaceData), out _);
        }

        predictedEmotion = null;
        predictedEmotionWithAllLabels = null;

        // Not using generics because each face data type requires different function calls anyway
        // (one possible generic refactor is sketched after this method).
        if (TFaceData == typeof(FaceData1))
        {
            using (var predictor = mlContext.Model.CreatePredictionEngine<FaceData1, FaceOutput>(model))
            {
                using (var fd = Dlib.GetFrontalFaceDetector())
                using (var sp = ShapePredictor.Deserialize(GetFile(ShapePredictorFileName).FullName))
                {
                    var faceDataFromImage = GetFaceData1FromImage(imageFileInfo, sp, fd, false);
                    faceDataFromImage.Emotion = ""; // Clear the label; the emotion is what we want to predict.

                    predictedEmotion = predictor.Predict(faceDataFromImage);
                }

                // Pair each score with its emotion label, taken from the key values of the "Label" column.
                predictedEmotionWithAllLabels = new Dictionary<string, float>();
                var slotNames = new VBuffer<ReadOnlyMemory<char>>();
                predictor.OutputSchema.GetColumnOrNull("Label")?.GetKeyValues(ref slotNames);
                var num = 0;
                foreach (var denseValue in slotNames.DenseValues())
                {
                    predictedEmotionWithAllLabels.Add(denseValue.ToString(), predictedEmotion.Scores[num++]);
                }

                Console.WriteLine("Predicted Emotion: " + predictedEmotion.PredictedEmotion);
                Console.WriteLine($"Scores: {string.Join(" ", predictedEmotion.Scores)}");
            }
        }
        else if (TFaceData == typeof(FaceData2))
        {
            using (var predictor = mlContext.Model.CreatePredictionEngine<FaceData2, FaceOutput>(model))
            {
                using (var fd = Dlib.GetFrontalFaceDetector())
                using (var sp = ShapePredictor.Deserialize(GetFile(ShapePredictorFileName).FullName))
                {
                    var faceDataFromImage = GetFaceData2FromImage(imageFileInfo, sp, fd, false);
                    faceDataFromImage.Emotion = ""; // Clear the label; the emotion is what we want to predict.

                    predictedEmotion = predictor.Predict(faceDataFromImage);
                }

                // Pair each score with its emotion label, taken from the key values of the "Label" column.
                predictedEmotionWithAllLabels = new Dictionary<string, float>();
                var slotNames = new VBuffer<ReadOnlyMemory<char>>();
                predictor.OutputSchema.GetColumnOrNull("Label")?.GetKeyValues(ref slotNames);
                var num = 0;
                foreach (var denseValue in slotNames.DenseValues())
                {
                    predictedEmotionWithAllLabels.Add(denseValue.ToString(), predictedEmotion.Scores[num++]);
                }

                Console.WriteLine("Predicted Emotion: " + predictedEmotion.PredictedEmotion);
                Console.WriteLine($"Scores: {string.Join(" ", predictedEmotion.Scores)}");
            }
        }
        else if (TFaceData == typeof(FaceData3))
        {
            using (var predictor = mlContext.Model.CreatePredictionEngine<FaceData3, FaceOutput>(model))
            {
                using (var fd = Dlib.GetFrontalFaceDetector())
                using (var sp = ShapePredictor.Deserialize(GetFile(ShapePredictorFileName).FullName))
                {
                    var faceDataFromImage = GetFaceData3FromImage(imageFileInfo, sp, fd, false);
                    faceDataFromImage.Emotion = ""; // Clear the label; the emotion is what we want to predict.

                    predictedEmotion = predictor.Predict(faceDataFromImage);
                }

                // Pair each score with its emotion label, taken from the key values of the "Label" column.
                predictedEmotionWithAllLabels = new Dictionary<string, float>();
                var slotNames = new VBuffer<ReadOnlyMemory<char>>();
                predictor.OutputSchema.GetColumnOrNull("Label")?.GetKeyValues(ref slotNames);
                var num = 0;
                foreach (var denseValue in slotNames.DenseValues())
                {
                    predictedEmotionWithAllLabels.Add(denseValue.ToString(), predictedEmotion.Scores[num++]);
                }

                Console.WriteLine("Predicted Emotion: " + predictedEmotion.PredictedEmotion);
                Console.WriteLine($"Scores: {string.Join(" ", predictedEmotion.Scores)}");
            }
        }
    }
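
The three branches above differ only in the generic type argument and the GetFaceDataNFromImage call. A minimal sketch of a generic helper that could collapse them, assuming the FaceDataN classes can share a settable Emotion property (the IFaceData interface below is hypothetical, not part of the original code):

    // Hypothetical refactor: one generic core replaces the three near-identical branches.
    private static void PredictEmotionCore<TFaceData>(
        FileInfo imageFileInfo,
        Func<FileInfo, ShapePredictor, FrontalFaceDetector, bool, TFaceData> getFaceData,
        out FaceOutput predictedEmotion,
        out Dictionary<string, float> predictedEmotionWithAllLabels)
        where TFaceData : class, IFaceData // IFaceData (hypothetical) exposes the settable Emotion property.
    {
        using (var predictor = mlContext.Model.CreatePredictionEngine<TFaceData, FaceOutput>(model))
        {
            using (var fd = Dlib.GetFrontalFaceDetector())
            using (var sp = ShapePredictor.Deserialize(GetFile(ShapePredictorFileName).FullName))
            {
                var faceData = getFaceData(imageFileInfo, sp, fd, false);
                faceData.Emotion = ""; // Clear the label; the emotion is what we want to predict.
                predictedEmotion = predictor.Predict(faceData);
            }

            // Pair each score with its emotion label, as in the branches above.
            predictedEmotionWithAllLabels = new Dictionary<string, float>();
            var slotNames = new VBuffer<ReadOnlyMemory<char>>();
            predictor.OutputSchema.GetColumnOrNull("Label")?.GetKeyValues(ref slotNames);
            var num = 0;
            foreach (var denseValue in slotNames.DenseValues())
            {
                predictedEmotionWithAllLabels.Add(denseValue.ToString(), predictedEmotion.Scores[num++]);
            }
        }
    }

Each branch of PredictEmotion would then reduce to a single dispatch, e.g. PredictEmotionCore(imageFileInfo, GetFaceData1FromImage, out predictedEmotion, out predictedEmotionWithAllLabels).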