Example #1
        /// <summary>
        /// Updates the corresponding tracked identity in the local identity store when emotions are detected for a face.
        /// </summary>
        /// <param name="emotion">The emotion result as returned by the Cognitive Services Emotion API.</param>
        /// <remarks>
        /// Assumes the face in question has already been detected by the face tracker (Windows.Media.FaceAnalysis namespace). A usage sketch follows this method.
        /// </remarks>
        public void DetectedEmotion(Emotion emotion)
        {
            var identity = FindNearestIdentity(emotion.FaceRectangle);

            if (identity != null)
            {
                identity.EmotionScores = emotion.Scores;
            }
        }
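
DetectedEmotion expects one Emotion result per face from the Emotion API. The following is a minimal usage sketch and not part of the original sample: it assumes an EmotionServiceClient from the Microsoft.ProjectOxford.Emotion package, and the names UpdateEmotionsAsync and GetFrameStreamAsync are hypothetical.

        private async Task UpdateEmotionsAsync()
        {
            // Hypothetical wiring: recognize emotions on a captured frame and feed each
            // per-face result into DetectedEmotion so the tracked identity picks up the scores.
            var emotionClient = new EmotionServiceClient("<subscription-key>");

            using (var frame = await GetFrameStreamAsync()) // hypothetical helper returning a Stream
            {
                // RecognizeAsync returns one Emotion per detected face, each with a
                // FaceRectangle and a Scores object.
                Emotion[] results = await emotionClient.RecognizeAsync(frame);

                foreach (var emotion in results)
                {
                    DetectedEmotion(emotion);
                }
            }
        }
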
        /// <summary>
        /// Returns the emotion with the highest confidence score for a single detected face.
        /// </summary>
        public Emotions GetMaxEmotion(Microsoft.ProjectOxford.Emotion.Contract.Emotion personEmotion)
        {
            var emotions = personEmotion.Scores.ToRankedList();

            Emotions maxEmotion = new Emotions {
                EmotionName  = Emotions.emotion.Happiness,
                EmotionValue = 0
            };

            // Walk the ranked list and keep the entry with the highest score.
            foreach (var emotion in emotions)
            {
                if (emotion.Value > maxEmotion.EmotionValue)
                {
                    maxEmotion.EmotionValue = emotion.Value;
                    maxEmotion.EmotionName  = (Emotions.emotion)Enum.Parse(typeof(Emotions.emotion), emotion.Key);
                }
            }
            return maxEmotion;
        }
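
If Scores.ToRankedList() returns the emotions ordered by confidence (highest first, as in the ProjectOxford SDK), the loop above can be reduced to taking the first entry. A sketch of that shortcut, assuming using System.Linq and the same app-specific Emotions type; the method name GetMaxEmotionRanked is only illustrative.

        public Emotions GetMaxEmotionRanked(Microsoft.ProjectOxford.Emotion.Contract.Emotion personEmotion)
        {
            // ToRankedList() yields KeyValuePair<string, float> entries; the first one
            // is assumed to be the strongest emotion.
            var top = personEmotion.Scores.ToRankedList().First();

            return new Emotions
            {
                EmotionName  = (Emotions.emotion)Enum.Parse(typeof(Emotions.emotion), top.Key),
                EmotionValue = top.Value
            };
        }
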
        private void SetNewEmotions()
        {
            // Dimensions of the full-resolution photo; the face rectangles returned by
            // the Emotion API use these pixel coordinates and are scaled to the
            // displayed image below.
            var originalHeight = _model.Photo.GetFullImageHeight();
            var originalWidth  = _model.Photo.GetFullImageWidth();

            for (var i = 0; i < _model.Emotions.Length; i++)
            {
                Emotion emotion          = _model.Emotions[i];
                var     strongestEmotion = GetStrongestEmotion(emotion.Scores);
                View    faceView;
                if (strongestEmotion != null)
                {
                    faceView = new Image
                    {
                        Source = ImageSource.FromResource($"IntelligentPx.Resources.Emotions.{strongestEmotion}.png"),
                        Aspect = Aspect.AspectFit
                    };
                }
                else
                {
                    faceView = new BoxView {
                        BackgroundColor = Color.Black, Opacity = 0.4
                    };
                }

                // Overlay the emotion icon (or dimmed box) on the face, scaling the
                // API's pixel rectangle to the rendered size of the Image view.
                ImageLayout.Children.Add(faceView,
                                         Constraint.RelativeToView(Image, (rl, v) => v.X + emotion.FaceRectangle.Left * v.Width / originalWidth),
                                         Constraint.RelativeToView(Image, (rl, v) => v.Y + emotion.FaceRectangle.Top * v.Height / originalHeight),
                                         Constraint.RelativeToView(Image, (rl, v) => emotion.FaceRectangle.Width * v.Width / originalWidth),
                                         Constraint.RelativeToView(Image, (rl, v) => emotion.FaceRectangle.Height * v.Height / originalHeight));

                ImageLayout.Children.Add(new Label {
                    Text = i.ToString(), TextColor = Color.White, FontAttributes = FontAttributes.Bold
                },
                                         Constraint.RelativeToView(faceView, (rl, v) => v.X - 10),
                                         Constraint.RelativeToView(faceView, (rl, v) => v.Y + 10));
            }
        }
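
GetStrongestEmotion is not shown in this example. One plausible implementation, sketched here as an assumption: it returns the name of the top-scoring emotion when its confidence clears a threshold, and null otherwise, which matches the null check and the resource-name interpolation above. It assumes using System.Linq; the threshold value is arbitrary.

        private static string GetStrongestEmotion(Microsoft.ProjectOxford.Emotion.Contract.Scores scores,
                                                  float threshold = 0.5f)
        {
            // Hypothetical: take the highest-ranked score and only report it when it is
            // confident enough; otherwise the caller falls back to the dimmed BoxView.
            var top = scores.ToRankedList().FirstOrDefault();

            return top.Value >= threshold ? top.Key : null;
        }
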