Example 1
        private async Task<FaceEmotionDetection> DetectFaceAndEmotionsAsync(IRandomAccessStream imageStream)
        {
            try
            {
                // Get the emotion scores from the specified stream
                // (RecognizeAsync also accepts a URL as an argument)
                Emotion[] emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                // Assuming the picture has one face, retrieve the ranked
                // list of emotion scores for the first item in the array
                var faceEmotion = emotionResult[0]?.Scores.ToRankedList();

                // Create a list of face attributes that the
                // app will need to retrieve
                var requiredFaceAttributes = new FaceAttributeType[] {
                    FaceAttributeType.Age,
                    FaceAttributeType.Gender,
                    FaceAttributeType.Smile,
                    FaceAttributeType.FacialHair,
                    FaceAttributeType.HeadPose,
                    FaceAttributeType.Glasses
                };
                // Rewind the stream, then get the list of faces in the picture
                imageStream.Seek(0);
                var faces = await faceServiceClient.DetectAsync(imageStream.AsStream(),
                                                                false, false, requiredFaceAttributes);

                // Assuming there is only one face, store its attributes
                var faceAttributes = faces[0]?.FaceAttributes;

                var faceEmotionDetection = new FaceEmotionDetection
                {
                    Age       = faceAttributes.Age,
                    Emotion   = faceEmotion?.FirstOrDefault().Key,
                    Glasses   = faceAttributes.Glasses.ToString(),
                    Smile     = faceAttributes.Smile,
                    Gender    = faceAttributes.Gender,
                    Moustache = faceAttributes.FacialHair.Moustache,
                    Beard     = faceAttributes.FacialHair.Beard
                };

                return faceEmotionDetection;
            }
            catch (Exception ex)
            {
                await DisplayAlert("Error", ex.Message, "OK");

                return null;
            }
        }
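
The example above references a FaceEmotionDetection model class and two service clients that are not shown on this page. The following is a minimal sketch, with property names and types inferred from the assignments above; the placeholder subscription keys are hypothetical and must be replaced with your own:

        // Minimal sketch of the model the example populates
        public class FaceEmotionDetection
        {
            public string Emotion { get; set; }
            public double Smile { get; set; }
            public string Glasses { get; set; }
            public string Gender { get; set; }
            public double Age { get; set; }
            public double Moustache { get; set; }
            public double Beard { get; set; }
        }

        // Clients from Microsoft.ProjectOxford.Emotion and Microsoft.ProjectOxford.Face;
        // the key strings below are placeholders, not real keys
        private readonly EmotionServiceClient emotionServiceClient =
            new EmotionServiceClient("YOUR_EMOTION_API_KEY");
        private readonly FaceServiceClient faceServiceClient =
            new FaceServiceClient("YOUR_FACE_API_KEY");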
Example 2
        private async void BtnEmotions_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                // Detect the face and its emotions, then display the result
                FaceEmotionDetection faceEmotion = await DetectFaceAndEmotionsAsync(imageStream);

                if (faceEmotion != null)
                {
                    tblEmotion.Text = "Your emotions are:\n" +
                        "Emotion: " + faceEmotion.Emotion + "\n" +
                        "Age: " + faceEmotion.Age + "\n" +
                        "Gender: " + faceEmotion.Gender + "\n" +
                        "Smile: " + faceEmotion.Smile + "\n" +
                        "Glasses: " + faceEmotion.Glasses + "\n" +
                        "Moustache: " + faceEmotion.Moustache + "\n" +
                        "Beard: " + faceEmotion.Beard;
                }
            }
            catch
            {
                tblEmotion.Text = "Error returning the emotions from the API";
            }
        }
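
The handler above assumes imageStream is an IRandomAccessStream field populated before the button is clicked. One way to fill it in a UWP app is with a FileOpenPicker; this is a sketch under that assumption, and the BtnPickImage_Click handler name is hypothetical:

        private IRandomAccessStream imageStream;

        private async void BtnPickImage_Click(object sender, RoutedEventArgs e)
        {
            // Let the user pick a picture and keep its stream for later analysis
            var picker = new Windows.Storage.Pickers.FileOpenPicker();
            picker.FileTypeFilter.Add(".jpg");
            picker.FileTypeFilter.Add(".png");

            Windows.Storage.StorageFile file = await picker.PickSingleFileAsync();
            if (file != null)
            {
                imageStream = await file.OpenAsync(Windows.Storage.FileAccessMode.Read);
            }
        }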