Example #1
        public async Task DetectEmotion(byte[] imgBytes = null, string path = null)
        {
            try
            {
                Stream imageFileStream = null;
                if (!string.IsNullOrEmpty(path))
                {
                    imageFileStream = File.OpenRead(path);
                }
                else if (imgBytes != null)
                {
                    imageFileStream = new MemoryStream(imgBytes);
                }

                EmotionServiceClient emotionServiceClient = new EmotionServiceClient(EmotionApiKey);

                Emotion[] emotionResult = await emotionServiceClient.RecognizeAsync(imageFileStream);

                this.DetectedEmotion = emotionResult;
            }
            catch (Exception)
            {
                // Fall back to an empty result if the file/stream or API call fails.
                this.DetectedEmotion = Enumerable.Empty <Emotion>();
            }
        }
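For reference, a minimal caller sketch for the method above; `EmotionAnalyzer` is an assumed name for the hosting class, which the example implies exposes `DetectedEmotion` and `EmotionApiKey`:
        // Hypothetical usage sketch; EmotionAnalyzer is an assumed class name.
        var analyzer = new EmotionAnalyzer();
        await analyzer.DetectEmotion(path: "happy-face.jpg");

        foreach (var emotion in analyzer.DetectedEmotion)
        {
            // ToRankedList() orders the scores from highest to lowest.
            Console.WriteLine(emotion.Scores.ToRankedList().First().Key);
        }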
Example #2
        private async void GetEmotionResultForImage(string filePath)
        {
            var fileStream = File.OpenRead(filePath);

            var emotionServiceClient = new EmotionServiceClient(
                ConfigurationManager.AppSettings.Get("EmotionApiKey"),
                ConfigurationManager.AppSettings.Get("EmotionApiEndpoint"));

            var result = await emotionServiceClient.RecognizeAsync(fileStream);

            var emotions = new List <string>();

            foreach (var emotion in result)
            {
                var keyValuePairs  = emotion.Scores.ToRankedList();
                var activeEmotions = keyValuePairs.Where(x => x.Value > 0.01).OrderByDescending(x => x.Value);

                foreach (var activeEmotion in activeEmotions)
                {
                    var emotionInPercent = (activeEmotion.Value * 100).ToString("#0.##");
                    emotions.Add($"{activeEmotion.Key} {emotionInPercent}%");
                }
            }

            var resultWindow = new ResultWindow
            {
                InputImage = new BitmapImage(new Uri(filePath)),
                Attributes = emotions,
                Owner      = this
            };

            resultWindow.ShowDialog();

            fileStream.Dispose();
        }
        private async void getEmotion_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                if (emotionResult != null)
                {
                    Scores score     = emotionResult[0].Scores;
                    double Happiness = score.Happiness * 10000;
                    double Sadness   = score.Sadness * 10000;
                    double Surprise  = score.Surprise * 10000;
                    double Fear      = score.Fear * 10000;
                    double Anger     = score.Anger * 10000;
                    double Contempt  = score.Contempt * 10000;
                    double Disgust   = score.Disgust * 10000;
                    double Neutral   = score.Neutral * 10000;
                    output.Text = "Your emotions are:\nEmotion values range from 0 to 10000\n\n" +
                                  "\tHappiness\t: " + (int)Happiness + "\n" +
                                  "\tSadness\t: " + (int)Sadness + "\n" +
                                  "\tSurprise\t: " + (int)Surprise + "\n" +
                                  "\tFear\t\t: " + (int)Fear + "\n" +
                                  "\tAnger\t\t: " + (int)Anger + "\n" +
                                  "\tContempt\t: " + (int)Contempt + "\n" +
                                  "\tDisgust\t: " + (int)Disgust + "\n\n" +
                                  "\tNeutral\t: " + (int)Neutral + "\n";
                }
            }
            catch
            {
                output.Text = "Error returning the emotion";
            }
        }
Example #4
        public async Task RecognizeEmotion()
        {
            try
            {
                if (photo != null)
                {
                    using (var photoStream = photo.GetStream())
                    {
                        Emotion[] emotionResult = await emotionClient.RecognizeAsync(photoStream);

                        if (emotionResult.Any())
                        {
                            // Emotions detected are happiness, sadness, surprise, anger, fear, contempt, disgust, or neutral.
                            emotionResultLabel.Text = emotionResult.FirstOrDefault().Scores.ToRankedList().FirstOrDefault().Key;
                            emotion.IsVisible       = true;
                        }
                        photo.Dispose();
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
        }
Example #5
        public async void AnalyzeEmotion(ThreadPoolTimer timer)
        {
            if (!EmotionProcessingSemaphore.Wait(0))
            {
                return;
            }
            try
            {
                using (var captureStream = new InMemoryRandomAccessStream())
                {
                    await _mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream);

                    captureStream.Seek(0);

                    try
                    {
                        emotion = await EmotionServiceClient.RecognizeAsync(captureStream.AsStream());

                        System.Diagnostics.Debug.WriteLine(DateTime.Now);
                    }
                    // Catch and display Face API errors.
                    catch (FaceAPIException f)
                    {
                        System.Diagnostics.Debug.WriteLine(f.ErrorMessage, f.ErrorCode);
                    }
                    // Catch and display all other errors.
                    catch (Exception e)
                    {
                        System.Diagnostics.Debug.WriteLine(e.Message);
                    }
                }
            }
            finally
            {
                // Always release the semaphore, even if photo capture throws.
                EmotionProcessingSemaphore.Release();
            }
        }
Example #6
        async Task <IEnumerable <RawEmotion> > CaptureEmotionAsync()
        {
            _isProcessing = true;

            RawEmotion[] result;

            try
            {
                var photoFile = await _photoService.CreateAsync();

                var imageProperties = ImageEncodingProperties.CreateBmp();
                await _mediaManager.CapturePhotoToStorageFileAsync(imageProperties, photoFile);

                result = await _emotionClient.RecognizeAsync(await photoFile.OpenStreamForReadAsync());
            }
            finally
            {
                await _photoService.CleanupAsync();

                _isProcessing = false;
            }

            return(result.IsNullOrEmpty()
                ? Enumerable.Empty <RawEmotion>()
                : result);
        }
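`IsNullOrEmpty()` is not a BCL method on arrays; a minimal sketch of the extension method the example presumably relies on (name and shape assumed):
        // Assumed extension method (not part of .NET); requires System.Linq.
        public static class EnumerableExtensions
        {
            public static bool IsNullOrEmpty <T>(this IEnumerable <T> source)
            {
                return(source == null || !source.Any());
            }
        }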
        async void OnPhotoClicked(object sender, EventArgs e)
        {
            await CrossMedia.Current.Initialize();

            // Take photo
            if (CrossMedia.Current.IsCameraAvailable && CrossMedia.Current.IsTakePhotoSupported)
            {
                photo = await CrossMedia.Current.TakePhotoAsync(new StoreCameraMediaOptions
                {
                    Name      = "emotion.jpg",
                    PhotoSize = PhotoSize.Small
                });

                if (photo != null)
                {
                    image.Source = ImageSource.FromStream(photo.GetStream);
                }
                else
                {
                    return;
                }
            }
            else
            {
                await DisplayAlert("No Camera", "Camera unavailable.", "OK");
                return;
            }

            ((Button)sender).IsEnabled  = false;
            activityIndicator.IsRunning = true;

            // Recognize emotion
            try
            {
                if (photo != null)
                {
                    using (var photoStream = photo.GetStream())
                    {
                        Emotion[] emotionResult = await emotionClient.RecognizeAsync(photoStream);

                        if (emotionResult.Any())
                        {
                            // Emotions detected are happiness, sadness, surprise, anger, fear, contempt, disgust, or neutral.
                            emotionResultLabel.Text = emotionResult.FirstOrDefault().Scores.ToRankedList().FirstOrDefault().Key;
                        }
                        photo.Dispose();
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
            if (emotionResultLabel.Text != null)
            {
                await postEmotionAsync();
            }

            activityIndicator.IsRunning = false;
            ((Button)sender).IsEnabled  = true;
        }
Example #8
        public async Task <CogniviteResult> ProcessImage(string url)
        {
            VisualFeature[] visualFeatures =
            {
                VisualFeature.Adult,     VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description, VisualFeature.Faces,
                VisualFeature.ImageType, VisualFeature.Tags
            };

            CogniviteResult result = new CogniviteResult();

            result.VisionTask  = _visionServiceClient.AnalyzeImageAsync(url, visualFeatures);
            result.EmotionTask = _emotionServiceClient.RecognizeAsync(url);
            result.FaceTask    = _faceServiceClient.DetectAsync(url, false, true, new[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses });

            await Task.WhenAll(result.VisionTask, result.EmotionTask, result.FaceTask);

            var enTxt = result.VisionTask.Result.Description.Captions[0].Text;
            var frTxt = await TranslatorService.Translate(enTxt, "en", "fr");

            var esTxt = await TranslatorService.Translate(enTxt, "en", "es");

            result.VisionTask.Result.Description.Captions = new[]
            {
                new Caption { Text = frTxt },
                result.VisionTask.Result.Description.Captions[0],
                new Caption { Text = esTxt },
            };

            return(result);
        }
Example #9
        /// <summary>
        /// Uploads the image to Project Oxford and detects emotions.
        /// </summary>
        /// <param name="url">URL of the image to analyze.</param>
        /// <returns>The detected emotions, or null on failure.</returns>
        private async Task <Emotion[]> UploadAndDetectEmotions(string url)
        {
            Debug.WriteLine("EmotionServiceClient is created");

            //
            // Create Project Oxford Emotion API Service client
            //
            EmotionServiceClient emotionServiceClient = new EmotionServiceClient(_subscriptionKey);

            Debug.WriteLine("Calling EmotionServiceClient.RecognizeAsync()...");
            try
            {
                //
                // Detect the emotions in the URL
                //
                Emotion[] emotionResult = await emotionServiceClient.RecognizeAsync(url);

                return(emotionResult);
            }
            catch (Exception exception)
            {
                Debug.WriteLine("Detection failed. Please make sure that you have the right subscription key and proper URL to detect.");
                Debug.WriteLine(exception.ToString());
                return(null);
            }
        }
Example #10
        private async void emotion_Clicked(object sender, EventArgs e)
        {
            var emotionClient = new EmotionServiceClient("ccd94bca7ccc4e0d9b45b200088990e9");
            await CrossMedia.Current.Initialize();

            if (CrossMedia.Current.IsCameraAvailable && CrossMedia.Current.IsTakePhotoSupported)
            {
                // Supply media options for saving our photo after it's taken.
                var mediaOptions = new Plugin.Media.Abstractions.StoreCameraMediaOptions
                {
                    Directory = "Receipts",
                    Name      = $"{DateTime.UtcNow}.jpg"
                };

                // Take a photo of the business receipt.
                var file = await CrossMedia.Current.TakePhotoAsync(mediaOptions);

                // TakePhotoAsync returns null if the user cancels the camera UI.
                if (file == null)
                {
                    return;
                }

                using (var photoStream = file.GetStream())
                {
                    Emotion[] emotionResult = await emotionClient.RecognizeAsync(photoStream);

                    if (emotionResult.Any())
                    {
                        // Emotions detected are happiness, sadness, surprise, anger, fear, contempt, disgust, or neutral.
                        lblrating.Text = emotionResult.FirstOrDefault().Scores.ToRankedList().FirstOrDefault().Key;
                    }
                    file.Dispose();
                }
            }

            //var visionclient = new VisionServiceClient(visionKey);
            //var result = await visionclient.DescribeAsync(file.GetStream());
            //LblResult.Text = result.Description.Captions.First().Text;
        }
Example #11
        public async Task <ActionResult> Emotion(string img)
        {
            img = string.IsNullOrWhiteSpace(img) ? "/images/Emotions/pissed2.jpg" : img;
            string subscriptionKey = ConfigurationManager.AppSettings["EmotionApiKey"];
            string imageUrl        = $"{_baseUrl}{img}";

            // Using the SDK
            EmotionServiceClient service = new EmotionServiceClient(subscriptionKey);

            Emotion[] result = await service.RecognizeAsync(imageUrl);

/*
 *          // Using the WebApi
 *          var url = "https://westus.api.cognitive.microsoft.com/emotion/v1.0/recognize";
 *          var requestService = new CognitiveServicesRequest();
 *          var response = await requestService.MakeRequest(url, subscriptionKey, requestService.CreateImageRequestObject(imageUrl));
 */
            var viewModel = new EmotionViewModel
            {
                ImageUrl     = imageUrl,
                SDKResult    = result,
                JsonResponse = ""
            };

            return(View(viewModel));
        }
Example #12
        private async Task <Emotion[]> GetEmotionsAsync(StorageFile imageFile)
        {
            //
            // Create Project Oxford Emotion API Service client
            //
            EmotionServiceClient emotionServiceClient = new EmotionServiceClient("9fa5d104b72046f085aa1b4b379f03a3");

            Debug.WriteLine("Calling EmotionServiceClient.RecognizeAsync()...");
            try
            {
                // Open the StorageFile directly; File.OpenRead(imageFile.Path) can
                // fail for brokered storage locations in UWP.
                using (Stream imageFileStream = await imageFile.OpenStreamForReadAsync())
                {
                    //
                    // Detect the emotions in the file
                    //
                    var emotionResult = await emotionServiceClient.RecognizeAsync(imageFileStream);

                    return(emotionResult);
                }
            }
            catch (Exception exception)
            {
                Debug.WriteLine(exception.ToString());
                return(null);
            }
        }
Example #13
        /// <summary>
        /// Uploads the image to Project Oxford and detects emotions
        /// </summary>
        /// <param name="imageFilePath">The image file path.</param>
        /// <returns>The detected emotions, or null on failure.</returns>
        public static async Task <Emotion[]> UploadAndStreamDetectEmotionsAsync(string imageFilePath)
        {
            // Create Project Oxford Emotion API Service client
            try
            {
                var index     = random.Next(0, subscriptionKeyArray.Length);
                var renderKey = subscriptionKeyArray[index];
                EmotionServiceClient emotionServiceMasterClient = new EmotionServiceClient(renderKey);
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    // Detect the emotions in the image
                    var emotionResult = await emotionServiceMasterClient.RecognizeAsync(imageFileStream);

                    if (emotionResult == null)
                    {
                        // Retry once with the other subscription key; rewind the stream
                        // first so the full image is sent again.
                        renderKey = subscriptionKeyArray.FirstOrDefault(z => z != renderKey);
                        var emotionServiceSlaveClient = new EmotionServiceClient(renderKey);
                        imageFileStream.Seek(0, SeekOrigin.Begin);
                        emotionResult = await emotionServiceSlaveClient.RecognizeAsync(imageFileStream);
                    }
                    return(emotionResult);
                }
            }
            catch (Exception)
            {
                return(null);
            }
        }
Example #14
        async void GetEmotions(object sender, object e)
        {
            if (!IsFacePresent)
            {
                return;
            }
            dt.Stop();
            var ms = new MemoryStream();

            try
            {
                await MC.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), ms.AsRandomAccessStream());
            }
            catch
            {
                dt.Start();
                return;
            }
            ms.Position = 0L;
            var Emo = await Oxford.RecognizeAsync(ms);

            if (Emo != null && Emo.Length > 0)
            {
                var Face = Emo[0];
                System.Diagnostics.Debug.WriteLine(Face.Scores.Happiness);
                txt.Text = Face.Scores.Happiness.ToString();
            }
            dt.Start();
        }
Example #15
        public async void DetectAndExtractFaces(Stream imageStream)
        {
            Emotion[] emotions = await emoClient.RecognizeAsync(imageStream);

            var emoPicture = new EmoPicture();
            //emoPicture.Faces = ExtractFaces(emotions,emoPicture);
        }
        private async void GetEmotionDetails(WriteableBitmap bitmap, Face[] faces)
        {
            if (bitmap != null)
            {
                try
                {
                    BitmapEncoder faceDetectEncoder = new JpegBitmapEncoder();
                    // create frame from the writable bitmap and add to encoder
                    faceDetectEncoder.Frames.Add(BitmapFrame.Create(bitmap));

                    MemoryStream imageFileStream = new MemoryStream();
                    faceDetectEncoder.Save(imageFileStream);

                    // Rewind: Save() leaves the stream positioned at the end,
                    // so the Emotion API would otherwise receive zero bytes.
                    imageFileStream.Seek(0, SeekOrigin.Begin);

                    Emotion[] emotions = await _emotionServiceClient.RecognizeAsync(imageFileStream);

                    DetectedFaces?.Invoke(faces?.ToArray(), emotions?.ToArray());
                    DoDelayTimer();
                    return;
                }
                catch (Exception ex)
                {
                    Debug.WriteLine("GetEmotionDetails exception : " + ex.Message);
                }
            }

            DetectedFaces?.Invoke(faces?.ToArray(), null);
            DoDelayTimer();
        }
Example #17
        public async Task <ObservableCollection <Face> > UpdateEmotions(string selectedFile, ObservableCollection <Face> faces,
                                                                        string subscriptionKeyEmotions)
        {
            using (var fileStreamEmotions = File.OpenRead(selectedFile))
            {
                var emotionServiceClient = new EmotionServiceClient(subscriptionKeyEmotions);
                var emotions             = await emotionServiceClient.RecognizeAsync(fileStreamEmotions, faces.Select(
                                                                                         face => new Rectangle
                {
                    Height = face.Height,
                    Left = face.Left,
                    Top = face.Top,
                    Width = face.Width
                }).ToArray());

                foreach (var emotion in emotions)
                {
                    foreach (var face in faces.Where(face => face.Height == emotion.FaceRectangle.Height &&
                                                     face.Left == emotion.FaceRectangle.Left &&
                                                     face.Top == emotion.FaceRectangle.Top &&
                                                     face.Width == emotion.FaceRectangle.Width))
                    {
                        face.Scores = emotion.Scores;
                        face.CalculateEmotion();
                    }
                }

                return(faces);
            }
        }
Example #18
        private async Task <ObservableCollection <Face> > UpdateEmotions(string selectedFile, ObservableCollection <Face> faces)
        {
            using (var fileStreamEmotions = File.OpenRead(selectedFile))
            {
                var emotionServiceClient = new EmotionServiceClient(_subscriptionKeyEmotions);
                var emotions             = await emotionServiceClient.RecognizeAsync(fileStreamEmotions, faces.Select(
                                                                                         face => new Rectangle
                {
                    Height = face.Height,
                    Left = face.Left,
                    Top = face.Top,
                    Width = face.Width
                }).ToArray());

                foreach (var emotion in emotions)
                {
                    foreach (var face in faces.Where(face => face.Height == emotion.FaceRectangle.Height &&
                                                     face.Left == emotion.FaceRectangle.Left &&
                                                     face.Top == emotion.FaceRectangle.Top &&
                                                     face.Width == emotion.FaceRectangle.Width))
                    {
                        var mappedScores = AutoMapper.Mapper.Map <Microsoft.ProjectOxford.Emotion.Contract.Scores>(emotion.Scores);
                        face.Scores = mappedScores;
                        face.CalculateEmotion();
                    }
                }

                return(faces);
            }
        }
        private async void GetEmotionDetails(SoftwareBitmap bitmap, List <Face> faces)
        {
            if (bitmap != null)
            {
                try
                {
                    using (var randomAccessStream = new InMemoryRandomAccessStream())
                    {
                        var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, randomAccessStream);

                        encoder.SetSoftwareBitmap(bitmap);
                        await encoder.FlushAsync();

                        randomAccessStream.Seek(0);

                        Emotion[] emotions = await _emotionServiceClient.RecognizeAsync(randomAccessStream.AsStream());

                        ProcessResults(faces?.ToArray(), emotions?.ToArray(), bitmap);
                    }
                }
                catch (Exception ex)
                {
                    Debug.WriteLine("GetEmotionDetails exception : " + ex.Message);
                    ProcessResults(faces?.ToArray(), null, bitmap);
                }
            }
        }
        private async Task <Emotion[]> GetEmotions()
        {
            try
            {
                Emotion[] emotionResult;

                Windows.Storage.StorageFile file = await KnownFolders.PicturesLibrary.GetFileAsync(pictureName);

                if (file != null)
                {
                    // Open a stream for the selected file.
                    // The 'using' block ensures the stream is disposed
                    // after the image is loaded.
                    using (Stream fileStream =
                               await file.OpenStreamForReadAsync())
                    {
                        emotionResult = await emotionServiceClient.RecognizeAsync(fileStream);

                        return(emotionResult);
                    }
                }
                else
                {
                    return(null);
                }
            }
            catch (Exception ex)
            {
                SolidColorBrush brush = new SolidColorBrush(Windows.UI.Colors.Red);
                Status.Foreground = brush;
                Status.Text       = "Error reading Emotion : " + ex.Message;
                return(null);
            }
        }
Example #21
        private static async Task <FaceResult[]> GetHappinessAsync(Stream stream)
        {
            var loading = UserDialogs.Instance.Loading("Analyzing...");

            loading.Show();

            var emotionClient = new EmotionServiceClient(
                CognitiveServicesKeys.Emotion);

            try
            {
                var emotionResults = await emotionClient.RecognizeAsync(stream);

                if (emotionResults == null || emotionResults.Count() == 0)
                {
                    throw new Exception("Can't detect face");
                }

                return(emotionResults);
            }
            finally
            {
                loading.Hide();
            }
        }
        /// <summary>
        /// Returns true if all faces are smiling in the frame
        /// </summary>
        /// <param name="stream">stream of frame</param>
        /// <param name="faces">list of faces from the builtin FaceRecognition</param>
        /// <param name="scale">factor by which frame has been scaled compared to recognized faces</param>
        /// <returns></returns>
        public async Task <bool> CheckIfEveryoneIsSmiling(IRandomAccessStream stream, IEnumerable <DetectedFace> faces, double scale)
        {
            List <Rectangle> rectangles = new List <Rectangle>();

            foreach (var face in faces)
            {
                var box = face.FaceBox;
                rectangles.Add(new Rectangle()
                {
                    Top    = (int)((double)box.Y * scale),
                    Left   = (int)((double)box.X * scale),
                    Height = (int)((double)box.Height * scale),
                    Width  = (int)((double)box.Width * scale)
                });
            }

            try
            {
                var emotions = await _client.RecognizeAsync(stream.AsStream(), rectangles.ToArray());

                return(emotions.All(emotion => GetEmotionType(emotion) == EmotionType.Happiness));
            }
            catch (Exception)
            {
                Debug.WriteLine("MAKE SURE TO POPULATE Keys.cs");
                return(false);
            }
        }
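`GetEmotionType` and `EmotionType` are not shown in the listing; a plausible sketch, assuming the helper returns the top-scoring category and the enum member names match the score keys:
        // Hypothetical helper: maps an Emotion result to its top-scoring category.
        private EmotionType GetEmotionType(Emotion emotion)
        {
            // ToRankedList() orders the scores from highest to lowest.
            var top = emotion.Scores.ToRankedList().First().Key;

            return((EmotionType)Enum.Parse(typeof(EmotionType), top));
        }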
Example #23
        /// <summary> Function which submits a frame to the Emotion API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the emotions returned by the API. </returns>
        private async Task <LiveCameraResult> EmotionAnalysisFunction(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            // Submit image to API.
            Emotion[] emotions = null;

            // See if we have local face detections for this image.
            var localFaces = (OpenCvSharp.Rect[])frame.UserData;

            if (localFaces == null)
            {
                // If localFaces is null, we're not performing local face detection.
                // Use Cognitive Services to do the face detection.
                Properties.Settings.Default.EmotionAPICallCount++;
                emotions = await _emotionClient.RecognizeAsync(jpg);
            }
            else if (localFaces.Count() > 0)
            {
                // If we have local face detections, we can call the API with them.
                // First, convert the OpenCvSharp rectangles.
                var rects = localFaces.Select(
                    f => new Microsoft.ProjectOxford.Common.Rectangle
                {
                    Left   = f.Left,
                    Top    = f.Top,
                    Width  = f.Width,
                    Height = f.Height
                });
                Properties.Settings.Default.EmotionAPICallCount++;
                emotions = await _emotionClient.RecognizeAsync(jpg, rects.ToArray());
            }
            else
            {
                // Local face detection found no faces; don't call Cognitive Services.
                emotions = new Emotion[0];
            }

            // Output.
            return(new LiveCameraResult
            {
                Faces = emotions.Select(e => CreateFace(e.FaceRectangle)).ToArray(),
                // Extract emotion scores from results.
                EmotionScores = emotions.Select(e => e.Scores).ToArray()
            });
        }
Example #24
        /// <summary>
        /// Detects the emotions in the image at the given URL
        /// </summary>
        /// <param name="uri">The image URL</param>
        /// <returns>The detected emotions</returns>
        public async Task <Emotion[]> GetEmotionsFromUri(string uri)
        {
            EmotionServiceClient e_client = new EmotionServiceClient(key_emotion);
            var emotion_task = e_client.RecognizeAsync(uri);

            return(await emotion_task);
        }
Example #25
        private static async Task DetectEmotion(string apiKey, string imageUrl)
        {
            var emotionServiceClient = new EmotionServiceClient(apiKey);

            Emotion[] emotionResult = await emotionServiceClient.RecognizeAsync(imageUrl);

            LogEmotionResult(emotionResult);
        }
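`LogEmotionResult` is not part of the listing; a minimal sketch of such a logger, reusing the `Scores.ToRankedList()` helper seen throughout these examples:
        // Hypothetical logger for detected emotions; requires System.Linq.
        private static void LogEmotionResult(Emotion[] emotions)
        {
            foreach (var emotion in emotions ?? Enumerable.Empty <Emotion>())
            {
                foreach (var score in emotion.Scores.ToRankedList())
                {
                    Console.WriteLine($"{score.Key}: {score.Value:P2}");
                }
            }
        }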
Example #26
        public async Task CapturaEmocao(string fotoUrl, string chaveDaApi)
        {
            var emotionServiceClient = new EmotionServiceClient(chaveDaApi);

            Emotion[] emotionResult = await emotionServiceClient.RecognizeAsync(fotoUrl);

            LogEmotionResult(emotionResult);
        }
Example #27
        private static async Task <Emotion[]> EmotionDetectAync(string emotionApiKey)
        {
            var client  = new EmotionServiceClient(emotionApiKey);
            var url     = "https://github.com/Microsoft/Cognitive-Face-Windows/blob/master/Data/detection2.jpg?raw=true";
            var emotion = await client.RecognizeAsync(url);

            return(emotion);
        }
        public async Task Load(Stream stream)
        {
            IsDataLoading = true;
            EmotionServiceClient esc = new EmotionServiceClient("4064c52bfb044805a39d2d3c33749f44");
            Emotions = await esc.RecognizeAsync(stream);

            IsDataLoading = false;
        }
Example #29
        private async Task <Emotion[]> RecognizeEmotionsFromImage(dynamic image, Rectangle[] faceRectangles = null)
        {
            var result = await _emotionServiceClient.RecognizeAsync(image, faceRectangles);

            _emotionAPICallCount++;

            return(result);
        }
        public static async Task <(Face[] faces, Person person, Emotion[] emotions)> DetectAndIdentifyFace(Bitmap image)
        {
            FaceServiceClient    fsc = new FaceServiceClient(Settings.Instance.FaceApiKey, FaceApiEndpoint);
            EmotionServiceClient esc = new EmotionServiceClient(Settings.Instance.EmotionApiKey);

            //FACE Detection
            //TODO add detection interval as param
            Emotion[] emotions = null;
            Person    person   = null;

            Face[] faces = null;


            //Detect and identify only once per 10 seconds
            if (lastFaceDetectTime.AddSeconds(10) < DateTime.Now)
            {
                lastFaceDetectTime = DateTime.Now;

                MemoryStream memoryStream = new MemoryStream();
                image.Save(memoryStream, System.Drawing.Imaging.ImageFormat.Jpeg);

                //We need to seek to begin
                memoryStream.Seek(0, SeekOrigin.Begin);
                faces = await fsc.DetectAsync(memoryStream, true, true, new List <FaceAttributeType>() { FaceAttributeType.Age, FaceAttributeType.Gender });

                if (faces.Any())
                {
                    var rec = new Microsoft.ProjectOxford.Common.Rectangle[] { faces.First().FaceRectangle.ToRectangle() };
                    //Emotions

                    //We need to seek to begin, due to problems with parallel access we needed to create new memory stream
                    memoryStream = new MemoryStream();
                    image.Save(memoryStream, System.Drawing.Imaging.ImageFormat.Jpeg);
                    memoryStream.Seek(0, SeekOrigin.Begin);

                    //We call Emotion API and we include face rectangle information,
                    //as this way the call is cheaper, as emotion api does not have to run face detection
                    emotions = await esc.RecognizeAsync(memoryStream, rec);


                    //Person Identification
                    var groups = await fsc.ListPersonGroupsAsync();

                    var groupId = groups.First().PersonGroupId;

                    //We are interested only in first candidate
                    var identifyResult = await fsc.IdentifyAsync(groupId, new Guid[] { faces.First().FaceId }, 1);

                    var candidate = identifyResult?.FirstOrDefault()?.Candidates?.FirstOrDefault();

                    if (candidate != null)
                    {
                        person = await fsc.GetPersonAsync(groupId, candidate.PersonId);
                    }
                }
            }
            return(faces, person, emotions);
        }
Example #31
        public static async Task <string> RecognizeEmotionsFromPortraitImage(System.IO.Stream imgStream)
        {
            var emotionServiceClient = new EmotionServiceClient(BotConfig.MS_COGNITIVE_API_KEY);
            var emotions             = await emotionServiceClient.RecognizeAsync(imgStream);

            var strEmotions = emotions.Select(PrintEmotion);

            return(string.Join("\n\r\n\r", strEmotions));
        }
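`PrintEmotion` is likewise not shown; a sketch that formats one face's scores as a single line (shape assumed):
        // Hypothetical formatter: one line per face, strongest emotion first.
        private static string PrintEmotion(Emotion emotion)
        {
            var ranked = emotion.Scores.ToRankedList()
                                .Select(s => $"{s.Key}: {s.Value:P2}");

            return(string.Join(", ", ranked));
        }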