Inheritance: ServiceClient, IEmotionServiceClient
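The numbered examples below are real-world usages of EmotionServiceClient collected from open-source projects, and they all share the same basic call pattern. Here is a minimal sketch of that pattern, assuming the Microsoft.ProjectOxford.Emotion NuGet package (the client library these examples use); the key and image URL are placeholders, not values taken from any example on this page.

using System;
using System.Threading.Tasks;
using Microsoft.ProjectOxford.Emotion;          // EmotionServiceClient
using Microsoft.ProjectOxford.Emotion.Contract; // Emotion, Scores (namespace may vary by SDK version)

class EmotionQuickStart
{
    static async Task Main()
    {
        // Placeholder key: substitute your own Emotion API subscription key.
        var client = new EmotionServiceClient("YOUR_SUBSCRIPTION_KEY");

        // RecognizeAsync also has Stream overloads, as several examples below show.
        Emotion[] emotions = await client.RecognizeAsync("https://example.com/face.jpg");

        foreach (var emotion in emotions)
        {
            // Each score is a float in [0, 1]; the eight scores for a face sum to approximately one.
            Console.WriteLine($"Happiness: {emotion.Scores.Happiness:P1}");
        }
    }
}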
Code Example #1
        public async Task<Emotion[]> GetEmotion()
        {
            try
            {
                var emotionServiceClient = new EmotionServiceClient("keyemotio");

                var emotion = await emotionServiceClient.RecognizeAsync(this.ImageResult.Url);
                this.EmotionCollection = emotion;
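                // emotion[0] below assumes at least one face was detected; an empty
                // result array would throw here and be handled by the catch block.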
                var image = new EmotionView
                {
                    Anger = emotion[0].Scores.Anger * 100,
                    Nombre = ImageResult.Nombre,
                    Url = ImageResult.Url,
                    Disgust = emotion[0].Scores.Disgust * 100,
                    Contempt = emotion[0].Scores.Contempt * 100,
                    Fear = emotion[0].Scores.Fear * 100,
                    Happiness = emotion[0].Scores.Happiness * 100,
                    Neutral = emotion[0].Scores.Neutral * 100,
                    Sadness = emotion[0].Scores.Sadness * 100,
                    Surprise = emotion[0].Scores.Surprise * 100
                };

                ImageCollection.Add(image);
                return emotion;
            }
            catch (Exception)
            {
                return null;
            }
        }
Code Example #2
File: OxfordClient.cs  Project: elcalado/showmelove
        public async Task InitializeAsync()
        {
            var oxfordEmotionClientKey = _configurationReader["OxfordEmotionClientKey"];
            var oxfordFaceClientKey    = _configurationReader["OxfordFaceClientKey"];

            _emotionServiceClient = new EmotionServiceClient(_httpClient, oxfordEmotionClientKey);
            _faceServiceClient    = new FaceServiceClient(oxfordFaceClientKey);

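            // Nothing is actually awaited here; the completed task only satisfies the async signature.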
            await Task.FromResult<object>(null);
        }
Code Example #3
File: Program.cs  Project: modulexcite/ProjectOxford
        private static async void DetectFacesAndDisplayResult(string fileLocation, string subscriptionKeyFace, string subscriptionKeyEmotion)
        {
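            // The image file is opened twice because each service call reads its stream to the end.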
            using (var fileStreamFace = File.OpenRead(fileLocation))
            {
                using (var fileStreamEmotions = File.OpenRead(fileLocation))
                {
                    try
                    {
                        var faceServiceClient = new FaceServiceClient(subscriptionKeyFace);
                        var emotionServiceClient = new EmotionServiceClient(subscriptionKeyEmotion);
                        var faces = await faceServiceClient.DetectAsync(fileStreamFace, false, true, true);
                        Console.WriteLine(" > " + faces.Length + " detected.");
                        if (faces.Length > 0)
                        {
                            var faceRectangles = new List<Rectangle>();
                            foreach (var face in faces)
                            {
                                Console.WriteLine(" >> age: " + face.Attributes.Age + " gender:" + face.Attributes.Gender);
                                var rectangle = new Rectangle
                                {
                                    Height = face.FaceRectangle.Height,
                                    Left = face.FaceRectangle.Left,
                                    Top = face.FaceRectangle.Top,
                                    Width = face.FaceRectangle.Width
                                };
                                faceRectangles.Add(rectangle);
                            }

                            // on face detected we start emotion analysis
                            var emotions = await emotionServiceClient.RecognizeAsync(fileStreamEmotions, faceRectangles.ToArray());
                            var emotionsDetails = "";
                            foreach (var emotion in emotions)
                            {
                                emotionsDetails += $@" Anger: {emotion.Scores.Anger}
    Contempt: {emotion.Scores.Contempt}
    Disgust: {emotion.Scores.Disgust}
    Fear: {emotion.Scores.Fear}
    Happiness: {emotion.Scores.Happiness}
    Neutral: {emotion.Scores.Neutral}
    Sadness: {emotion.Scores.Sadness}
    Surprise: {emotion.Scores.Surprise}
";
                            }

                            Console.WriteLine(" >> emotions: " + emotionsDetails);
                        }
                    }
                    catch (Exception exception)
                    {
                        Console.WriteLine(exception.ToString());
                    }
                }
            }
        }
Code Example #4
        static async Task<Emotion[]> RecognizeEmotionsAsync(string imageUrl)
        {
            var client = new EmotionServiceClient(SubscriptionKey);

            try
            {
                return await client.RecognizeAsync(imageUrl);
            }
            catch (Exception)
            {
                return null;
            }
        }
Code Example #5
    public async Task<IDictionary<Emotions, float>> ReadEmotionsFromImageStreamAndGetRankedEmotions(Stream imageStream)
    {
      EmotionServiceClient emotionServiceClient = new EmotionServiceClient(this.subscriptionKey);

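      // ConfigureAwait(false) avoids resuming on the captured (e.g. UI) synchronization context.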
      Emotion[] emotions = await emotionServiceClient.RecognizeAsync(imageStream).ConfigureAwait(false);

      Emotion emotion = emotions.FirstOrDefault();

      if (emotion == null)
        return null;

      return this.CalculateAndRankScoreToDictionary(emotion.Scores);
    }
Code Example #6
File: Core.cs  Project: XpiritBV/mini-hacks
        private static async Task<Emotion[]> GetHappiness(Stream stream)
        {
            string emotionKey = "88f748eefd944a5d8d337a1765414bba";

            EmotionServiceClient emotionClient = new EmotionServiceClient(emotionKey);

            var emotionResults = await emotionClient.RecognizeAsync(stream);

            if (emotionResults == null || emotionResults.Length == 0)
            {
                throw new Exception("Can't detect face");
            }

            return emotionResults;
        }
Code Example #7
        /// <summary>
        /// Uploads the image to Project Oxford and detect emotions.
        /// </summary>
        /// <param name="imageFilePath">The image file path.</param>
        /// <returns></returns>
        private async Task<Emotion[]> UploadAndDetectEmotions(string url)
        {
            MainWindow window = (MainWindow)Application.Current.MainWindow;
            string subscriptionKey = window.ScenarioControl.SubscriptionKey;

            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE STARTS HERE
            // -----------------------------------------------------------------------

            window.Log("EmotionServiceClient is created");

            //
            // Create Project Oxford Emotion API Service client
            //
            EmotionServiceClient emotionServiceClient = new EmotionServiceClient(subscriptionKey);

            window.Log("Calling EmotionServiceClient.RecognizeAsync()...");
            try
            {
                //
                // Detect the emotions in the URL
                //
                Emotion[] emotionResult = await emotionServiceClient.RecognizeAsync(url);
                return emotionResult;
            }
            catch (Exception exception)
            {
                window.Log("Dection failed. Please make sure that you have the right subscription key and proper URL to detect.");
                window.Log(exception.ToString());
                return null;
            }
            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE ENDS HERE
            // -----------------------------------------------------------------------

        }
Code Example #8
        private async void btnTakePhoto_Click(object sender, RoutedEventArgs e)
        {
            btnTakePhoto.IsEnabled = false;
            btnStartPreview.IsEnabled = false;

            InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream();
            await mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), stream);

            stream.Seek(0);
            BitmapImage bitmap = new BitmapImage();
            bitmap.SetSource(stream);
            captureImage.Source = bitmap;

            stream.Seek(0);
            Stream st = stream.AsStream();

            if (isPreviewing == true) await mediaCapture.StopPreviewAsync();
            isPreviewing = false;
            previewElement.Visibility = Visibility.Collapsed;

            progring.IsActive = true;

            try
            {
                // Replace 12345678901234567890123456789012 with your key from https://www.projectoxford.ai/Subscription/
                EmotionServiceClient emotionServiceClient =
                        new EmotionServiceClient("12345678901234567890123456789012");
                emotionResult = await emotionServiceClient.RecognizeAsync(st);
            }
            // Swallow recognition failures; emotionResult stays null and no scores are displayed.
            catch { }

            progring.IsActive = false;

            if ((emotionResult != null) && (emotionResult.Length > 0))
            {
                emo.Clear();
                emo.Add(emotionResult[0]);
                this.DataContext = emo.ElementAt(0);
            }
            btnStartPreview.IsEnabled = true;
            btnTakePhoto.IsEnabled = true;
        }
Code Example #9
 public EmotionServiceClient Constructor(string subscriptionKey)
 {
     EmotionServiceClient target = new EmotionServiceClient(subscriptionKey);
     return target;
     // TODO: add assertions to method EmotionServiceClientTest.Constructor(String)
 }
Code Example #10
File: EmotionApi.cs  Project: evgri243/pubic-demos
        public EmotionApi(string subscriptionKey)
        {
            Check.Required<ArgumentNullException>(() => subscriptionKey != null);

            _client = new EmotionServiceClient(subscriptionKey);
        }
Code Example #11
 static Emotion()
 {
     // Create a EmotionServiceClient shared among all instances/clients connected
     // to the EmotionController
     emotionServiceClient = new EmotionServiceClient(subscriptionKey);
 }
Code Example #12
        // method to take a still image, send to APIs, and display result
        public async void takePhoto_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                takePhoto.IsEnabled = false;

                photoFile = await KnownFolders.PicturesLibrary.CreateFileAsync(
                    PHOTO_FILE_NAME, CreationCollisionOption.GenerateUniqueName);
                ImageEncodingProperties imageProperties = ImageEncodingProperties.CreateJpeg();
                await mediaCapture.CapturePhotoToStorageFileAsync(imageProperties, photoFile);
                takePhoto.IsEnabled = true;
                statusBox.Text = "Take Photo succeeded: " + photoFile.Path;

                IRandomAccessStream photoStream = await photoFile.OpenReadAsync();
                IRandomAccessStream photoStream2 = await photoFile.OpenReadAsync();
                BitmapImage bitmap = new BitmapImage();
                bitmap.SetSource(photoStream);
                await writeableBitmap.SetSourceAsync(photoStream2);

                // and now for the face API call
                statusBox.Text = "Uploading image for Face API";

                Stream fs1 = await photoFile.OpenStreamForReadAsync();
                Stream fs2 = await photoFile.OpenStreamForReadAsync();

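                // Hardcoded sample subscription keys; replace them with your own Face and Emotion API keys.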
                var faceClient = new FaceServiceClient("9725d03742394560be3ff295e1e435a2");
                var emotionClient = new EmotionServiceClient("c9306a1f134749759f1f4f9ae8838e1a");
                faceResult = await faceClient.DetectAsync(fs1);
                emotionResult = await emotionClient.RecognizeAsync(fs2);

                numFaces = faceResult.Length;

                statusBox.Text = "Number of faces detected: " + numFaces.ToString();
                currentFace = 0;

                if (numFaces > 0) // if faces were returned in the result, display the first one
                {
                    displayFaceInfo();
                    displayImage();
                }

            }
            catch (Exception ex)
            {
                statusBox.Text = ex.Message;
                Cleanup();
            }
            finally
            {
                takePhoto.IsEnabled = true;
            }

        }
Code Example #13
		private async void DetectAsync()
		{
			Shell.SetBusyVisibility( Visibility.Visible, "Taking photo.." );

			this.operationMode = OperationMode.Detect;

			this.viewModel.PhotoFile = await this.camera.CapturePhotoToFileAsync();
			await this.camera.CaptureManager.StopPreviewAsync();

			if( this.led != null )
			{
				this.led.TurnOff();
			}

			Shell.SetBusyVisibility( Visibility.Visible, "Detecting your face.." );
			
			Face.FaceServiceClient faceClient = new Face.FaceServiceClient( FACE_API_KEY );
			Stream stream = await this.viewModel.PhotoFile.OpenStreamForReadAsync();
			Face.Contract.Face[] faces = await faceClient.DetectAsync( stream, analyzesAge: true, analyzesGender: true );

			VoiceGender voiceGender = VoiceGender.Male;
			if( faces.Length == 1 )
			{
				Face.Contract.FaceAttribute face = faces[ 0 ].Attributes;
				string greet;

				if( face.Gender == "male" )
				{
					greet = "Hello Handsome!";
					voiceGender = VoiceGender.Female;
				}
				else
				{
					greet = "Hey, Sexy!";
					voiceGender = VoiceGender.Male;
				}
				this.viewModel.Greet = $"{greet} You look {face.Age} today.";

				await this.SpeakAsync( this.viewModel.Greet, voiceGender, true );
			}
			else
			{
				this.viewModel.Greet = "I cannot see your face :(";
			}

			Shell.SetBusyVisibility( Visibility.Visible, "Detecting your emotions.." );
			
			Emotion.EmotionServiceClient emotionClient = new Emotion.EmotionServiceClient( EMOTION_API_KEY );

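			// Reopen the photo stream; the face-detection call above consumed the first one.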
			stream = await this.viewModel.PhotoFile.OpenStreamForReadAsync();
			Emotion.Contract.Emotion[] emotions = await emotionClient.RecognizeAsync( stream );
			if( emotions.Length == 1 )
			{
				Emotion.Contract.Scores scores = emotions[ 0 ].Scores;
				this.viewModel.Scores = scores;

				bool like = scores.Happiness > scores.Anger + scores.Sadness + scores.Disgust;

				this.viewModel.EvaluationResult = like
					? "So you liked it! I'm so happy to hear that! :)"
					: "Oh, really? I'm terribly sorry! :(";
				await this.SpeakAsync( this.viewModel.EvaluationResult, voiceGender, false );
			}
			else
			{
				this.viewModel.EvaluationResult = "I cannot see your emotions :(";
			}

			this.operationMode = OperationMode.Done;

			Shell.SetBusyVisibility( Visibility.Collapsed );
		}
Code Example #14
		/// <summary>
		/// Initializes a new instance of the <see cref="EmotionHelper"/> class.
		/// </summary>
		/// <param name="subscriptionKey">The subscription key.</param>
		public EmotionHelper(string subscriptionKey)
		{
			this.emotionClient = new EmotionServiceClient(subscriptionKey);
		}
Code Example #15
        /// <summary>
        /// Uploads the image to Project Oxford and detect emotions.
        /// </summary>
        /// <param name="imageFilePath">The image file path.</param>
        /// <returns></returns>
        private async Task<Emotion[]> UploadAndDetectEmotions(string imageFilePath)
        {
            MainWindow window = (MainWindow)Application.Current.MainWindow;
            string subscriptionKey = window.ScenarioControl.SubscriptionKey;

            window.Log("EmotionServiceClient is created");

            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE STARTS HERE
            // -----------------------------------------------------------------------

            //
            // Create Project Oxford Emotion API Service client
            //
            EmotionServiceClient emotionServiceClient = new EmotionServiceClient(subscriptionKey);

            window.Log("Calling EmotionServiceClient.RecognizeAsync()...");
            try
            {
                Emotion[] emotionResult;
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    //
                    // Detect the emotions in the image file stream
                    //
                    emotionResult = await emotionServiceClient.RecognizeAsync(imageFileStream);
                    return emotionResult;
                }
            }
            catch (Exception exception)
            {
                window.Log(exception.ToString());
                return null;
            }
            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE ENDS HERE
            // -----------------------------------------------------------------------

        }
Code Example #16
        /// <summary>
        /// Uploads the video to Project Oxford and detects emotions.
        /// </summary>
        /// <param name="videoFilePath">The video file path.</param>
        /// <returns></returns>
        private async Task<VideoAggregateRecognitionResult> UploadAndDetectEmotions(string videoFilePath)
        {
            MainWindow window = (MainWindow)Application.Current.MainWindow;
            string subscriptionKey = window.ScenarioControl.SubscriptionKey;

            window.Log("EmotionServiceClient is created");

            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE STARTS HERE
            // -----------------------------------------------------------------------

            //
            // Create Project Oxford Emotion API Service client
            //
            EmotionServiceClient emotionServiceClient = new EmotionServiceClient(subscriptionKey);

            window.Log("Calling EmotionServiceClient.RecognizeInVideoAsync()...");
            try
            {
                using (Stream videoFileStream = File.OpenRead(videoFilePath))
                {
                    //
                    // Upload the video, and tell the server to start recognizing emotions
                    //
                    window.Log("Start uploading video");
                    VideoEmotionRecognitionOperation videoOperation = await emotionServiceClient.RecognizeInVideoAsync(videoFileStream);
                    window.Log("Finished uploading video");


                    //
                    // Starts querying service status
                    //
                    VideoOperationResult result;
                    while (true)
                    {
                        result = await emotionServiceClient.GetOperationResultAsync(videoOperation);
                        if (result.Status == VideoOperationStatus.Succeeded || result.Status == VideoOperationStatus.Failed)
                        {
                            break;
                        }

                        window.Log(string.Format("Server status: {0}, wait {1} seconds...", result.Status, QueryWaitTime.TotalSeconds));
                        await Task.Delay(QueryWaitTime);
                    }

                    window.Log("Finish processing with server status: " + result.Status);

                    //
                    // Processing finished, checks result
                    // 
                    if (result.Status == VideoOperationStatus.Succeeded)
                    {
                        //
                        // Get the processing result by casting to the actual operation result
                        //
                        VideoAggregateRecognitionResult aggregateResult = ((VideoOperationInfoResult<VideoAggregateRecognitionResult>)result).ProcessingResult;
                        return aggregateResult;
                    }
                    else
                    {
                        // Failed
                        window.Log("Fail reason: " + result.Message);
                    }

                    return null;
                }
            }
            catch (Exception exception)
            {
                window.Log(exception.ToString());
                return null;
            }
            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE ENDS HERE
            // -----------------------------------------------------------------------

        }
Code Example #17
        private async Task<Emotion[]> UploadAndDetectEmotion(string subscriptionKey, Stream imageStream)
        {
            try
            {
                var emotionServiceClient = new EmotionServiceClient(subscriptionKey);

                var emotionResult = await emotionServiceClient.RecognizeAsync(imageStream);
                return emotionResult;
            }
            catch (Exception ex)
            {
                Debug.WriteLine($"Error {ex}");
                return null;
            }
        }
Code Example #18
    async Task AddEmotionBasedTagsToPhotoAsync(PhotoResult photoResult)
    {
      // See comment at bottom of file
      if (!string.IsNullOrEmpty(cognitiveServiceEmotionServiceKey))
      {
        EmotionServiceClient client = new EmotionServiceClient(
          cognitiveServiceEmotionServiceKey);

        // Open the photo file we just captured.
        using (var stream = await photoResult.PhotoFile.OpenStreamForReadAsync())
        {
          // Call the cloud looking for emotions.
          var results = await client.RecognizeAsync(stream);

          // We're only taking the first result here.
          var scores = results?.FirstOrDefault()?.Scores;

          if (scores != null)
          {
            // This object has properties called Sadness, Happiness,
            // Fear, etc. all with floating point values 0..1
            var publicProperties = scores.GetType().GetRuntimeProperties();

            // We'll have any property with a score > 0.5f.
            var automaticTags =
              publicProperties
                .Where(
                  property => (float)property.GetValue(scores) > 0.5)
                .Select(
                  property => property.Name)
                .ToList();

            if (automaticTags.Count > 0)
            {
              // Add them to our photo!
              await this.photoControl.AddTagsToPhotoAsync(
                photoResult.PhotoId,
                automaticTags);
            }
          }
        }
      }
    }
Code Example #19
        private async Task<Emotion[]> UploadAndDetectEmotion(Stream imageStream)
        {
            EmotionServiceClient = new EmotionServiceClient(Constants.EmotionApiKey);
            try
            {
                Emotion[] emotionResult;
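                // Note: the using block below disposes the caller's stream once recognition completes.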
                using (Stream imageFileStream = imageStream)
                {
                    emotionResult = await EmotionServiceClient.RecognizeAsync(imageFileStream);
                    return emotionResult;
                }
            }
            catch (Exception)
            {
                return new Emotion[0];
            }
        }
Code Example #20
        public async Task<ObservableCollection<Face>> UpdateEmotions(string selectedFile, ObservableCollection<Face> faces, 
            string subscriptionKeyEmotions)
        {
            using (var fileStreamEmotions = File.OpenRead(selectedFile))
            {
                var emotionServiceClient = new EmotionServiceClient(subscriptionKeyEmotions);
                var emotions = await emotionServiceClient.RecognizeAsync(fileStreamEmotions, faces.Select(
                    face => new Rectangle
                {
                    Height = face.Height,
                    Left = face.Left,
                    Top = face.Top,
                    Width = face.Width
                }).ToArray());
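                // Match each emotion result back to its face by comparing rectangle coordinates.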
                foreach (var emotion in emotions)
                {
                    foreach (var face in faces.Where(face => face.Height == emotion.FaceRectangle.Height &&
                                                             face.Left == emotion.FaceRectangle.Left &&
                                                             face.Top == emotion.FaceRectangle.Top &&
                                                             face.Width == emotion.FaceRectangle.Width))
                    {
                        face.Scores = emotion.Scores;
                        face.CalculateEmotion();
                    }
                }

                return faces;
            }
        }