/// <summary>
/// Takes the current camera frame, sends it to the combined cognitive client for
/// vision + emotion recognition, then shows and speaks the resulting description.
/// All recognition failures are mapped to user-facing status messages; this method
/// does not throw.
/// </summary>
public async Task DescribeImageAsync()
{
    IsBusy = true;
    StatusMessage = null;

    string baseDescription = null;
    string facesRecognizedDescription = null;
    string emotionDescription = null;

    MessengerInstance.Send(new NotificationMessage(Constants.TakingPhoto));

    try
    {
        try
        {
            StatusMessage = AppResources.QueryingVisionService;

            using (var stream = await streamingService.GetCurrentFrameAsync())
            {
                if (stream != null)
                {
                    var imageBytes = await stream.ToArrayAsync();
                    MessengerInstance.Send(new NotificationMessage<byte[]>(imageBytes, Constants.PhotoTaken));

                    if (await Network.IsInternetAvailableAsync())
                    {
                        // ToArrayAsync has just consumed the stream, so rewind it before the
                        // recognition service reads it again. NOTE(review): if ToArrayAsync
                        // already restores the position this is a harmless no-op — confirm
                        // against its implementation.
                        if (stream.CanSeek)
                        {
                            stream.Position = 0;
                        }

                        var result = await cognitiveClient.RecognizeAsync(stream, Language, RecognitionType.Vision | RecognitionType.Emotion, OnRecognitionProgress);
                        var visionResult = result.VisionResult;

                        if (visionResult.IsValid)
                        {
                            baseDescription = visionResult.Description;

                            if (visionResult.IsTranslated)
                            {
                                // Optionally keep the original (untranslated) text alongside the translation.
                                baseDescription = Settings.ShowOriginalDescriptionOnTranslation
                                    ? $"{visionResult.TranslatedDescription} ({visionResult.Description})"
                                    : visionResult.TranslatedDescription;
                            }

                            if (Settings.ShowDescriptionConfidence)
                            {
                                baseDescription = $"{baseDescription} ({Math.Round(visionResult.Confidence, 2)})";
                            }

                            // Analyzes emotion results. Materialized once so the sequence is not
                            // re-enumerated by the emptiness test, the loop and the count below.
                            var emotionResults = result.EmotionResults.ToList();
                            if (emotionResults.Count > 0)
                            {
                                var emotionMessages = new StringBuilder();
                                foreach (var emotionResult in emotionResults)
                                {
                                    // Creates the emotion description text to be spoken, if any.
                                    var emotionMessage = SpeechHelper.GetEmotionMessage(emotionResult.Gender, emotionResult.Age, emotionResult.Emotion);
                                    if (!string.IsNullOrWhiteSpace(emotionMessage))
                                    {
                                        emotionMessages.Append(emotionMessage);
                                    }
                                }

                                // Describes how many faces have been recognized.
                                facesRecognizedDescription = emotionResults.Count == 1
                                    ? AppResources.FaceRecognizedSingular
                                    : $"{string.Format(AppResources.FacesRecognizedPlural, emotionResults.Count)} {Constants.SentenceEnd}";

                                emotionDescription = emotionMessages.ToString();
                            }
                        }
                        else
                        {
                            // Recognition produced no usable description; optionally expose the raw one.
                            if (Settings.ShowRawDescriptionOnInvalidRecognition && visionResult.RawDescription != null)
                            {
                                baseDescription = $"{AppResources.RecognitionFailed} ({visionResult.RawDescription}, {Math.Round(visionResult.Confidence, 2)})";
                            }
                            else
                            {
                                baseDescription = AppResources.RecognitionFailed;
                            }
                        }
                    }
                    else
                    {
                        // Connection isn't available, the service cannot be reached.
                        baseDescription = AppResources.NoConnection;
                    }
                }
                else
                {
                    baseDescription = AppResources.UnableToTakePhoto;
                }
            }
        }
        catch (Microsoft.ProjectOxford.Vision.ClientException)
        {
            // Unable to access the service (typically due to invalid registration keys).
            baseDescription = AppResources.UnableToAccessService;
        }
        // Ordinal comparison: ToLower()-based equality is culture-sensitive and fails
        // under e.g. the Turkish locale.
        catch (Microsoft.ProjectOxford.Common.ClientException ex) when (string.Equals(ex.Error.Code, "unauthorized", StringComparison.OrdinalIgnoreCase))
        {
            // Unable to access the service (typically due to invalid registration keys).
            baseDescription = AppResources.UnableToAccessService;
        }
        catch (WebException)
        {
            // Internet isn't available, the service cannot be reached.
            baseDescription = AppResources.NoConnection;
        }
        catch (Exception ex)
        {
            var error = AppResources.RecognitionError;
            if (Settings.ShowExceptionOnError)
            {
                error = $"{error} ({ex.Message})";
            }

            baseDescription = error;
        }

        // Shows and speaks the result.
        var message = $"{baseDescription}{Constants.SentenceEnd} {facesRecognizedDescription} {emotionDescription}";
        StatusMessage = this.GetNormalizedMessage(message);

        await SpeechHelper.TrySpeechAsync(message);
    }
    finally
    {
        // Always clear the busy flag, even if normalization or speech throws;
        // previously an exception here would leave the UI stuck in the busy state.
        IsBusy = false;
    }
}
/// <summary>
/// Takes the current camera frame, analyzes it with the Vision service, optionally
/// translates the description, then queries the Emotion service for each detected
/// face. The combined result is shown and spoken. Face/emotion lookup is
/// best-effort; recognition failures are mapped to user-facing status messages and
/// this method does not throw.
/// </summary>
public async Task DescribeImageAsync()
{
    IsBusy = true;
    StatusMessage = null;

    var visionService = ViewModelLocator.VisionServiceClient;
    var emotionService = ViewModelLocator.EmotionServiceClient;
    var translatorService = ViewModelLocator.TranslatorService;

    string baseDescription = null;
    string facesRecognizedDescription = null;
    string emotionDescription = null;

    MessengerInstance.Send(new NotificationMessage(Constants.TakingPhoto));

    try
    {
        try
        {
            StatusMessage = AppResources.QueryingVisionService;

            using (var stream = await streamingService.GetCurrentFrameAsync())
            {
                if (stream != null)
                {
                    if (await Network.IsInternetAvailableAsync())
                    {
                        var imageBytes = await stream.ToArrayAsync();
                        MessengerInstance.Send(new NotificationMessage<byte[]>(imageBytes, Constants.PhotoTaken));

                        // ToArrayAsync has just consumed the stream, so rewind it before the
                        // Vision service reads it again. NOTE(review): if ToArrayAsync already
                        // restores the position this is a harmless no-op — confirm against its
                        // implementation.
                        if (stream.CanSeek)
                        {
                            stream.Position = 0;
                        }

                        var visualFeatures = new VisualFeature[] { VisualFeature.Description, VisualFeature.Faces };
                        var result = await visionService.AnalyzeImageAsync(stream, visualFeatures);

                        Caption originalDescription;
                        Caption filteredDescription;

                        if (result.IsValid(out originalDescription, out filteredDescription))
                        {
                            baseDescription = filteredDescription.Text;

                            if (Language != Constants.DefaultLanguge && IsTranslatorServiceRegistered)
                            {
                                // The description needs to be translated.
                                StatusMessage = AppResources.Translating;
                                var translation = await translatorService.TranslateAsync(filteredDescription.Text, from: Constants.DefaultLanguge, to: Language);

                                // Optionally keep the original (untranslated) text alongside the translation.
                                baseDescription = Settings.ShowOriginalDescriptionOnTranslation
                                    ? $"{translation} ({filteredDescription.Text})"
                                    : translation;
                            }

                            if (Settings.ShowDescriptionConfidence)
                            {
                                baseDescription = $"{baseDescription} ({Math.Round(filteredDescription.Confidence, 2)})";
                            }

                            try
                            {
                                // Materialized once so the sequence is not re-enumerated by the
                                // emptiness test, the loop and the counts below.
                                var faces = result.Faces?.ToList();

                                // If there is one or more faces, asks the service information about them.
                                if (IsEmotionServiceRegistered && faces?.Count > 0)
                                {
                                    StatusMessage = AppResources.RecognizingFaces;
                                    var messages = new StringBuilder();

                                    foreach (var face in faces)
                                    {
                                        // A fresh stream per call: the Emotion service consumes it.
                                        using (var ms = new MemoryStream(imageBytes))
                                        {
                                            var emotions = await emotionService.RecognizeAsync(ms, face.FaceRectangle.ToRectangle());
                                            var bestEmotion = emotions.FirstOrDefault()?.Scores.GetBestEmotion();

                                            // Creates the emotion description text to be spoken
                                            // (if there is interesting information).
                                            var emotionMessage = SpeechHelper.GetEmotionMessage(face, bestEmotion, includeAge: Settings.GuessAge);
                                            if (!string.IsNullOrWhiteSpace(emotionMessage))
                                            {
                                                messages.Append(emotionMessage);
                                            }
                                        }
                                    }

                                    // Checks if at least one emotion has been actually recognized.
                                    if (messages.Length > 0)
                                    {
                                        // Describes how many faces have been recognized.
                                        facesRecognizedDescription = faces.Count == 1
                                            ? AppResources.FaceRecognizedSingular
                                            : $"{string.Format(AppResources.FacesRecognizedPlural, faces.Count)} {Constants.SentenceEnd}";

                                        emotionDescription = messages.ToString();
                                    }
                                }
                            }
                            // Ordinal comparison: ToLower()-based equality is culture-sensitive
                            // and fails under e.g. the Turkish locale.
                            catch (Microsoft.ProjectOxford.Common.ClientException ex) when (string.Equals(ex.Error.Code, "unauthorized", StringComparison.OrdinalIgnoreCase))
                            {
                                // Unable to access the service (typically due to invalid registration keys).
                                baseDescription = AppResources.UnableToAccessService;
                            }
                            catch (Exception)
                            {
                                // Face/emotion details are deliberately best-effort: keep the base
                                // description already obtained and move on.
                            }
                        }
                        else
                        {
                            // Recognition produced no usable description; optionally expose the raw one.
                            if (Settings.ShowRawDescriptionOnInvalidRecognition && originalDescription != null)
                            {
                                baseDescription = $"{AppResources.RecognitionFailed} ({originalDescription.Text}, {Math.Round(originalDescription.Confidence, 2)})";
                            }
                            else
                            {
                                baseDescription = AppResources.RecognitionFailed;
                            }
                        }
                    }
                    else
                    {
                        // Internet isn't available, the service cannot be reached.
                        baseDescription = AppResources.NoConnection;
                    }
                }
                else
                {
                    baseDescription = AppResources.UnableToTakePhoto;
                }
            }
        }
        catch (WebException)
        {
            // Internet isn't available, the service cannot be reached.
            baseDescription = AppResources.NoConnection;
        }
        catch (ClientException)
        {
            // Unable to access the service (typically due to invalid registration keys).
            baseDescription = AppResources.UnableToAccessService;
        }
        catch (Exception ex)
        {
            var error = AppResources.RecognitionError;
            if (Settings.ShowExceptionOnError)
            {
                error = $"{error} ({ex.Message})";
            }

            baseDescription = error;
        }

        // Shows and speaks the result.
        var message = $"{baseDescription}{Constants.SentenceEnd} {facesRecognizedDescription} {emotionDescription}";
        StatusMessage = this.GetNormalizedMessage(message);

        await SpeechHelper.TrySpeechAsync(message);
    }
    finally
    {
        // Always clear the busy flag, even if normalization or speech throws;
        // previously an exception here would leave the UI stuck in the busy state.
        IsBusy = false;
    }
}