/// <inheritdoc/>
 public async Task ExecuteAsync(CancellationToken cancellationToken = default)
 {
     if (!_conceptValues.Any())
     {
         // Do not call translator service if there is nothing to translate
         return;
     }
     _translations = await _service.TranslateAsync(_conceptValues, _clientName, cancellationToken);
 }
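A minimal usage sketch for the snippet above, assuming it belongs to a hypothetical TranslationStep class that receives the concept values, the client name and the ITranslatorService through its constructor (these names are assumptions, not part of the source):

 var step = new TranslationStep(conceptValues, clientName, translatorService);

 using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
 {
     // The step returns immediately when there is nothing to translate.
     await step.ExecuteAsync(cts.Token);
 }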
Example #2
        public async Task<CognitiveResult> RecognizeAsync(Stream stream, string language, RecognitionType recognitionType = RecognitionType.Vision | RecognitionType.Emotion, Func<RecognitionPhase, Task> onProgress = null)
        {
            await this.RaiseOnProgressAsync(onProgress, RecognitionPhase.QueryingService);

            var visionService = new VisionServiceClient(Settings.VisionSubscriptionKey);
            var result        = new CognitiveResult();

            if (recognitionType.HasFlag(RecognitionType.Vision) || recognitionType.HasFlag(RecognitionType.Emotion))
            {
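                // Buffer the image bytes so they can be replayed later (one fresh stream per detected face for emotion recognition).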
                var imageBytes = await stream.ToArrayAsync();

                var features = new HashSet<VisualFeature> {
                    VisualFeature.Description
                };
                if (recognitionType.HasFlag(RecognitionType.Emotion))
                {
                    features.Add(VisualFeature.Faces);
                }

                // Avoid awaiting a null task when no settings provider has been registered.
                var visionSettings = VisionSettingsProvider != null ? await VisionSettingsProvider.GetSettingsAsync() : null;

                var analyzeImageResult = await visionService.AnalyzeImageAsync(stream, features);

                var visionResult = result.VisionResult;

                Caption originalDescription;
                Caption filteredDescription;

                var isValid = analyzeImageResult.IsValid(out originalDescription, out filteredDescription, visionSettings);

                visionResult.IsValid        = isValid;
                visionResult.RawDescription = originalDescription.Text;
                visionResult.Confidence     = originalDescription.Confidence;

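                // The filtered description (and, when needed, its translation) is exposed only for valid results.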
                if (isValid)
                {
                    visionResult.Description = filteredDescription.Text;

                    if (language != DefaultLanguge && IsTranslatorServiceRegistered)
                    {
                        // Make sure to use the updated translator subscription key.
                        translatorService.SubscriptionKey = Settings.TranslatorSubscriptionKey;

                        // The description needs to be translated.
                        await this.RaiseOnProgressAsync(onProgress, RecognitionPhase.Translating);

                        var translation = await translatorService.TranslateAsync(filteredDescription.Text, from: DefaultLanguge, to: language);

                        visionResult.TranslatedDescription = translation;
                    }
                }

                if (recognitionType.HasFlag(RecognitionType.Emotion))
                {
                    // If there are one or more faces, ask the service for information about them.
                    if (IsEmotionServiceRegistered && (analyzeImageResult.Faces?.Any() ?? false))
                    {
                        await this.RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingFaces);

                        var emotionService = new EmotionServiceClient(Settings.EmotionSubscriptionKey);

                        foreach (var face in analyzeImageResult.Faces)
                        {
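                            // Wrap the buffered bytes in a fresh stream so every emotion call gets its own readable copy of the image.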
                            using (var ms = new MemoryStream(imageBytes))
                            {
                                var emotions = await emotionService.RecognizeAsync(ms, face.FaceRectangle.ToRectangle());

                                var emotionResult = emotions.GetEmotionResult(face);
                                result.EmotionResults.Add(emotionResult);
                            }
                        }
                    }
                }
            }

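            // Text (OCR) recognition is requested independently of the Vision/Emotion analysis.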
            if (recognitionType.HasFlag(RecognitionType.Text))
            {
                await this.RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingText);

                var results = await visionService.RecognizeTextAsync(stream);

                var text = results.GetRecognizedText();

                result.OcrResult.Text = text;
            }

            return result;
        }
Example #3
        public static async Task<TranslationOrchestrationContext> TranslateDocument([ActivityTrigger] TranslationOrchestrationContext context, [Inject] ITranslatorService service, ILogger log)
        {
            context.TranslatedText = await service.TranslateAsync(context.SourceText, context.Language);

            return context;
        }
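A possible caller for the activity above is a Durable Functions orchestrator. The sketch below is an assumption (the function names, the way the input context is built and the Durable Functions 2.x API surface are not part of the original example); it only illustrates how the activity could be invoked:

        [FunctionName("TranslationOrchestrator")]
        public static async Task<TranslationOrchestrationContext> RunOrchestratorAsync([OrchestrationTrigger] IDurableOrchestrationContext context)
        {
            // Hypothetical input; in a real workflow it would come from the orchestration client.
            var input = new TranslationOrchestrationContext { SourceText = "Hello world", Language = "it" };

            // The activity is assumed to be registered under the name "TranslateDocument".
            return await context.CallActivityAsync<TranslationOrchestrationContext>("TranslateDocument", input);
        }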
Example #4
        public async Task DescribeImageAsync()
        {
            IsBusy = true;
            string message = null;

            if (IsOnline)
            {
                try
                {
                    MessengerInstance.Send(new NotificationMessage(Constants.PhotoTaken));

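                    // Grab the current frame from the streaming service (typically the device camera).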
                    using (var stream = await streamingService.GetCurrentFrameAsync())
                    {
                        if (stream != null)
                        {
                            StatusMessage = AppResources.QueryingVisionService;

                            var result = await visionServiceClient.DescribeAsync(stream);

                            StatusMessage = AppResources.VisionServiceQueried;

                            if (result.Description.Captions.Length > 0)
                            {
                                message       = result.Description.Captions.First().Text;
                                StatusMessage = message;

                                if (Settings.AutomaticTranslation && Language != Constants.DefaultLanguge)
                                {
                                    // The description needs to be translated.
                                    StatusMessage = AppResources.Translating;
                                    message       = await translatorService.TranslateAsync(message);
                                }
                            }
                            else
                            {
                                message = AppResources.RecognitionFailed;
                            }
                        }
                        else
                        {
                            message = AppResources.UnableToGetImage;
                        }
                    }
                }
                catch (Exception ex)
                {
                    var msg = ex.Message;
                    Debug.WriteLine(msg);

                    message = AppResources.RecognitionError;
                }
            }
            else
            {
                // Internet isn't available, so the service cannot be reached.
                message = AppResources.NoConnection;
            }

            // Shows and speaks the result.
            StatusMessage = message;
            await speechService.SpeechAsync(message, Language);

            IsBusy = false;
        }
Example #5
        public async Task DescribeImageAsync()
        {
            IsBusy        = true;
            StatusMessage = null;

            string baseDescription            = null;
            string facesRecognizedDescription = null;
            string emotionDescription         = null;

            MessengerInstance.Send(new NotificationMessage(Constants.TakingPhoto));

            try
            {
                StatusMessage = AppResources.QueryingVisionService;
                using (var stream = await streamingService.GetCurrentFrameAsync())
                {
                    if (stream != null)
                    {
                        if (await Network.IsInternetAvailableAsync())
                        {
                            var imageBytes = await stream.ToArrayAsync();

                            MessengerInstance.Send(new NotificationMessage<byte[]>(imageBytes, Constants.PhotoTaken));

                            var visualFeatures = new VisualFeature[] { VisualFeature.Description, VisualFeature.Faces };
                            var result         = await visionService.AnalyzeImageAsync(stream, visualFeatures);

                            Caption originalDescription;
                            Caption filteredDescription;

                            if (result.IsValid(out originalDescription, out filteredDescription))
                            {
                                baseDescription = filteredDescription.Text;

                                if (Language != Constants.DefaultLanguge && IsTranslatorServiceRegistered)
                                {
                                    // The description needs to be translated.
                                    StatusMessage = AppResources.Translating;
                                    var translation = await translatorService.TranslateAsync(filteredDescription.Text, from: Constants.DefaultLanguge, to: Language);

                                    if (Settings.ShowOriginalDescriptionOnTranslation)
                                    {
                                        baseDescription = $"{translation} ({filteredDescription.Text})";
                                    }
                                    else
                                    {
                                        baseDescription = translation;
                                    }
                                }

                                if (Settings.ShowDescriptionConfidence)
                                {
                                    baseDescription = $"{baseDescription} ({Math.Round(filteredDescription.Confidence, 2)})";
                                }

                                try
                                {
                                    // If there are one or more faces, ask the service for information about them.
                                    if (IsEmotionServiceRegistered && result.Faces?.Count() > 0)
                                    {
                                        StatusMessage = AppResources.RecognizingFaces;
                                        var messages = new StringBuilder();

                                        foreach (var face in result.Faces)
                                        {
                                            using (var ms = new MemoryStream(imageBytes))
                                            {
                                                var emotions = await emotionService.RecognizeAsync(ms, face.FaceRectangle.ToRectangle());

                                                var bestEmotion = emotions.FirstOrDefault()?.Scores.GetBestEmotion();

                                                // Creates the emotion description text to be spoken (if there is interesting information).
                                                var emotionMessage = SpeechHelper.GetEmotionMessage(face, bestEmotion, includeAge: Settings.GuessAge);
                                                if (!string.IsNullOrWhiteSpace(emotionMessage))
                                                {
                                                    messages.Append(emotionMessage);
                                                }
                                            }
                                        }

                                        // Checks if at least one emotion has been actually recognized.
                                        if (messages.Length > 0)
                                        {
                                            // Describes how many faces have been recognized.
                                            if (result.Faces.Count() == 1)
                                            {
                                                facesRecognizedDescription = AppResources.FaceRecognizedSingular;
                                            }
                                            else
                                            {
                                                facesRecognizedDescription = $"{string.Format(AppResources.FacesRecognizedPlural, result.Faces.Count())} {Constants.SentenceEnd}";
                                            }

                                            emotionDescription = messages.ToString();
                                        }
                                    }
                                }
                                catch (Microsoft.ProjectOxford.Common.ClientException ex) when (ex.Error.Code.ToLower() == "unauthorized")
                                {
                                    // Unable to access the service (typically, due to invalid registration keys).
                                    baseDescription = AppResources.UnableToAccessService;
                                }
                                catch
                                {
                                    // Any other failure of the Emotion service is ignored; the base description is still reported.
                                }
                            }
                            else
                            {
                                if (Settings.ShowRawDescriptionOnInvalidRecognition && originalDescription != null)
                                {
                                    baseDescription = $"{AppResources.RecognitionFailed} ({originalDescription.Text}, {Math.Round(originalDescription.Confidence, 2)})";
                                }
                                else
                                {
                                    baseDescription = AppResources.RecognitionFailed;
                                }
                            }
                        }
                        else
                        {
                            // Internet isn't available, the service cannot be reached.
                            baseDescription = AppResources.NoConnection;
                        }
                    }
                    else
                    {
                        baseDescription = AppResources.UnableToTakePhoto;
                    }
                }
            }
            catch (WebException)
            {
                // Internet isn't available, the service cannot be reached.
                baseDescription = AppResources.NoConnection;
            }
            catch (ClientException)
            {
                // Unable to access the service (typically, due to invalid registration keys).
                baseDescription = AppResources.UnableToAccessService;
            }
            catch (Exception ex)
            {
                var error = AppResources.RecognitionError;

                if (Settings.ShowExceptionOnError)
                {
                    error = $"{error} ({ex.Message})";
                }

                baseDescription = error;
            }

            // Shows and speaks the result.
            var message = $"{baseDescription}{Constants.SentenceEnd} {facesRecognizedDescription} {emotionDescription}";

            StatusMessage = this.GetNormalizedMessage(message);

            await SpeechHelper.TrySpeechAsync(message);

            IsBusy = false;
        }