Example 1: take a photo with the media picker and run OCR on it through the Cognitive Services client.
        private async Task TakePhotoAsync()
        {
            IsBusy = true;

            string recognizeText = null;

            try
            {
                using (var stream = await mediaPicker.TakePhotoAsync())
                {
                    if (stream != null)
                    {
                        var imageBytes = await stream.ToArrayAsync();

                        MessengerInstance.Send(new NotificationMessage<byte[]>(imageBytes, Constants.PhotoTaken));
                        Message = null;

                        if (await NetworkService.IsInternetAvailableAsync())
                        {
                            var result = await cognitiveClient.AnalyzeAsync(stream, Language, RecognitionType.Text);

                            var ocrResult = result.OcrResult;

                            if (ocrResult.ContainsText)
                            {
                                recognizeText = ocrResult.Text;
                            }
                            else
                            {
                                recognizeText = AppResources.UnableToRecognizeText;
                            }
                        }
                        else
                        {
                            // Internet isn't available, so the service cannot be reached.
                            recognizeText = AppResources.NoConnection;
                        }
                    }
                    else
                    {
                        // If message is null at this point, this is the first request. If the user cancels it,
                        // automatically go back to the previous page.
                        if (message == null)
                        {
                            AppNavigationService.GoBack();
                        }

                        IsBusy = false;
                        return;
                    }
                }
            }
            catch (CognitiveException ex)
            {
                // Unable to access the service (message contains translated error details).
                recognizeText = ex.Message;
            }
            catch (WebException)
            {
                // Internet isn't available, so the service cannot be reached.
                recognizeText = AppResources.NoConnection;
            }
            catch (Exception ex)
            {
                var error = AppResources.RecognitionError;

                if (Settings.ShowExceptionOnError)
                {
                    error = $"{error} ({ex.Message})";
                }

                recognizeText = error;
            }

            // Shows the result.
            Message = this.GetNormalizedMessage(recognizeText);
            IsBusy  = false;
        }
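
A minimal sketch of how TakePhotoAsync might be exposed to the view follows. The command name, the OcrViewModel constructor, and the use of MVVM Light's RelayCommand are illustrative assumptions (the MessengerInstance and IsBusy members above suggest an MVVM Light-style view model); they are not part of the original source.

        // using GalaSoft.MvvmLight.Command;   // RelayCommand (assumption: MVVM Light)

        // Hypothetical command wiring, not from the original project: the view binds
        // to TakePhotoCommand, which runs TakePhotoAsync and stays disabled while busy.
        public RelayCommand TakePhotoCommand { get; }

        public OcrViewModel()
        {
            TakePhotoCommand = new RelayCommand(
                async () => await TakePhotoAsync(),
                () => !IsBusy);
        }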
Example 2: grab the current camera frame and run combined vision, face, and emotion recognition on it.
        public async Task DescribeImageAsync()
        {
            IsBusy        = true;
            StatusMessage = null;

            string visionDescription          = null;
            string facesRecognizedDescription = null;
            string facesDescription           = null;

            MessengerInstance.Send(new NotificationMessage(Constants.TakingPhoto));

            try
            {
                StatusMessage = AppResources.QueryingVisionService;
                using (var stream = await streamingService.GetCurrentFrameAsync())
                {
                    if (stream != null)
                    {
                        var imageBytes = await stream.ToArrayAsync();

                        MessengerInstance.Send(new NotificationMessage<byte[]>(imageBytes, Constants.PhotoTaken));

                        if (await NetworkService.IsInternetAvailableAsync())
                        {
                            var result = await cognitiveClient.AnalyzeAsync(stream, Language, RecognitionType.Vision | RecognitionType.Face | RecognitionType.Emotion, OnRecognitionProgress);

                            var visionResult = result.VisionResult;
                            var faceResults  = result.FaceResults;

                            if (!faceResults.Any() || Settings.ShowDescriptionOnFaceIdentification)
                            {
                                // Gets the description only if no faces have been recognized or if the corresponding setting flag is set.
                                if (visionResult.IsValid)
                                {
                                    visionDescription = visionResult.Description;
                                    if (visionResult.IsTranslated)
                                    {
                                        if (Settings.ShowOriginalDescriptionOnTranslation)
                                        {
                                            visionDescription = $"{visionResult.TranslatedDescription} ({visionResult.Description})";
                                        }
                                        else
                                        {
                                            visionDescription = visionResult.TranslatedDescription;
                                        }
                                    }

                                    if (Settings.ShowDescriptionConfidence)
                                    {
                                        visionDescription = $"{visionDescription} ({Math.Round(visionResult.Confidence, 2)})";
                                    }
                                }
                                else
                                {
                                    if (Settings.ShowRawDescriptionOnInvalidRecognition && visionResult.RawDescription != null)
                                    {
                                        visionDescription = $"{AppResources.RecognitionFailed} ({visionResult.RawDescription}, {Math.Round(visionResult.Confidence, 2)})";
                                    }
                                    else
                                    {
                                        visionDescription = AppResources.RecognitionFailed;
                                    }
                                }

                                visionDescription = $"{visionDescription}{Constants.SentenceEnd}";
                            }

                            if (faceResults.Any())
                            {
                                // At least one face has been recognized.
                                var faceMessages = new StringBuilder();

                                foreach (var faceResult in faceResults)
                                {
                                    var faceMessage = SpeechHelper.GetFaceMessage(faceResult);
                                    faceMessages.Append(faceMessage);
                                }

                                // Describes how many faces have been recognized.
                                if (faceResults.Count() == 1)
                                {
                                    facesRecognizedDescription = AppResources.FaceRecognizedSingular;
                                }
                                else
                                {
                                    facesRecognizedDescription = $"{string.Format(AppResources.FacesRecognizedPlural, faceResults.Count())} {Constants.SentenceEnd}";
                                }

                                facesDescription = faceMessages.ToString();
                            }
                        }
                        else
                        {
                            // The connection isn't available, so the service cannot be reached.
                            visionDescription = AppResources.NoConnection;
                        }
                    }
                    else
                    {
                        visionDescription = AppResources.UnableToTakePhoto;
                    }
                }
            }
            catch (CognitiveException ex)
            {
                // Unable to access the service (message contains translated error details).
                visionDescription = ex.Message;
            }
            catch (WebException)
            {
                // Internet isn't available, so the service cannot be reached.
                visionDescription = AppResources.NoConnection;
            }
            catch (Exception ex)
            {
                var error = AppResources.RecognitionError;

                if (Settings.ShowExceptionOnError)
                {
                    error = $"{error} ({ex.Message})";
                }

                visionDescription = error;
            }

            // Shows and speaks the result.
            var message = $"{visionDescription} {facesRecognizedDescription} {facesDescription}";

            StatusMessage = this.GetNormalizedMessage(message);

            await SpeechHelper.TrySpeechAsync(message);

            IsBusy = false;
        }
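
Both examples broadcast the captured image through MessengerInstance before calling the service. Below is a hedged sketch of a possible receiver for that broadcast; only the NotificationMessage<byte[]> payload and the Constants.PhotoTaken token come from the code above, while the handler body and where it is registered are illustrative assumptions.

        // using GalaSoft.MvvmLight.Messaging;   // Messenger, NotificationMessage<T> (assumption: MVVM Light)

        // Hypothetical receiver, not from the original source: listens for the
        // PhotoTaken notification sent by the view models above and keeps the bytes,
        // for example to show a preview of the captured frame.
        Messenger.Default.Register<NotificationMessage<byte[]>>(this, message =>
        {
            if (message.Notification == Constants.PhotoTaken)
            {
                var photoBytes = message.Content;   // raw image bytes from the camera
                // ...convert photoBytes to an ImageSource and update the UI here.
            }
        });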