Example #1
0
        /// <summary>
        /// Runs OCR over the supplied photo and shows its text translated into English.
        /// </summary>
        /// <param name="language">Language code of the text expected in the image.</param>
        /// <param name="incomingImage">Stream containing the photo to OCR.</param>
        async Task Translate(string language, Stream incomingImage)
        {
            // First pass: extract the raw text from the image.
            var vision         = new VisionService();
            var recognizedText = await vision.OCRPhoto(incomingImage, language);

            // Second pass: translate the recognized text into English.
            var translator = new TranslateService();
            var english    = await translator.TranslateText(recognizedText, language, LanguageCodes.English);

            descriptionLabel.Text = english;
        }
        /// <summary>
        /// Detects text in an S3-hosted image via Textract and writes the English-to-German
        /// translation of every detected LINE block to the console.
        /// </summary>
        /// <param name="bucketName">S3 bucket containing the image.</param>
        /// <param name="s3File">Key of the image object inside the bucket.</param>
        internal void Handle(string bucketName, string s3File)
        {
            // GetAwaiter().GetResult() instead of Wait()/.Result: on failure it rethrows the
            // original exception rather than wrapping it in an AggregateException. (The caller
            // is synchronous, so we cannot simply await without changing the signature.)
            var detection = textractTextService.DetectTextS3(bucketName, s3File)
                                               .GetAwaiter().GetResult();

            // Keep only full-line blocks; Textract also returns PAGE and WORD block types.
            var sourceText = new StringBuilder();
            foreach (var block in detection.Blocks)
            {
                if (block.BlockType == "LINE")
                {
                    sourceText.AppendLine(block.Text);
                }
            }

            Console.WriteLine(sourceText.ToString());

            var translation = translateService.TranslateText(sourceText.ToString(), "en", "de")
                                              .GetAwaiter().GetResult();
            Console.WriteLine(translation.TranslatedText);
        }
        /// <summary>
        /// Starts live speech recognition on the microphone: partial transcriptions are shown
        /// in <c>englishText</c> and translated into the currently selected app language.
        /// </summary>
        void StartRecording()
        {
            // Cancel any in-flight recognition before starting a new session.
            if (recognitionTask != null)
            {
                recognitionTask.Cancel();
                recognitionTask = null;
            }

            var audioSession = AVAudioSession.SharedInstance();

            try
            {
                // Configure the shared session for simultaneous playback + recording.
                NSError err;
                audioSession.SetCategory(AVAudioSessionCategory.PlayAndRecord);
                audioSession.SetMode(AVAudioSession.ModeMeasurement, out err);
                audioSession.SetActive(true);
            }
            catch (Exception ex)
            {
                // Best effort: recognition may still work with default session settings,
                // but don't swallow the failure silently (was `var s = ex.ToString();`).
                Console.WriteLine($"Audio session setup failed: {ex}");
            }

            recognitionRequest = new SFSpeechAudioBufferRecognitionRequest();

            var inputNode = audioEngine.InputNode;

            // Surface intermediate hypotheses so the UI updates while the user speaks.
            recognitionRequest.ShouldReportPartialResults = true;

            recognitionTask = speechRecognizer.GetRecognitionTask(recognitionRequest, (result, error) =>
            {
                var isFinal = false;

                if (result != null)
                {
                    var inputtedText = result.BestTranscription.FormattedString;
                    englishText.Text = inputtedText;

                    var fromLanguage = LanguageCodes.English;
                    var toLangague   = AppDelegate.CurrentLanguage.LanguageCode;

                    // NOTE(review): fire-and-forget — a failed translation is dropped because
                    // the ContinueWith task is never observed. Acceptable for live captions.
                    translator.TranslateText(inputtedText, fromLanguage, toLangague).ContinueWith(async(arg) =>
                    {
                        var translated = await arg;

                        // UI must only be touched on the main thread.
                        InvokeOnMainThread(() => translatedText.Text = translated);
                    });

                    isFinal = result.Final;
                }

                // Tear down on error or once the utterance is complete.
                if (error != null || isFinal)
                {
                    audioEngine.Stop();
                    inputNode.RemoveTapOnBus(0);
                    recognitionRequest = null;
                    recognitionTask    = null;

                    askQuestion.Enabled = true;
                }
            });

            // Feed microphone buffers into the recognition request.
            var recordingFormat = inputNode.GetBusOutputFormat(0);

            inputNode.InstallTapOnBus(0, 1024, recordingFormat, (buffer, when) =>
            {
                recognitionRequest?.Append(buffer);
            });

            audioEngine.Prepare();

            try
            {
                // No need to pre-allocate an NSError for an `out` parameter.
                NSError err;
                audioEngine.StartAndReturnError(out err);

                englishText.Text    = "OK, here we go!";
                translatedText.Text = "";
            }
            catch (Exception ex)
            {
                // Previously an empty catch; at least record why the engine failed to start.
                Console.WriteLine($"Audio engine failed to start: {ex}");
            }
        }