/// <summary>
/// Downloads a WAV file, runs continuous speech translation over it with the
/// Azure Speech SDK, and returns the last translated utterance.
/// </summary>
/// <param name="audioUrl">URL of the WAV file to download and translate.</param>
/// <param name="sourceLanguage">Speech recognition language code (e.g. "en-US").</param>
/// <param name="targetLanguage">Translation target language code.</param>
/// <returns>The last recognized/translated utterance, or null when nothing was translated.</returns>
public async Task<TranscriptUtterance> SpeechToTranslatedTextAsync(string audioUrl, string sourceLanguage, string targetLanguage)
{
    Transcripts.Clear();
    TranscriptUtterance utterance = null;

    var config = SpeechTranslationConfig.FromSubscription(_subscriptionKey, _region);
    config.SpeechRecognitionLanguage = sourceLanguage;
    config.AddTargetLanguage(targetLanguage);

    // BUGFIX: create the TCS with RunContinuationsAsynchronously so the awaiting
    // continuation does not run inline on the SDK callback thread that completes it.
    var stopTranslation = new TaskCompletionSource<int>(TaskCreationOptions.RunContinuationsAsynchronously);

    using (var audioInput = await AudioUtils.DownloadWavFileAsync(audioUrl))
    {
        using (var recognizer = new TranslationRecognizer(config, audioInput))
        {
            // Subscribes to events.
            recognizer.Recognized += (s, e) =>
            {
                if (e.Result.Reason == ResultReason.TranslatedSpeech)
                {
                    // BUGFIX: look up the requested target language instead of taking
                    // whatever translation happens to come first in the dictionary.
                    e.Result.Translations.TryGetValue(targetLanguage, out var translation);
                    utterance = new TranscriptUtterance
                    {
                        Recognition = e.Result.Text,
                        Translation = translation,
                    };
                }
                else if (e.Result.Reason == ResultReason.NoMatch)
                {
                    Trace.TraceError($"NOMATCH: Speech could not be translated.");
                }
            };

            recognizer.Canceled += (s, e) =>
            {
                if (e.Reason == CancellationReason.Error)
                {
                    // BUGFIX: previous message was copy-pasted from the text-decoder
                    // path in ConnectAsync and misdescribed the failure.
                    Trace.TraceError($"Translation canceled due to an error: {e.ErrorDetails}");
                }
                stopTranslation.TrySetResult(0);
            };

            recognizer.SessionStopped += (s, e) =>
            {
                Trace.TraceInformation("Session stopped event.");
                stopTranslation.TrySetResult(0);
            };

            await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

            // BUGFIX: Task.WaitAny blocked a thread-pool thread inside an async
            // method; awaiting the completion source keeps it fully asynchronous.
            await stopTranslation.Task.ConfigureAwait(false);

            // Stops translation.
            await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
            return utterance;
        }
    }
}
/// <summary>
/// Establishes the speech-translation client connection (no-op when already
/// connected) and wires up the handlers that route incoming binary audio and
/// text transcript messages.
/// </summary>
/// <param name="options">Connection options; must be a <see cref="SpeechTranslateClientOptions"/>.</param>
/// <exception cref="InvalidOperationException">Thrown when options is not a SpeechTranslateClientOptions.</exception>
private async Task ConnectAsync(SpeechClientOptions options)
{
    // Already connected — nothing to do.
    if (s2smtClient != null && s2smtClient.IsConnected())
    {
        return;
    }
    if (options.GetType() != typeof(SpeechTranslateClientOptions))
    {
        throw new InvalidOperationException("Type of SpeechClientOptions is not supported.");
    }

    // Refresh the auth token on every (re)connect.
    options.AuthHeaderValue = await AzureAuthenticationService.GetAccessToken(subscriptionKey);

    // Create the client
    s2smtClient = new SpeechClient((SpeechTranslateClientOptions)options, CancellationToken.None);

    TextMessageDecoder textDecoder = TextMessageDecoder.CreateTranslateDecoder();

    // Binary frames carry audio samples; forward them straight into the stream.
    s2smtClient.OnBinaryData += (c, a) => { AddSamplesToStream(a); };
    s2smtClient.OnEndOfBinaryData += (c, a) => { AddSamplesToStream(a); };

    // Text frames may arrive fragmented: buffer each piece, and record the
    // receive time so the caller can detect a stalled connection.
    s2smtClient.OnTextData += (c, a) =>
    {
        textDecoder.AppendData(a);
        lastReceivedPacketTick = DateTime.Now.Ticks;
    };
    // On end-of-text, decode the accumulated message asynchronously.
    s2smtClient.OnEndOfTextData += (c, a) =>
    {
        textDecoder.AppendData(a);
        lastReceivedPacketTick = DateTime.Now.Ticks;
        textDecoder
            .Decode()
            .ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    Trace.TraceError("Failed to decode incoming text message: {0}", t.Exception);
                }
                else
                {
                    object msg = t.Result;
                    TranscriptUtterance utterance = null;
                    if (msg.GetType() == typeof(FinalResultMessage))
                    {
                        var final = msg as FinalResultMessage;
                        // Audio offsets arrive as tick strings; convert them to times
                        // relative to the current file (currentFileStartTicks) for logging.
                        long offset = long.Parse(final.AudioTimeOffset);
                        long duration = long.Parse(final.AudioTimeSize);
                        TimeSpan currFileStartTime = TimeSpan.FromTicks(offset - currentFileStartTicks);
                        TimeSpan currFileEndime = TimeSpan.FromTicks(currFileStartTime.Ticks + duration);
                        Trace.TraceInformation("Final recognition {0} ({1} - {2}): {3}", final.Id, currFileStartTime.ToString(), currFileEndime.ToString(), final.Recognition);
                        Trace.TraceInformation("Final translation {0}: {1}", final.Id, final.Translation);
                        utterance = new TranscriptUtterance();
                        utterance.Recognition = final.Recognition;
                        utterance.Translation = final.Translation;
                    }
                    if (msg.GetType() == typeof(PartialResultMessage))
                    {
                        // Partial results are not used in this lab, leaving code as a reference
                        var partial = msg as PartialResultMessage;
                        Trace.TraceInformation("Partial recognition {0}: {1}", partial.Id, partial.Recognition);
                        Trace.TraceInformation("Partial translation {0}: {1}", partial.Id, partial.Translation);
                        utterance = new TranscriptUtterance();
                        utterance.Recognition = partial.Recognition;
                        utterance.Translation = partial.Translation;
                    }
                    // Only completed decodes of known message types are recorded.
                    if (utterance != null)
                    {
                        Transcripts.Add(utterance);
                    }
                }
            });
    };

    s2smtClient.Failed += (c, ex) =>
    {
        Trace.TraceError("SpeechTranslation client reported an error: {0}", ex);
    };
    s2smtClient.Disconnected += (c, ea) =>
    {
        Trace.TraceInformation("Connection has been lost.");
        Trace.TraceInformation($"Errors (if any): \n{string.Join("\n", s2smtClient.Errors)}");
    };

    await s2smtClient.Connect();
}
/// <summary>
/// Processes one recognition result: polishes the Chinese text, splits it into
/// chunks small enough for the translation API, translates each chunk via
/// Google, and appends the resulting utterance to the transcript.
/// </summary>
/// <param name="recResult">Raw recognition text (Chinese source).</param>
public void Start(string recResult)
{
    string text = recResult;
    string recognition = "";
    string translation = "";
    try
    {
        if (serviceFrom == "中文" && serviceTo == "英文" || serviceFrom == "中文" && serviceTo == "西班牙语")
        {
            recognition = polish(text);
            //Start Google
            // Split into chunks of at most ~74 chars, breaking preferentially at a
            // question mark before the limit, otherwise at the last comma that
            // keeps the chunk within the limit, so each request stays small.
            List<string> reco = new List<string>();
            reco.Add(recognition);
            int b = 0;
            while (reco[b].Length > 74 || reco[b].Contains("?"))
            {
                string buffer = reco[b];
                if (reco[b].Contains("?") && reco[b].IndexOf("?") < 74)
                {
                    if (reco[b].IndexOf("?") + 1 == reco[b].Length)
                    {
                        // The question mark is the last character — nothing to split off.
                        break;
                    }
                    else
                    {
                        reco.Add(reco[b].Substring(buffer.IndexOf("?") + 1));
                        reco[b] = reco[b].Remove(buffer.IndexOf("?") + 1);
                    }
                }
                else
                {
                    while (buffer.LastIndexOf(",") > 74)
                    {
                        buffer = buffer.Remove(buffer.LastIndexOf(","));
                    }
                    reco.Add(reco[b].Substring(buffer.LastIndexOf(",") + 1));
                    reco[b] = reco[b].Remove(buffer.LastIndexOf(",") + 1);
                }
                b++;
            }
            foreach (string value in reco)
            {
                translation += GoogleTranslate(value, "zh-CN", g_languageTo) + " ";
            }
            //End Google
        }
        else if (serviceFrom == "中文" && serviceTo == "中文")
        {
            // Same-language path: no translation call needed.
            recognition = polish(text);
            translation = recognition;
        }
    }
    catch (Exception e)
    {
        // BUGFIX: the exception used to be silently swallowed ('e' was unused).
        // Log it so translation failures are diagnosable; best-effort behavior is
        // kept — the utterance is still recorded with whatever was produced.
        Trace.TraceError("Start failed while translating '{0}': {1}", text, e);
        text = null;
    }
    TranscriptUtterance utterance = new TranscriptUtterance();
    utterance.Recognition = recognition;
    utterance.Translation = translation;
    utterance.Timespan = stopwatch.Elapsed;
    Transcript.Add(utterance);
    CallInOrderAsync(translation);
}
/// <summary>
/// Formats a trace line, writes it to the trace log, and mirrors it into the
/// on-screen feedback window (throttled), translating via Baidu when the
/// target language differs from the source.
/// NOTE(review): args.Length > 1 appears to mark a "final" result that gets
/// persisted to the transcript and always refreshes the UI — confirm against
/// the callers.
/// </summary>
private void WriteLine(string format, params object[] args)
{
    var formattedStr = string.Format(format, args);
    Trace.WriteLine(formattedStr);
    // All UI work must run on the dispatcher thread.
    Dispatcher.Invoke(() =>
    {
        string recognition = formattedStr;
        string translation = "";
        if (name != nameTo && (nameTo == "中文" || nameTo == "英文" || nameTo == "西班牙语"))
        {
            // Throttle: translate only every 15th call (or on finals) to limit
            // Baidu API requests.
            if (flag % 15 == 0 || args.Length > 1)
            {
                //Start Baidu translate API
                translation = GetResult(recognition);
                //End Baidu translate API
                //feedback.txtContent.Text = translation;
                feedback.txtContent.Text = buffer + translation + " ";
                if (args.Length > 1)
                {
                    // Finals are accumulated into the persistent display buffer.
                    buffer += translation + "\n";
                }
                Debug.WriteLine("*");
                flag = 0;
            }
            flag++;
        }
        else
        {
            // Same-language path: no translation; refresh the UI every 2nd call.
            if (flag % 2 == 0 || args.Length > 1)
            {
                //feedback.txtContent.Text = recognition;
                feedback.txtContent.Text = buffer + recognition + " ";
                if (args.Length > 1)
                {
                    buffer += recognition + /*"\n"*/ " ";
                }
                Debug.WriteLine("*");
                flag = 0;
            }
            flag++;
        }
        if (args.Length > 1)
        {
            // Final results are appended to the transcript with a timestamp.
            TranscriptUtterance utterance = new TranscriptUtterance();
            utterance.Recognition = recognition;
            utterance.Translation = translation;
            utterance.Timespan = stopwatch.Elapsed;
            Transcript.Add(utterance);
        }
        // Keep the newest text visible.
        feedback.txtContent.LineDown();
    });
    if (args.Length > 1)
    {
        // NOTE(review): blocks the calling thread for 1s after a final result —
        // presumably to pace output; confirm this is intentional.
        Thread.Sleep(1000);
        Debug.WriteLine("ID2: {0}", Thread.CurrentThread.ManagedThreadId);
    }
}