private async Task ConnectAsync(SpeechClientOptions options, bool suspendInputAudioDuringTTS)
{
    // Authenticate against the legacy Azure DataMarket ACS endpoint using the stored ClientID/ClientSecret.
    string admClientId = Properties.Settings.Default.ClientID;
    string admClientSecret = Properties.Settings.Default.ClientSecret;
    string ADMScope = "http://api.microsofttranslator.com";
    string ADMTokenUri = "https://datamarket.accesscontrol.windows.net/v2/OAuth2-13";
    ADMToken ADMAuthenticator = new ADMToken(ADMTokenUri, ADMScope);
    options.AuthHeaderValue = await ADMAuthenticator.GetToken(admClientId, admClientSecret);
    if (options.AuthHeaderValue.Length < 10)
    {
        SetMessage("Please enter ClientID and Secret in the Account Settings.", "", MessageKind.Error);
        UpdateUiState(UiState.MissingLanguageList);
    }

    // Create the client
    TextMessageDecoder textDecoder;
    if (options.GetType() == typeof(SpeechTranslateClientOptions))
    {
        s2smtClient = new SpeechClient((SpeechTranslateClientOptions)options, CancellationToken.None);
        textDecoder = TextMessageDecoder.CreateTranslateDecoder();
    }
    else
    {
        throw new InvalidOperationException("Type of SpeechClientOptions is not supported.");
    }

    if (ShowMiniWindow.IsChecked.Value)
    {
        miniwindow.Show();
    }

    // Binary messages carry the translated TTS audio; queue the samples for playback.
    s2smtClient.OnBinaryData += (c, a) => { AddSamplesToPlay(a, suspendInputAudioDuringTTS); };
    s2smtClient.OnEndOfBinaryData += (c, a) => { AddSamplesToPlay(a, suspendInputAudioDuringTTS); };

    // Text messages carry recognition/translation results; buffer fragments until the
    // end-of-message event, then decode the complete message asynchronously.
    s2smtClient.OnTextData += (c, a) => { textDecoder.AppendData(a); };
    s2smtClient.OnEndOfTextData += (c, a) =>
    {
        textDecoder.AppendData(a);
        textDecoder
            .Decode()
            .ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    Log(t.Exception, "E: Failed to decode incoming text message.");
                }
                else
                {
                    object msg = t.Result;
                    if (msg.GetType() == typeof(FinalResultMessage))
                    {
                        var final = msg as FinalResultMessage;
                        Log("Final recognition {0}: {1}", final.Id, final.Recognition);
                        Log("Final translation {0}: {1}", final.Id, final.Translation);
                        this.SafeInvoke(() => SetMessage(final.Recognition, final.Translation, MessageKind.Chat));
                        // Keep a rolling history of final translations, capped at 500 characters.
                        finaltranslationhistory = final.Translation + "\n" + finaltranslationhistory.Substring(0, Math.Min(500, finaltranslationhistory.Length));
                    }
                    if (msg.GetType() == typeof(PartialResultMessage))
                    {
                        var partial = msg as PartialResultMessage;
                        Log("Partial recognition {0}: {1}", partial.Id, partial.Recognition);
                        Log("Partial translation {0}: {1}", partial.Id, partial.Translation);
                        this.SafeInvoke(() => SetMessage(partial.Recognition, partial.Translation, MessageKind.Chat));
                    }
                }
            });
    };
    s2smtClient.Failed += (c, ex) =>
    {
        this.Log(ex, "E: SpeechTranslation client reported an error.");
    };
    s2smtClient.Disconnected += (c, ea) =>
    {
        this.SafeInvoke(() =>
        {
            // We only care to react to server disconnect when our state is Connected.
            if (this.currentState == UiState.Connected)
            {
                this.Log("E: Connection has been lost.");
                this.Disconnect();
            }
        });
    };

    await s2smtClient.Connect();
}
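// The overload below performs the same event wiring, but authenticates with an Azure
// subscription key (via AzureAuthenticationService) instead of the legacy DataMarket
// token endpoint, writes results to a Transcripts collection rather than driving the
// chat UI, and additionally guards against reconnecting an already-live client.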
private async Task ConnectAsync(SpeechClientOptions options)
{
    // Reuse the existing connection if one is already live.
    if (s2smtClient != null && s2smtClient.IsConnected())
    {
        return;
    }

    if (options.GetType() != typeof(SpeechTranslateClientOptions))
    {
        throw new InvalidOperationException("Type of SpeechClientOptions is not supported.");
    }

    // Authenticate with an Azure subscription key.
    options.AuthHeaderValue = await AzureAuthenticationService.GetAccessToken(subscriptionKey);

    // Create the client
    s2smtClient = new SpeechClient((SpeechTranslateClientOptions)options, CancellationToken.None);
    TextMessageDecoder textDecoder = TextMessageDecoder.CreateTranslateDecoder();

    // Binary messages carry TTS audio; append the samples to the output stream.
    s2smtClient.OnBinaryData += (c, a) => { AddSamplesToStream(a); };
    s2smtClient.OnEndOfBinaryData += (c, a) => { AddSamplesToStream(a); };

    // Text messages carry recognition/translation results; buffer fragments and record
    // the arrival time, then decode the complete message on the end-of-message event.
    s2smtClient.OnTextData += (c, a) =>
    {
        textDecoder.AppendData(a);
        lastReceivedPacketTick = DateTime.Now.Ticks;
    };
    s2smtClient.OnEndOfTextData += (c, a) =>
    {
        textDecoder.AppendData(a);
        lastReceivedPacketTick = DateTime.Now.Ticks;
        textDecoder
            .Decode()
            .ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    Trace.TraceError("Failed to decode incoming text message: {0}", t.Exception);
                }
                else
                {
                    object msg = t.Result;
                    TranscriptUtterance utterance = null;
                    if (msg.GetType() == typeof(FinalResultMessage))
                    {
                        var final = msg as FinalResultMessage;
                        // Convert the utterance's absolute audio offset/duration into
                        // timestamps relative to the start of the current file.
                        long offset = long.Parse(final.AudioTimeOffset);
                        long duration = long.Parse(final.AudioTimeSize);
                        TimeSpan currFileStartTime = TimeSpan.FromTicks(offset - currentFileStartTicks);
                        TimeSpan currFileEndTime = TimeSpan.FromTicks(currFileStartTime.Ticks + duration);
                        Trace.TraceInformation("Final recognition {0} ({1} - {2}): {3}", final.Id, currFileStartTime.ToString(), currFileEndTime.ToString(), final.Recognition);
                        Trace.TraceInformation("Final translation {0}: {1}", final.Id, final.Translation);
                        utterance = new TranscriptUtterance();
                        utterance.Recognition = final.Recognition;
                        utterance.Translation = final.Translation;
                    }
                    if (msg.GetType() == typeof(PartialResultMessage))
                    {
                        // Partial results are not used in this lab, leaving code as a reference
                        var partial = msg as PartialResultMessage;
                        Trace.TraceInformation("Partial recognition {0}: {1}", partial.Id, partial.Recognition);
                        Trace.TraceInformation("Partial translation {0}: {1}", partial.Id, partial.Translation);
                        utterance = new TranscriptUtterance();
                        utterance.Recognition = partial.Recognition;
                        utterance.Translation = partial.Translation;
                    }
                    if (utterance != null)
                    {
                        Transcripts.Add(utterance);
                    }
                }
            });
    };
    s2smtClient.Failed += (c, ex) =>
    {
        Trace.TraceError("SpeechTranslation client reported an error: {0}", ex);
    };
    s2smtClient.Disconnected += (c, ea) =>
    {
        Trace.TraceInformation("Connection has been lost.");
        Trace.TraceInformation($"Errors (if any): \n{string.Join("\n", s2smtClient.Errors)}");
    };

    await s2smtClient.Connect();
}
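// A minimal usage sketch of the subscription-key overload. CreateSpeechTranslateOptions
// is a hypothetical helper standing in for whatever code populates the
// SpeechTranslateClientOptions (host, source/target languages, features) before
// connecting; it is not defined above. ConnectAsync itself only sets AuthHeaderValue.
private async Task StartSessionAsync()
{
    SpeechClientOptions options = CreateSpeechTranslateOptions(); // hypothetical helper, not part of the sample
    // Safe to call repeatedly: the overload returns early if a connection is already live.
    await ConnectAsync(options);
}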