/// <summary>
/// Handles playback state changes of the media element. When playback has
/// come to rest (paused, stopped, or closed), returns the control to its
/// default state and detaches this handler.
/// </summary>
/// <remarks>
/// async void is intentional here: this is a top-level event handler.
/// </remarks>
private async void MediaElement_CurrentStateChanged(object sender, RoutedEventArgs e)
{
    var current = this.MediaElement.CurrentState;
    bool playbackEnded =
        current == MediaElementState.Paused ||
        current == MediaElementState.Stopped ||
        current == MediaElementState.Closed;

    if (!playbackEnded)
    {
        return;
    }

    // Clear the Speaking flag up front so SetState's wait loop cannot block.
    this.state = SpeechDialogBoxState.Default;
    await this.SetState(SpeechDialogBoxState.Default);

    // One-shot handler: detach once playback has finished.
    this.MediaElement.CurrentStateChanged -= this.MediaElement_CurrentStateChanged;
}
/// <summary>
/// Speaks the specified SSML in the current voice.
/// </summary>
/// <remarks>The SSML may override the current voice.</remarks>
/// <param name="text">SSML markup describing the speech to synthesize.</param>
public async Task SpeakSsml(string text)
{
    this.state = SpeechDialogBoxState.Speaking;

    var synthesizer = new SpeechSynthesizer();

    // FindVoice touches UI-bound state, so resolve the voice on the dispatcher thread.
    await Dispatcher.RunAsync(
        Windows.UI.Core.CoreDispatcherPriority.Normal,
        () => { synthesizer.Voice = this.FindVoice(); });

    // Start synthesis; the completion handler takes over with the resulting stream.
    var synthesisOperation = synthesizer.SynthesizeSsmlToStreamAsync(text);
    synthesisOperation.Completed += SpeechSynthesis_Completed;
}
/// <summary>
/// Move to a new state.
/// </summary>
/// <param name="state">The state to transition the dialog box into.</param>
/// <remarks>
/// Waits (polling every 200 ms) until any in-progress speech has finished,
/// then swaps the visible state panel on the UI thread. In the Listening
/// state it also plays an audio cue and starts speech recognition.
/// NOTE(review): the dispatched lambda is async but DispatchedHandler returns
/// void, so RunAsync completes before the lambda's awaits do, and exceptions
/// thrown inside it are unobservable — confirm this is acceptable.
/// </remarks>
private async Task SetState(SpeechDialogBoxState state)
{
    // Do not interrupt while speaking.
    while (this.state == SpeechDialogBoxState.Speaking)
    {
        await Task.Delay(200);
    }

    this.state = state;
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, new DispatchedHandler(
        async () =>
        {
            // Hide all.
            this.DefaultState.Visibility = Visibility.Collapsed;
            this.TypingState.Visibility = Visibility.Collapsed;
            this.ListeningState.Visibility = Visibility.Collapsed;
            this.ThinkingState.Visibility = Visibility.Collapsed;

            switch (this.state)
            {
                case SpeechDialogBoxState.Default:
                    this.DefaultState.Visibility = Visibility.Visible;
                    break;
                case SpeechDialogBoxState.Typing:
                    this.TypingState.Visibility = Visibility.Visible;
                    break;
                case SpeechDialogBoxState.Listening:
                    this.ListeningState.Visibility = Visibility.Visible;

                    // Audible cue that listening has started.
                    // NOTE(review): path contains a double slash ("Assets//") — verify the asset resolves.
                    this.MediaElement.Source = new Uri("ms-appx:///Assets//Listening.wav");

                    // Build a recognizer from the currently configured constraints.
                    SpeechRecognizer recognizer = new SpeechRecognizer();
                    foreach (var constraint in this.Constraints)
                    {
                        recognizer.Constraints.Add(constraint);
                    }

                    await recognizer.CompileConstraintsAsync();

                    // Fire-and-forget recognition; the completion handler processes the result.
                    var reco = recognizer.RecognizeAsync();
                    reco.Completed += this.SpeechRecognition_Completed;
                    break;
                case SpeechDialogBoxState.Thinking:
                    this.ThinkingState.Visibility = Visibility.Visible;
                    break;
                default:
                    break;
            }
        }));
}
/// <summary>
/// Speaks the current text in the current voice and with the current ResponseTemplate.
/// </summary>
public async Task Speak()
{
    this.state = SpeechDialogBoxState.Speaking;

    var synthesizer = new SpeechSynthesizer();
    string utterance = string.Empty;

    // Voice lookup and the Text/ResponsePattern properties are UI-bound,
    // so read them on the dispatcher thread.
    await Dispatcher.RunAsync(
        Windows.UI.Core.CoreDispatcherPriority.Normal,
        () =>
        {
            synthesizer.Voice = this.FindVoice();
            utterance = string.Format(this.ResponsePattern, this.Text);
        });

    // Start synthesis; the completion handler plays the generated audio stream.
    var synthesisOperation = synthesizer.SynthesizeTextToStreamAsync(utterance);
    synthesisOperation.Completed += SpeechSynthesis_Completed;
}