/// <summary>
/// Handles completion of the continuous recognition session. On a silence
/// timeout the accumulated dictation is flushed to the Result callback; for
/// any other non-success status only Completed is raised. ('async void' is
/// acceptable here because this is a top-level event handler.)
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void Recogniser_Completed(
    SpeechContinuousRecognitionSession sender,
    SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    // A timeout means the user simply stopped talking, so the text gathered so
    // far is still a valid result; every other failure discards it. The two
    // original dispatcher lambdas differed only in the Result invocation, so
    // they are consolidated into a single dispatch.
    bool timedOut = args.Status == SpeechRecognitionResultStatus.TimeoutExceeded;

    await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        if (timedOut)
        {
            Result?.Invoke(_builder.ToString());
        }

        Completed?.Invoke();
        _listening = false;
    });
}
/// <summary>
/// Marks the recognition session as finished and notifies listeners of the
/// resulting session state.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
void OnContinuousRecognitionSessionCompleted(
    SpeechContinuousRecognitionSession sender,
    SpeechContinuousRecognitionCompletedEventArgs args)
{
    _isInRecognitionSession = false;

    var stateArgs = new StateChangedEventArgs(args);
    OnStateChanged(stateArgs);
}
/// <summary>
/// Reports the end of the continuous dictation session to the UI. A timeout
/// flushes the dictated text into the text box; any other failure only shows
/// the completion status. In both cases listening stops.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    bool timedOut = args.Status == SpeechRecognitionResultStatus.TimeoutExceeded;

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        if (timedOut)
        {
            rootPage.NotifyUser("Automatic Time Out of Dictation", NotifyType.StatusMessage);
            dictationTextBox.Text = dictatedTextBuilder.ToString();
        }
        else
        {
            rootPage.NotifyUser("Continuous Recognition Completed: " + args.Status.ToString(), NotifyType.StatusMessage);
        }

        isListening = false;
    });
}
/// <summary>
/// Clears the in-session flag and raises a state-changed notification built
/// from the session's completion arguments.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private void ContinuousRecognitionSession_Completed(
    SpeechContinuousRecognitionSession sender,
    SpeechContinuousRecognitionCompletedEventArgs args)
{
    IsInRecognitionSession = false;
    OnStateChanged(new StateChangedEventArgs(args));
}
/// <summary>
/// Method that runs when the recognition session is completed. If the session
/// ended because the silence timeout elapsed, the timeout is reported and the
/// trigger recognizer is restarted.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void CommandSessionCompleted(
    SpeechContinuousRecognitionSession sender,
    SpeechContinuousRecognitionCompletedEventArgs args)
{
    string text = args.Status.ToString();
    Debug.WriteLine("EndCommand -> " + text);

    // Compare the enum value directly instead of its string form; string
    // comparison silently breaks if the enum member is ever renamed.
    if (args.Status == SpeechRecognitionResultStatus.TimeoutExceeded)
    {
        OnResponseReceived(text);
        await StartTriggerRecognizer();
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="StateChangedEventArgs"/> class
/// with the specified recognition session state.
/// </summary>
/// <param name="args">The session state.</param>
/// <remarks>Use this overload for reporting changes in the session state.</remarks>
public StateChangedEventArgs(SpeechContinuousRecognitionCompletedEventArgs args)
{
    IsSessionState = true;
    IsSpeechRecognizerState = false;

    var status = args.Status;

    // Session-related properties: each flag is a direct projection of the
    // single completion status.
    SessionCompletedSuccessfully = status == SpeechRecognitionResultStatus.Success;
    AudioQualitySuccess = status != SpeechRecognitionResultStatus.AudioQualityFailure;
    UserCanceledSession = status == SpeechRecognitionResultStatus.UserCanceled;
    SessionTimedOut = status == SpeechRecognitionResultStatus.TimeoutExceeded;
    MicrophoneAvailable = status != SpeechRecognitionResultStatus.MicrophoneUnavailable;
    NetworkAvailable = status != SpeechRecognitionResultStatus.NetworkFailure;
}
/// <summary>
/// Initializes a new instance of the <see cref="StateChangedEventArgs"/> class
/// with the specified recognition session state.
/// </summary>
/// <param name="args">The session state.</param>
/// <remarks>Use this overload for reporting changes in the session state.</remarks>
public StateChangedEventArgs(SpeechContinuousRecognitionCompletedEventArgs args)
{
    IsSessionState = true;
    IsSpeechRecognizerState = false;

    // Remember the raw status, then derive every session flag from it.
    _status = args.Status;

    SessionCompletedSuccessfully = (_status == SpeechRecognitionResultStatus.Success);
    AudioQualitySuccess = !(_status == SpeechRecognitionResultStatus.AudioQualityFailure);
    UserCanceledSession = (_status == SpeechRecognitionResultStatus.UserCanceled);
    SessionTimedOut = (_status == SpeechRecognitionResultStatus.TimeoutExceeded);
    MicrophoneAvailable = !(_status == SpeechRecognitionResultStatus.MicrophoneUnavailable);
    NetworkAvailable = !(_status == SpeechRecognitionResultStatus.NetworkFailure);
}
/// <summary>
/// This method is called whenever the local recognizer generates a completed event.
/// If the session stopped for any reason other than success or the microphone
/// becoming unavailable, the trigger recognizer is relaunched.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private void localSessionCompleted(
    SpeechContinuousRecognitionSession sender,
    SpeechContinuousRecognitionCompletedEventArgs args)
{
    string text = args.Status.ToString();
    Debug.WriteLine("EndLocal -> " + text);

    // Compare enum values directly rather than their string names; string
    // comparison silently breaks if an enum member is ever renamed.
    if (args.Status != SpeechRecognitionResultStatus.Success &&
        args.Status != SpeechRecognitionResultStatus.MicrophoneUnavailable)
    {
        Debug.WriteLine("Relaunching");
        // NOTE(review): StartTriggerRecognizer appears to be asynchronous (it
        // is awaited elsewhere in this file); its task is not observed here
        // because this handler is synchronous — confirm that is acceptable.
        StartTriggerRecognizer();
    }
}
/// <summary>
/// On any non-success completion, pushes the dictated text gathered so far
/// back into the UI. The original code dispatched an identical action from
/// two separate branches (timeout and everything else); they are collapsed
/// into one dispatch with unchanged behavior.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private static async void ContinuousRecognitionSession_Completed(
    SpeechContinuousRecognitionSession sender,
    SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status != SpeechRecognitionResultStatus.Success)
    {
        await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            RtfTextHelper.Text = dictatedTextBuilder.ToString();
        });
    }
}
/// <summary>
/// Restarts continuous recognition whenever the session completes, keeping the
/// recognizer running indefinitely (the recognizer stops on its own after a
/// period of silence).
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // The session can complete for many reasons (timeout, error, cancel), so
    // log the actual status instead of unconditionally claiming "Timeout.".
    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        Logger.Log("Session completed: " + args.Status);
    });

    // NOTE(review): restarting unconditionally can loop forever on a
    // persistent failure (e.g. MicrophoneUnavailable) — confirm intended.
    await contSpeechRecognizer.ContinuousRecognitionSession.StartAsync();
}
/// <summary>
/// Resets the button caption and the listening flag on the UI thread when the
/// session ends for any reason other than success.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        ContinuousRecoButtonText.Text = " Continuous Recognition";
        isListening = false;
    });
}
/// <summary>
/// Restores the microphone icon once the recognizer has returned to idle.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (speechRecognizer.State != SpeechRecognizerState.Idle)
    {
        return;
    }

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        this.imgMode.Source = new BitmapImage(new Uri("ms-appx:///Assets/mic.png"));
    });
}
/// <summary>
/// Restores the dictation button caption when the session ends abnormally;
/// a silence timeout additionally flushes the dictated text into the text box.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    bool timedOut = args.Status == SpeechRecognitionResultStatus.TimeoutExceeded;

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        DictationButtonText.Text = " Continuous Recognition";
        if (timedOut)
        {
            DictationTextBox.Text = dictatedTextBuilder.ToString();
        }
    });
}
/// <summary>
/// Resets the speech-capture UI when the session ends without success. A
/// timeout (user silent too long — roughly 20 s in dictation mode) also
/// commits the dictated text to the text box.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    bool timedOut = args.Status == SpeechRecognitionResultStatus.TimeoutExceeded;

    await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
    {
        this.speechRecognitionControlButtonSymbol.Symbol = Symbol.Refresh;
        this.speechRecognitionTextBox.PlaceholderText = "";
        if (timedOut)
        {
            this.speechRecognitionTextBox.Text = dictatedTextBuilder.ToString();
        }
        this.isCapturingSpeech = false;
    });
}
/// <summary>
/// Callback when the continuous speech recognition session completed.
/// In the ideal case, this is because the app stopped the session after 7 seconds.
/// In that case, calculate the average score and show this as well as the recognized text
/// to the user.
/// In case the status is an error state, show the error to the user and reset the timer.
/// </summary>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    Debug.Write("Completed: " + args.Status);

    await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        if (args.Status != SpeechRecognitionResultStatus.Success)
        {
            TxtScore.Text = "Could not recognize what you said - please restart and try again! Reason: " + args.Status;
            ResetTimer();
            return;
        }

        const string resultText = "Your score: {0:P}\nI understood:\n{1}";
        TxtScore.Text = string.Format(resultText, _averageScore / _numSegments, _recognizedText.ToString());
    });
}
/// <summary>
/// Handle events fired when the session ends, either from a call to
/// CancelAsync() or StopAsync(), or an error condition, such as the
/// microphone becoming unavailable or some transient issues occuring.
/// Restores the UI controls on the dispatcher thread.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    string message = "Continuous Recognition Completed: " + args.Status.ToString();

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        rootPage.NotifyUser(message, NotifyType.StatusMessage);
        ContinuousRecoButtonText.Text = " Continuous Recognition";
        cbLanguageSelection.IsEnabled = true;
    });
}
/// <summary>
/// Restarts the speech-recognition process whenever the session ends for any
/// reason other than success.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    await startSRProcess();
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur. Reports the status, resets the recognize
/// button caption, and disables the grammar buttons.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        rootPage.NotifyUser("Continuous Recognition Completed: " + args.Status.ToString(), NotifyType.StatusMessage);
        recognizeButtonText.Text = " Continuous Recognition";
        btnEmailGrammar.IsEnabled = false;
        btnPhoneGrammar.IsEnabled = false;
        isListening = false;
    });
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    // TimeoutExceeded means the user was silent for too long (about 20 s in
    // dictation mode); treat it as a graceful stop and keep the dictated text.
    bool timedOut = args.Status == SpeechRecognitionResultStatus.TimeoutExceeded;

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        if (timedOut)
        {
            rootPage.NotifyUser("Automatic Time Out of Dictation", NotifyType.StatusMessage);
        }
        else
        {
            rootPage.NotifyUser("Continuous Recognition Completed: " + args.Status.ToString(), NotifyType.StatusMessage);
        }

        DictationButtonText.Text = " Dictate";
        cbLanguageSelection.IsEnabled = true;

        if (timedOut)
        {
            dictationTextBox.Text = dictatedTextBuilder.ToString();
        }

        isListening = false;
    });
}
/// <summary>
/// Traces that the continuous recognition session has completed.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    const string message = "Completed";
    Debug.WriteLine(message);
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur. On a dictation timeout the trip note is rolled
/// back to its original value.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // Only a silence timeout triggers the rollback (TimeoutExceeded is never
    // Success, so the original outer non-success check is implied); any other
    // failure leaves the note untouched.
    if (args.Status == SpeechRecognitionResultStatus.TimeoutExceeded)
    {
        await this.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            this.defaultViewModel.Trip.Notes = this.originalNote;
        });
    }
}
/// <summary>
/// Immediately restarts continuous recognition when the session completes, so
/// the recognizer effectively listens forever.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    var session = contSpeechRecognizer.ContinuousRecognitionSession;
    await session.StartAsync();
}
/// <summary>
/// Placeholder handler for session completion; the previous restart-on-complete
/// logic is currently disabled, so no action is taken.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // Intentionally empty.
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur. Restores the listening UI and shows the
/// localized completion status.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        edHearState.Text = $"{AppResources.GetString("RecognitionCompleted")} {AppResources.GetString(args.Status.ToString())}";
        BtnCancel.Visibility = Visibility.Collapsed;
        btnListen.Content = AppResources.GetString("Listen");
        btnListen.IsChecked = false;
        cbLanguageSelection.IsEnabled = true;
        isListening = false;
    });
}
/// <summary>
/// Handle events fired when the session ends, either from a call to
/// CancelAsync() or StopAsync(), or an error condition, such as the
/// microphone becoming unavailable or some transient issues occuring.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // TODO: surface session errors (e.g. microphone unavailable) to the user.
}
/// <summary>
/// Reports the session's completion status to the user on the UI thread.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    string message = "Continuous Recognition Completed: " + args.Status.ToString();

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        this.NotifyUser(message, NotifyType.StatusMessage);
    });
}
/// <summary>
/// Session-completed handler; no action is currently required here.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // Intentionally empty.
}
/// <summary>
/// Bridges the UWP session completion into Unity's dictation model: maps the
/// completion status onto DictationCompletionCause, queues a DictationComplete
/// event, and clears the accumulated dictation text. Completions from a
/// recognizer that is no longer current are ignored.
/// </summary>
async void ContinuousRecognitionSession_Completed(
    SpeechContinuousRecognitionSession sender,
    SpeechContinuousRecognitionCompletedEventArgs args)
{
    // A stale event can arrive after the recognizer was torn down or replaced.
    if (recognizer_ == null || recognizer_.ContinuousRecognitionSession != sender)
    {
        return;
    }

    System.Diagnostics.Debug.WriteLine("Completed :" + args.Status + " " + dictatedTextBuilder_.ToString() + " ");

    // Statuses with no Unity counterpart (Success, topic/grammar problems)
    // fall through to the default Complete, matching the original switch.
    UnityEngine.Windows.Speech.DictationCompletionCause dictationCompletionCause;
    switch (args.Status)
    {
        case SpeechRecognitionResultStatus.AudioQualityFailure:
            dictationCompletionCause = UnityEngine.Windows.Speech.DictationCompletionCause.AudioQualityFailure;
            break;
        case SpeechRecognitionResultStatus.UserCanceled:
            dictationCompletionCause = UnityEngine.Windows.Speech.DictationCompletionCause.Canceled;
            break;
        case SpeechRecognitionResultStatus.Unknown:
            dictationCompletionCause = UnityEngine.Windows.Speech.DictationCompletionCause.UnknownError;
            break;
        case SpeechRecognitionResultStatus.TimeoutExceeded:
            dictationCompletionCause = UnityEngine.Windows.Speech.DictationCompletionCause.TimeoutExceeded;
            break;
        case SpeechRecognitionResultStatus.PauseLimitExceeded:
            dictationCompletionCause = UnityEngine.Windows.Speech.DictationCompletionCause.PauseLimitExceeded;
            break;
        case SpeechRecognitionResultStatus.NetworkFailure:
            dictationCompletionCause = UnityEngine.Windows.Speech.DictationCompletionCause.NetworkFailure;
            break;
        case SpeechRecognitionResultStatus.MicrophoneUnavailable:
            dictationCompletionCause = UnityEngine.Windows.Speech.DictationCompletionCause.MicrophoneUnavailable;
            break;
        default:
            dictationCompletionCause = UnityEngine.Windows.Speech.DictationCompletionCause.Complete;
            break;
    }

    eventQue_.Enqueue(new RecoEvent_
    {
        eventType = RecoEvent_.EventType.DictationComplete,
        dictationCompletionCause = dictationCompletionCause,
    });

    dictatedTextBuilder_.Clear();
}
/// <summary>
/// Logs the completion status and restarts continuous recognition, unless the
/// recognizer has been explicitly stopped.
/// </summary>
/// <param name="speechContinousRecognationSession">The continuous recognition session.</param>
/// <param name="speechContinuousRecognationCompletedEventArgs">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession speechContinousRecognationSession, SpeechContinuousRecognitionCompletedEventArgs speechContinuousRecognationCompletedEventArgs)
{
    // An explicit stop must not be undone by an automatic restart.
    if (_isStopped)
    {
        return;
    }

    var status = speechContinuousRecognationCompletedEventArgs.Status;
    await Logger.Write($"SpeechRecognizer ContinousRecognationSession completed {status}");

    await _speechRecognizer.ContinuousRecognitionSession.StartAsync();
}
/// <summary>
/// Restarts continuous recognition once the recognizer has gone idle.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (speechRecognizer.State != SpeechRecognizerState.Idle)
    {
        return;
    }

    await speechRecognizer.ContinuousRecognitionSession.StartAsync();
}
/// <summary>
/// Occurs when speech recognizer has completed.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="args">The args.</param>
private void OnContinuousRecognitionSessionCompleted(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // No action needed on completion.
}
/// <summary>
/// Traces the completion status of the continuous recognition session.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    string status = args.Status.ToString();
    Debug.WriteLine("Continuous Recognition Session Completed: " + status);
}
/// <summary>
/// Shows the completion/error state in the UI when the session ends without
/// success; a silence timeout also commits the dictated text to the text box.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    bool timedOut = args.Status == SpeechRecognitionResultStatus.TimeoutExceeded;

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        checkError.Visibility = Visibility.Visible;
        errorCheck.Visibility = Visibility.Visible;

        if (timedOut)
        {
            errorCheck.Text = "Automatic Time out of Dictation";
        }
        else
        {
            errorCheck.Text = "Continuous Recognition Completed:" + args.Status.ToString();
        }

        StartTalkButtonText.Text = "Start Talk";

        if (timedOut)
        {
            dictationTextBox.Text = dictatedTextBuilder.ToString();
        }

        isListening = false;
    });
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur. Reports the status and re-enables language
/// selection.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        this.NotifyUser("Continuous Recognition Completed: " + args.Status.ToString(), NotifyType.StatusMessage);
        cbLanguageSelection.IsEnabled = true;
        isListening = false;
    });
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur. Resets the recognition UI and disables the
/// grammar buttons.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status != SpeechRecognitionResultStatus.Success)
    {
        string message = "Continuous Recognition Completed: " + args.Status.ToString();

        await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            rootPage.NotifyUser(message, NotifyType.StatusMessage);
            recognizeButtonText.Text = " Continuous Recognition";
            btnEmailGrammar.IsEnabled = false;
            btnPhoneGrammar.IsEnabled = false;
            isListening = false;
        });
    }
}
/// <summary>
/// Session-completed handler; intentionally left empty because no cleanup is
/// required when the session ends.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur. A silence timeout re-initializes the
/// recognizer for the system language and starts listening again.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // Only the silence timeout triggers a restart (TimeoutExceeded is never
    // Success, so the original outer non-success check is implied).
    if (args.Status != SpeechRecognitionResultStatus.TimeoutExceeded)
    {
        return;
    }

    // NOTE(review): the async lambda passed to RunAsync runs as async void, so
    // exceptions thrown inside it are unobservable — confirm this is intended.
    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
    {
        await InitializeRecognizer(SpeechRecognizer.SystemSpeechLanguage);
        isListening = true;
        await speechRecognizer.ContinuousRecognitionSession.StartAsync();
    });
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur (WPF variant using Dispatcher.InvokeAsync).
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    if (args.Status == SpeechRecognitionResultStatus.TimeoutExceeded)
    {
        // The user went silent for too long (about 20 s in dictation mode);
        // keep the dictated text and flag the timeout inline.
        await Dispatcher.InvokeAsync(() =>
        {
            btnContinuousRecognize.Content = "Speak";
            DictationTextBox.Text = dictatedTextBuilder.ToString() + "Automatic Time Out of Dictation";
            isListening = false;
        });
    }
    else
    {
        await Dispatcher.InvokeAsync(() =>
        {
            DictationTextBox.Text = "Continuous Recognition Completed: " + args.Status.ToString();
            btnContinuousRecognize.Content = "Speak";
            isListening = false;
        });
    }
}
/// <summary>
/// When dictation times out, resets the recognizer to state 0, re-initializes
/// it for the system language, and restarts continuous recognition.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    Debug.WriteLine("[COMPLETED] - " + args.Status.ToString());

    // Compare the enum value directly instead of its ToString() form; string
    // comparison silently breaks if the enum member is ever renamed.
    if (args.Status == SpeechRecognitionResultStatus.TimeoutExceeded)
    {
        reconState = 0;
        await InitializeRecognizer(SpeechRecognizer.SystemSpeechLanguage, reconState);
        try
        {
            await speechRecognizer.ContinuousRecognitionSession.StartAsync();
        }
        catch (Exception ex)
        {
            // Log the exception detail instead of leaving 'ex' unused (CS0168).
            Debug.WriteLine("[ERR] - On recognize starting... " + ex.Message);
        }
    }
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur. A silence timeout commits the dictated text
/// and immediately restarts dictation via the button handler.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    bool timedOut = args.Status == SpeechRecognitionResultStatus.TimeoutExceeded;

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        if (timedOut)
        {
            rootPage.NotifyUser("Automatic Time Out of Dictation", NotifyType.StatusMessage);
        }
        else
        {
            rootPage.NotifyUser("Continuous Recognition Completed: " + args.Status.ToString(), NotifyType.StatusMessage);
        }

        DictationButtonText.Text = " Dictate";
        cbLanguageSelection.IsEnabled = true;

        if (timedOut)
        {
            dictationTextBox.Text = dictatedTextBuilder.ToString();
            isListening = false;
            // Auto-restart dictation after a silence timeout.
            ContinuousRecognize_Click(null, null);
        }
        else
        {
            isListening = false;
        }
    });
}
/// <summary>
/// Handle events fired when the session ends, either from a call to
/// CancelAsync() or StopAsync(), or an error condition, such as the
/// microphone becoming unavailable or some transient issues occuring.
/// Reports the final status and re-enables the controls.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    string statusMessage = "Continuous Recognition Completed: " + args.Status.ToString();

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        rootPage.NotifyUser(statusMessage, NotifyType.StatusMessage);
        ContinuousRecoButtonText.Text = " Continuous Recognition";
        cbLanguageSelection.IsEnabled = true;
    });
}
/// <summary>
/// On a dictation timeout, informs the user via a dialog; on any other
/// non-success completion, restarts recognition once the recognizer is idle.
/// </summary>
/// <param name="sender">The continuous recognition session.</param>
/// <param name="args">Completion status of the session.</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    if (args.Status == SpeechRecognitionResultStatus.Success)
    {
        return;
    }

    if (args.Status == SpeechRecognitionResultStatus.TimeoutExceeded)
    {
        await CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(
            Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                MessageDialog dialog = new MessageDialog("Voice recognization time out and stop");
                // Fire-and-forget on purpose: the synchronous dispatcher
                // delegate cannot await the dialog. The discard makes that
                // explicit and silences the CS4014 warning.
                _ = dialog.ShowAsync();
            });
    }
    else
    {
        await CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(
            Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                if (speechRecognizer.State == SpeechRecognizerState.Idle)
                {
                    // Restart without awaiting; the delegate is synchronous,
                    // so the operation is intentionally unobserved.
                    _ = speechRecognizer.ContinuousRecognitionSession.StartAsync();
                }
            });
    }
}
/// <summary>
/// Handle events fired when error conditions occur, such as the microphone becoming unavailable, or if
/// some transient issues occur. A silence timeout rolls the trip notes back to
/// their original value.
/// </summary>
/// <param name="sender">The continuous recognition session</param>
/// <param name="args">The state of the recognizer</param>
private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // TimeoutExceeded is never Success, so the original outer non-success
    // check is implied by this single comparison.
    if (args.Status != SpeechRecognitionResultStatus.TimeoutExceeded)
    {
        return;
    }

    await this.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        this.defaultViewModel.Trip.Notes = this.originalNote;
    });
}