/// <summary>
        /// Initialize the speech recognizer and compile its list constraints.
        /// </summary>
        /// <returns>Awaitable task that completes when constraint compilation has finished.</returns>
        // BUGFIX: the XML doc previously documented a "recognizerLanguage" parameter
        // that this overload does not take; the bogus <param> tag has been removed.
        private async Task InitializeRecognizer()
        {
            if (speechRecognizer != null)
            {
                // cleanup prior to re-initializing this scenario: unhook all handlers
                // before disposing so no callback fires against a disposed recognizer.
                speechRecognizer.ContinuousRecognitionSession.Completed       -= ContinuousRecognitionSession_Completed;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;

                speechRecognizer.Dispose();
                speechRecognizer = null;
            }

            speechRecognizer = new SpeechRecognizer();

            // Provide feedback to the user about the state of the recognizer. This can be used to provide visual feedback in the form
            // of an audio indicator to help the user understand whether they're being heard.
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            // It's not valid to pause a list grammar recognizer and recompile the constraints without at least one
            // constraint in place, so create a permanent constraint.
            var goHomeConstraint = new SpeechRecognitionListConstraint(new List <string>()
            {
                "Go Home"
            }, "gohome");

            // These speech recognition constraints will be added and removed from the recognizer.
            emailConstraint = new SpeechRecognitionListConstraint(new List <string>()
            {
                "Send email"
            }, "email");
            phoneConstraint = new SpeechRecognitionListConstraint(new List <string>()
            {
                "Call phone"
            }, "phone");

            // Add some of the constraints initially, so we don't start with an empty list of constraints.
            speechRecognizer.Constraints.Add(goHomeConstraint);
            speechRecognizer.Constraints.Add(emailConstraint);

            SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();

            if (result.Status != SpeechRecognitionResultStatus.Success)
            {
                // Disable the recognition buttons.
                btnRecognize.IsEnabled = false;

                // Let the user know that the grammar didn't compile properly.
                resultTextBlock.Text = "Unable to compile grammar.";
            }

            // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
            // some recognized phrases occur, or the garbage rule is hit.
            speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
        }
Example #2
0
 /// <summary>
 /// Tears down the process-wide speech resources: disconnects the Twitch bot,
 /// then disposes the synthesizer, the recognizer, and the audio configuration.
 /// </summary>
 // NOTE(review): s_audioConfig is disposed last — presumably because the
 // recognizer/synthesizer were built on top of it; confirm before reordering.
 private static void DisposeAll()
 {
     s_twitchBot.Disconnect();
     s_speechSynthesizer.Dispose();
     s_speechRecognizer.Dispose();
     s_audioConfig.Dispose();
 }
Example #3
0
    // Unity per-frame update: handles Space-key teardown of the recognizer and
    // pumps recognition / NLU messages (presumably written by recognition
    // callbacks on other threads — hence the lock) into the UI text components.
    void Update()
    {
        if (Input.GetKeyUp(KeyCode.Space))
        {
#if UNITY_STANDALONE_WIN
            // NOTE(review): the stop call is fire-and-forget (not awaited) and the
            // recognizer is disposed immediately afterwards — confirm the SDK
            // tolerates Dispose while StopContinuousRecognitionAsync is in flight.
            recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
            recognizer.Dispose();
            recognizer = null;
#endif
        }

        // Serialize access to the shared message buffers with whatever writes them.
        lock (threadLocker)
        {
            if (resultText != null)
            {
                // Show in-progress (partial) recognition text as it streams in.
                if (partialResultMessage != "")
                {
                    resultText.text = partialResultMessage;
                }
                // A final utterance of >= 5 characters triggers the NLU round-trip;
                // both buffers are cleared so the result is processed exactly once.
                if (finalResultMessage != null && finalResultMessage != "" && finalResultMessage.Length >= 5)
                {
                    StartCoroutine(GetAndSayNluResponse(finalResultMessage));
                    resultText.text      = finalResultMessage;
                    finalResultMessage   = "";
                    partialResultMessage = "";
                    onFinalResult.Raise();
                }
                // Forward any pending NLU response text to its own UI element.
                if (nluMessage != null && nluMessage != "")
                {
                    nluText.text = nluMessage;
                    nluMessage   = "";
                }
            }
        }
    }
Example #4
0
        /// <summary>
        /// Releases every disposable resource owned by this instance and clears the
        /// corresponding fields, so repeated calls are harmless.
        /// </summary>
        public void Dispose()
        {
            // Null-conditional dispose followed by a null assignment is equivalent
            // to the original null-checked blocks and keeps the method idempotent.
            deviceResources?.Dispose();
            deviceResources = null;

            main?.Dispose();
            main = null;

            speechRecognizer?.Dispose();
            speechRecognizer = null;

            Synthesizer?.Dispose();
            Synthesizer = null;

            MediaPlayer?.Dispose();
            MediaPlayer = null;
        }
Example #5
0
 /// <summary>
 /// Releases the recognizer, but only when speech recognition was actually
 /// available on this device (otherwise no recognizer exists to release).
 /// </summary>
 public void Dispose()
 {
     if (!isRecognitionAvailable)
     {
         return;
     }

     recognizer.Dispose();
 }
Example #6
0
        /// <summary>
        /// Page teardown: cancels/stops any active recognition, stops audio playback,
        /// and disposes the speech recognizer and synthesizer before navigating away.
        /// </summary>
        /// <param name="e">Navigation event arguments (unused beyond the override contract).</param>
        protected async override void OnNavigatedFrom(NavigationEventArgs e)
        {
            if (IsRecognizing)
            {
                Cancellation?.Cancel();

                // BUGFIX: the original wrote "await SpeechRec?.StopRecognitionAsync();",
                // which throws NullReferenceException when SpeechRec is null because
                // awaiting a null Task is invalid. Guard explicitly instead.
                if (SpeechRec != null)
                {
                    await SpeechRec.StopRecognitionAsync();
                }
            }
            else
            {
                Cancellation?.Dispose();
                Cancellation = null;
            }

            AudioPlay.Stop();
            AudioPlay.Source = null;

            SpeechRec?.Dispose();
            SpeechRec = null;

            SpeechSynth?.Dispose();
            SpeechSynth                 = null;
            LoadTask                    = null;
            StatusText.Visibility       = Visibility.Collapsed;
            ListeningDisplay.Visibility = Visibility.Collapsed;
        }
Example #7
0
        /// <summary>
        /// Dispose(bool disposing) executes in two distinct scenarios.
        /// If disposing equals true, the method has been called directly or
        /// indirectly by a user's code, so managed and unmanaged resources can
        /// be released. If disposing equals false, the method has been called by
        /// the runtime from inside the finalizer and other objects must not be
        /// referenced; only unmanaged resources may be disposed.
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            // Guard clause: run the teardown at most once.
            if (this._disposed)
            {
                return;
            }

            // Note disposing has been done.
            _disposed = true;

            // Managed resources may only be disposed on an explicit Dispose() call.
            if (disposing)
            {
                _synth?.Dispose();
                _recon?.Dispose();
            }

            // Unmanaged cleanup would go here; drop the references regardless
            // of how we were invoked.
            _synth = null;
            _recon = null;
        }
Example #8
0
 /// <summary>
 /// Stops any active continuous dictation session, marshalled onto the Windows
 /// UI thread. If stopping fails, the recognizer is disposed and cleared so the
 /// next session starts from scratch; any outer failure is logged and ignored.
 /// </summary>
 public static void Stop()
 {
     Utils.RunOnWindowsUIThread(async() =>
     {
         try
         {
             if (speechRecognizer != null)
             {
                 try
                 {
                     if (isListening)
                     {
                         await speechRecognizer.ContinuousRecognitionSession.StopAsync();
                         isListening = false;
                     }
                 }
                 catch (Exception)
                 {
                     // StopAsync failed — recover by discarding the recognizer
                     // entirely so a fresh one is created on the next start.
                     speechRecognizer.Dispose();
                     speechRecognizer = null;
                     isListening      = false;
                 }
             }
         }
         catch (Exception ex)
         {
             // If stopping dictation crashes, it is safe to ignore: the scenario is done anyway.
             DebugLog.Log(LogLevel.Error, "Hit an exception in Speech::Stop and ignoring it.." + ex.ToString());
             return;
         }
     });
 }
        /// <summary>
        /// Ends the current session, then disposes the recognizer and clears the
        /// registered processors.
        /// </summary>
        // NOTE(review): "async void Dispose" means callers cannot await completion
        // and any exception from EndAsync is unobservable; the IDisposable contract
        // forces the void signature — consider IAsyncDisposable if available.
        public async void Dispose()
        {
            await EndAsync();

            _Recognizer?.Dispose();
            _Processors.Clear();
        }
    /// <summary>
    /// Stops the recognition on the speech recognizer or translator as applicable.
    /// Important: Unhook all events & clean-up resources.
    /// </summary>
    // NOTE(review): async void means callers cannot await this teardown or observe
    // its exceptions; acceptable only if used as an event-handler-style entry point.
    public async void StopRecognition()
    {
        if (recognizer != null)
        {
            // Stop the session first so no event can fire while handlers are unhooked.
            await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);

            recognizer.Recognizing         -= RecognizingHandler;
            recognizer.Recognized          -= RecognizedHandler;
            recognizer.SpeechStartDetected -= SpeechStartDetectedHandler;
            recognizer.SpeechEndDetected   -= SpeechEndDetectedHandler;
            recognizer.Canceled            -= CanceledHandler;
            recognizer.SessionStarted      -= SessionStartedHandler;
            recognizer.SessionStopped      -= SessionStoppedHandler;
            recognizer.Dispose();
            recognizer       = null;
            recognizedString = "Speech Recognizer is now stopped.";
            Debug.Log("Speech Recognizer is now stopped.");
        }
        // Mirror teardown for the translation recognizer.
        if (translator != null)
        {
            await translator.StopContinuousRecognitionAsync().ConfigureAwait(false);

            translator.Recognizing         -= RecognizingTranslationHandler;
            translator.Recognized          -= RecognizedTranslationHandler;
            translator.SpeechStartDetected -= SpeechStartDetectedHandler;
            translator.SpeechEndDetected   -= SpeechEndDetectedHandler;
            translator.Canceled            -= CanceledTranslationHandler;
            translator.SessionStarted      -= SessionStartedHandler;
            translator.SessionStopped      -= SessionStoppedHandler;
            translator.Dispose();
            translator       = null;
            recognizedString = "Speech Translator is now stopped.";
            Debug.Log("Speech Translator is now stopped.");
        }
    }
        /// <summary>
        /// (Re)creates the dictation speech recognizer for the requested language and
        /// compiles its dictation topic constraint.
        /// </summary>
        /// <param name="recognizerLanguage">Language the recognizer should listen in.</param>
        /// <returns>Awaitable task.</returns>
        private async Task InitializeRecognizer(Language recognizerLanguage)
        {
            // Tear down any previous recognizer: unhook every handler first so no
            // callback can arrive on a disposed instance.
            if (speechRecognizer != null)
            {
                speechRecognizer.HypothesisGenerated -= SpeechRecognizer_HypothesisGenerated;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
                speechRecognizer.ContinuousRecognitionSession.Completed       -= ContinuousRecognitionSession_Completed;
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
                speechRecognizer.Dispose();
                speechRecognizer = null;
            }

            speechRecognizer = new SpeechRecognizer(recognizerLanguage);

            // State changes drive the audio/visual "listening" feedback for the user.
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            // Dictation topic constraint: optimized for dictated freeform speech.
            speechRecognizer.Constraints.Add(
                new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "dictation"));

            SpeechRecognitionCompilationResult compilation = await speechRecognizer.CompileConstraintsAsync();

            if (compilation.Status != SpeechRecognitionResultStatus.Success)
            {
                // Grammar failed to compile; keep the UI from starting recognition.
                btnContinuousRecognize.IsEnabled = false;
            }

            // Completed fires on error states; ResultGenerated on recognized phrases or
            // the garbage rule; HypothesisGenerated on partial (in-progress) results.
            speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
            speechRecognizer.HypothesisGenerated += SpeechRecognizer_HypothesisGenerated;
        }
    /// <summary>
    /// Rebuilds the dictation recogniser for the given language, replacing any
    /// previously created instance.
    /// </summary>
    /// <param name="language">Language the recogniser should use.</param>
    private async void Setup(Language language)
    {
        // Release the previous recogniser first, unhooking handlers so no event
        // can fire against a disposed object.
        if (_recogniser != null)
        {
            _recogniser.HypothesisGenerated -= SpeechRecognizer_HypothesisGenerated;
            _recogniser.ContinuousRecognitionSession.ResultGenerated -= Recogniser_ResultGenerated;
            _recogniser.ContinuousRecognitionSession.Completed       -= Recogniser_Completed;
            _recogniser.Dispose();
            _recogniser = null;
        }

        _recogniser = new SpeechRecognizer(language);

        // Dictation topic: optimised for freeform speech.
        var dictationTopic = new SpeechRecognitionTopicConstraint(
            SpeechRecognitionScenario.Dictation, "dictation");
        _recogniser.Constraints.Add(dictationTopic);

        var compilation = await _recogniser.CompileConstraintsAsync();

        if (compilation.Status != SpeechRecognitionResultStatus.Success)
        {
            await ShowDialogAsync($"Grammar Compilation Failed: {compilation.Status.ToString()}");
        }

        _recogniser.ContinuousRecognitionSession.Completed       += Recogniser_Completed;
        _recogniser.ContinuousRecognitionSession.ResultGenerated += Recogniser_ResultGenerated;
        _recogniser.HypothesisGenerated += SpeechRecognizer_HypothesisGenerated;
    }
Example #13
0
        /// <summary>
        /// Code for voice recognition: initializes (n != 0) or tears down (n == 0)
        /// the grammar-file based continuous speech recognizer.
        /// </summary>
        /// <param name="n">0 to dispose the recognizer; any other value to (re)initialize it.</param>
        public async void InitSpeechRecognizer(int n)
        {
            if (n == 0)
            {
                // BUGFIX: the original disposed unconditionally (NRE if never
                // initialized), never unhooked the handler, and left the field
                // pointing at a disposed instance.
                if (Rec != null)
                {
                    Rec.ContinuousRecognitionSession.ResultGenerated -= Rec_ResultGenerated;
                    Rec.Dispose();
                    Rec = null;
                }
                return;
            }

            // BUGFIX: dispose any previous instance before overwriting the field,
            // otherwise re-initialization leaks the old recognizer.
            if (Rec != null)
            {
                Rec.ContinuousRecognitionSession.ResultGenerated -= Rec_ResultGenerated;
                Rec.Dispose();
            }

            Rec = new SpeechRecognizer();
            Rec.ContinuousRecognitionSession.ResultGenerated += Rec_ResultGenerated;

            // Load the grammar file shipped with the app package.
            StorageFile Store = await Package.Current.InstalledLocation.GetFileAsync(@"GrammarFile.xml");

            SpeechRecognitionGrammarFileConstraint constraint = new SpeechRecognitionGrammarFileConstraint(Store);

            Rec.Constraints.Add(constraint);
            SpeechRecognitionCompilationResult result = await Rec.CompileConstraintsAsync();

            if (result.Status == SpeechRecognitionResultStatus.Success)
            {
                status.Text = "Speech Recognition started.";
                tts(status.Text);
                Rec.UIOptions.AudiblePrompt = "Speech Recognition started.";
                await Rec.ContinuousRecognitionSession.StartAsync();
            }
        }
Example #14
0
        /// <summary>
        /// Tears down any previous recognizer, creates a dictation-constrained one,
        /// runs a single recognition, and forwards a successful result to ResultGenerated.
        /// </summary>
        private async void InitializeSpeechRecognizer()
        {
            try
            {
                if (speechRecognizer != null)
                {
                    // BUGFIX: the original called RecognizeAsync() here only to
                    // Cancel()/Close() the returned operation — which actually STARTS
                    // a brand-new recognition on a recognizer that is about to be
                    // disposed. Simply dispose the old instance.
                    this.speechRecognizer.Dispose();
                    this.speechRecognizer = null;
                }

                speechRecognizer = new SpeechRecognizer();
                // Dictation topic constraint optimizes for freeform dictated speech.
                var topicConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "Development");
                speechRecognizer.Constraints.Add(topicConstraint);
                await speechRecognizer.CompileConstraintsAsync();

                this.Operation = await speechRecognizer.RecognizeAsync();

                if (Operation.Status == SpeechRecognitionResultStatus.Success)
                {
                    ResultGenerated(Operation.Text);

                    // BUGFIX: the original also started-and-cancelled a new recognition
                    // here before disposing; the completed recognition needs no cancel.
                    speechRecognizer.Dispose();
                    speechRecognizer = null;
                }
            }
            catch (Exception ex)
            {
                // Best-effort by design, but surface the failure to the debugger
                // instead of swallowing it completely (the original catch was empty).
                System.Diagnostics.Debug.WriteLine("InitializeSpeechRecognizer failed: " + ex);
            }
        }
        /// <summary>
        /// Form constructor: loads the grammar, starts continuous recognition,
        /// hooks the recognition events, and configures speech synthesis.
        /// </summary>
        public Form1()
        {
            // Init Grammar
            grammarList.Add(grammarFile);
            Grammar grammar = new Grammar(new GrammarBuilder(grammarList));

            try
            {
                speechRecognitionEngine.RequestRecognizerUpdate();
                speechRecognitionEngine.LoadGrammarAsync(grammar);
                speechRecognitionEngine.SetInputToDefaultAudioDevice();
                speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);
                speechRecognitionEngine.SpeechRecognized          += SpeechRecognizedEvent;    //Subscriber speech recognized
                speechRecognitionEngine.SpeechRecognitionRejected += SpeechNotRecognizedEvent; //Subscriber speech not recognized
            }
            catch                                                                              //*********Exeptions***********
            {
                return;
            }

            //Custom Speech Sythesis Settings
            speechSynthesizer.SelectVoiceByHints(VoiceGender.Female);

            InitializeComponent();

            // BUGFIX: the original disposed speechSynthesizer, rec and
            // speechRecognitionEngine right here — immediately after starting
            // continuous recognition — which killed recognition and synthesis
            // before the form was ever used. The engines must stay alive for the
            // form's lifetime; dispose them in the form's close/teardown path.
        }
 /// <summary>
 /// Final stage of teardown: unhooks the session-stopped handler, releases the
 /// recognizer, and clears both state flags.
 /// </summary>
 private void FinishTeardown()
 {
     recognizer.SessionStopped -= Recognizer_SessionStopped;
     recognizer.Dispose();
     recognizer = null;

     // Teardown complete: neither stopping nor running any more.
     IsStopping = IsRunning = false;
 }
Example #17
0
        // Unload: stop any in-flight continuous recognition and release the recognizer.
        private async void MainPage_Unloaded(object sender, object args)
        {
            // BUGFIX: guard against being unloaded before the recognizer was ever
            // created — the original dereferenced it unconditionally and could throw.
            if (recognizer != null)
            {
                // Stop recognizing
                await recognizer.ContinuousRecognitionSession.StopAsync();

                recognizer.Dispose();
                recognizer = null;
            }
        }
Example #18
0
 /// <summary>
 /// Fires when the main window is unloaded.
 /// </summary>
 /// <param name="sender">The main window.</param>
 /// <param name="e">The event arguments.</param>
 private void Window_Unloaded(object sender, RoutedEventArgs e)
 {
     // Dispose the speech recognition engine, if one was ever created.
     speechRecognizer?.Dispose();
 }
Example #19
0
 /// <summary>
 /// Disposes the speech recognizer and clears the field when one exists;
 /// does nothing when the recognizer was never initialized.
 /// </summary>
 private void ResetRecognizerIfInitialized()
 {
     if (_speechRecognizer == null)
     {
         return;
     }

     _speechRecognizer.Dispose();
     _speechRecognizer = null;
 }
Example #20
0
 // Tears down the speech recognizer: unhook handlers, close the audio push
 // stream, then dispose the recognizer.
 // NOTE(review): _pushStream is closed before the recognizer is disposed —
 // presumably to end the audio feed first; confirm before reordering.
 void DisableSpeechRecognizer()
 {
     _speechRecognizer.Recognizing -= SpeechRecognizingHandler;
     _speechRecognizer.Recognized  -= SpeechRecognizedHandler;
     _speechRecognizer.Canceled    -= SpeechCanceledHandler;
     _pushStream.Close();
     _speechRecognizer.Dispose();
 }
 // Tears down the recognizer: unhook handlers, close the audio push stream,
 // then dispose the recognizer.
 // NOTE(review): pushStream is closed before the recognizer is disposed —
 // presumably to end the audio feed first; confirm before reordering.
 void Disable()
 {
     recognizer.Recognizing -= RecognizingHandler;
     recognizer.Recognized  -= RecognizedHandler;
     recognizer.Canceled    -= CanceledHandler;
     pushStream.Close();
     recognizer.Dispose();
 }
Example #22
0
 /// <summary>
 /// Tears down the speech recognizer, if any, and clears the field so the
 /// method can safely be called more than once.
 /// </summary>
 public void FinishSpeechRecognition()
 {
     speechRecognizer?.Dispose();
     speechRecognizer = null;
 }
        /// <summary>
        /// Stops continuous recognition and releases the recognizer.
        /// </summary>
        public async void Stop()
        {
            // BUGFIX: guard against Stop() being called before Start, or twice in
            // a row — the original dereferenced the recognizer unconditionally.
            if (recognizer == null)
            {
                return;
            }

            // Stop recognizing
            await recognizer.ContinuousRecognitionSession.StopAsync();

            recognizer.Dispose();
            recognizer = null;
        }
 /// <summary>
 /// Stops the speech recognition process and disposes of it, and ends the
 /// background thread by killing the while loop inside of it.
 /// </summary>
 public static void Stop()
 {
     // Nothing to do when recognition was never started.
     if (!IsStarted)
     {
         return;
     }

     recognizer.HypothesisGenerated -= Recognizer_HypothesisGenerated;
     recognizer.Dispose();
     IsStarted = false;
 }
 /// <summary>
 /// Unhooks the hypothesis handler and releases the recognizer; safe to call
 /// repeatedly because the field is cleared after disposal.
 /// </summary>
 public void Dispose()
 {
     if (_recognizer == null)
     {
         return;
     }

     _recognizer.HypothesisGenerated -= Recognizer_HypothesisGenerated;
     _recognizer.Dispose();
     _recognizer = null;
 }
Example #26
0
 /// <summary>
 /// Hooks the hypothesis event for the lifetime of the view model and registers
 /// a cleanup action that unhooks it and disposes the recognizer.
 /// </summary>
 /// <param name="dispatcher">Dispatcher forwarded to the base view model.</param>
 public SpeechRecognizerTestsViewModel(CoreDispatcher dispatcher)
     : base(dispatcher)
 {
     _speechRecognizer.HypothesisGenerated += HypothesisGenerated;
     // The cleanup lambda runs whenever the base class drains Disposables.
     Disposables.Add(() =>
     {
         _speechRecognizer.HypothesisGenerated -= HypothesisGenerated;
         _speechRecognizer.Dispose();
     });
 }
Example #27
0
        /// <summary>
        /// Releases the speech recognizer and its related resources.
        /// </summary>
        public void Dispose()
        {
            // Nothing to release when the recognizer was never created.
            if (_speechRecognizer == null)
            {
                return;
            }

            // Abort any recognition still in flight before disposing.
            CancelRecognitionOperation();

            _speechRecognizer.Dispose();
            _speechRecognizer = null;
        }
Example #28
0
        /// <summary>
        /// Unhooks the hypothesis handler and disposes the recognizer; repeated
        /// calls are harmless because the field is nulled after disposal.
        /// </summary>
        public void DisposeSpeechRecognizer()
        {
            if (_speechRecognizer == null)
            {
                return;
            }

            _speechRecognizer.HypothesisGenerated -= SpeechRecognizer_HypothesisGenerated;

            _speechRecognizer.Dispose();
            _speechRecognizer = null;
        }
Example #29
0
        /// <summary>
        /// Handles a recognized voice command: updates the colour button, speaks a
        /// confirmation, and forwards the colour command to the Arduino when
        /// connected. Saying "EXIT" shuts the application down.
        /// </summary>
        /// <param name="sender">The recognition engine raising the event.</param>
        /// <param name="e">Recognition result containing the spoken text.</param>
        void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            // The four colour branches were near-identical; they are now one
            // helper parameterized by colour, label, announcement, and serial command.
            switch (e.Result.Text.ToUpper())
            {
                case "RED":
                    ApplyColorCommand(Color.Red, "RED", "Turning red on!", "red");
                    break;
                case "YELLOW":
                    ApplyColorCommand(Color.Yellow, "YELLOW", "Turning yellow on!", "yellow");
                    break;
                case "GREEN":
                    ApplyColorCommand(Color.Green, "GREEN", "Turning green on!", "green");
                    break;
                case "EXIT":
                    synth.Speak("Thank you for testing me out! See you! Exiting Application...");
                    recognizer.Dispose();
                    serialPortArduino.Close();
                    this.Close();
                    break;
            }
        }

        /// <summary>Applies one colour command: UI update, spoken feedback, serial write.</summary>
        private void ApplyColorCommand(Color color, string label, string announcement, string serialCommand)
        {
            this.bntColor.BackColor = color;
            this.bntColor.Text      = label;
            synth.Speak(announcement);

            try
            {
                serialPortArduino.Write(serialCommand);
            }
            catch (Exception)
            {
                // Best effort: the Arduino may not be connected.
                // synth.Speak("Not Connected to Arduino!");
            }
        }
 /// <summary>
 /// Disables and releases the recognizer; a no-op when it is already gone.
 /// </summary>
 internal void Stop()
 {
     if (rec == null)
     {
         return;
     }

     rec.Enabled = false;
     // Method-group syntax is equivalent to "new EventHandler<...>(onSpeech)".
     rec.SpeechRecognized -= onSpeech;
     rec.Dispose();
     rec = null;
 }
        /// <summary>
        /// Creates (or re-creates) the shared speech recognizer with the localized
        /// "goto" page-navigation list constraint and compiles it.
        /// </summary>
        /// <returns>The compiled recognizer, or null when compilation or setup fails.</returns>
        public async static Task<SpeechRecognizer> InitRecognizer()
        {
            try
            {
                // Dispose any previous instance before re-initializing.
                if (null != recognizer)
                {
                    recognizer.Dispose();
                    recognizer = null;
                }
                recognizer = new SpeechRecognizer(SpeechRecognizer.SystemSpeechLanguage);
                recognizer.Constraints.Add(
                    new SpeechRecognitionListConstraint(
                        new List<string>()
                        {
                        speechResourceMap.GetValue("account page", speechContext).ValueAsString,
                        speechResourceMap.GetValue("audit page", speechContext).ValueAsString,
                        speechResourceMap.GetValue("finace page", speechContext).ValueAsString,
                        speechResourceMap.GetValue("transfer page", speechContext).ValueAsString
                        }, "goto"));

                SpeechRecognitionCompilationResult compilationResult = await recognizer.CompileConstraintsAsync();
                if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
                {
                    // Grammar failed to compile: tear down and report failure via null.
                    recognizer.Dispose();
                    recognizer = null;
                }

                return recognizer;
            }
            catch (Exception)
            {
                // BUGFIX: the original declared an unused "e" (compiler warning).
                // The intentional swallow-and-return-null contract is preserved.
                return null;
            }
        }
        /// <summary>
        /// Lazily creates the shared speech recognizer for the configured language.
        /// The new recognizer is built and compiled outside the lock; the lock only
        /// decides whether to publish it, or discard it if another caller won the race.
        /// </summary>
        public override async Task InitializeAsync()
        {
            if (speechRecognizer == null)
            {
                try
                {
                    var recognizer = new SpeechRecognizer(ConvertAILangToSystem(config.Language));
                    recognizer.StateChanged += Recognizer_StateChanged;

                    // INFO: Dictation is default Constraint
                    //var webSearchGrammar = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "dictation");
                    //recognizer.Constraints.Add(webSearchGrammar);

                    await recognizer.CompileConstraintsAsync();

                    // Publish-or-discard: only the first thread to reach this point
                    // installs its recognizer; later ones dispose their copy.
                    lock (speechRecognizerLock)
                    {
                        if (speechRecognizer == null)
                        {
                            speechRecognizer = recognizer;
                        }
                        else
                        {
                            recognizer.Dispose();
                        }
                    }
                }
                catch (Exception e)
                {
                    // HRESULT comparison identifies the "language not supported /
                    // not installed" failure and wraps it in a domain exception.
                    if ((uint)e.HResult == HRESULT_LANG_NOT_SUPPORTED)
                    {
                        throw new AIServiceException(string.Format("Specified language {0} not supported or not installed on device", config.Language.code), e);
                    }
                    throw;
                }

            }
        }
        /// <summary>
        /// Rebuilds the dictation recognizer, runs a single recognition, and forwards
        /// the first successful result to ResultGenerated exactly once.
        /// </summary>
        private async void InitializeSpeechRecognizer()
        {
            // Tear down any previous recognizer before re-initializing.
            if (speechRecognizer != null)
            {
                this.speechRecognizer.Dispose();
                this.speechRecognizer = null;
            }

            speechRecognizer = new SpeechRecognizer();
            // Dictation topic constraint optimizes for freeform dictated speech.
            // (Also fixes the "topicConstraing" local-name typo.)
            var topicConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "Development");
            speechRecognizer.Constraints.Add(topicConstraint);
            await speechRecognizer.CompileConstraintsAsync();

            var operation = await speechRecognizer.RecognizeAsync();
            if (!this.Completed && operation.Status == SpeechRecognitionResultStatus.Success)
            {
                this.Completed = true;
                ResultGenerated(operation.Text);

                // BUGFIX: the original called speechRecognizer.RecognizeAsync().Cancel()
                // here, which STARTS a brand-new recognition session only to cancel it.
                // The completed recognition needs no cancel; just dispose.
                speechRecognizer.Dispose();
                speechRecognizer = null;
            }
        }
        /// <summary>
        /// Rebuilds the dictation recognizer, runs a single recognition, and forwards
        /// a successful result to ResultGenerated.
        /// </summary>
        private async void InitializeSpeechRecognizer()
        {
            try
            {
                if (speechRecognizer != null)
                {
                    // BUGFIX: the original called RecognizeAsync() here only to
                    // Cancel()/Close() the returned operation — which actually STARTS
                    // a brand-new recognition on a recognizer that is about to be
                    // disposed. Simply dispose the old instance.
                    this.speechRecognizer.Dispose();
                    this.speechRecognizer = null;
                }

                speechRecognizer = new SpeechRecognizer();
                // Dictation topic constraint optimizes for freeform dictated speech.
                var topicConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "Development");
                speechRecognizer.Constraints.Add(topicConstraint);
                await speechRecognizer.CompileConstraintsAsync();

                this.Operation = await speechRecognizer.RecognizeAsync();
                if (Operation.Status == SpeechRecognitionResultStatus.Success)
                {
                    ResultGenerated(Operation.Text);

                    // BUGFIX: removed the start-and-cancel of a new recognition that
                    // preceded disposal in the original.
                    speechRecognizer.Dispose();
                    speechRecognizer = null;
                }
            }
            catch (Exception ex)
            {
                // Best-effort by design, but surface the failure to the debugger
                // instead of swallowing it completely (the original catch was empty).
                System.Diagnostics.Debug.WriteLine("InitializeSpeechRecognizer failed: " + ex);
            }
        }
		/// <summary>
		/// Runs the "Captains Log" voice loop: waits for the trigger phrase, then
		/// captures a dictated log entry via the system speech UI, until cancelled.
		/// </summary>
		private async void StartVoiceRecognition()
		{
			await SpeakText( "Say Captains Log at any time to create a log entry." );

			speechRecognizerCaptainsLogCommand = new SpeechRecognizer();

			// BUGFIX: the original added the command constraint and recompiled INSIDE
			// the loop, appending a duplicate constraint on every iteration. Compile
			// the trigger-phrase grammar once, up front.
			ISpeechRecognitionConstraint commandConstraint = 
				new SpeechRecognitionListConstraint( new[] { "Captains Log", "Computer Captains Log" } );
			speechRecognizerCaptainsLogCommand.Constraints.Add( commandConstraint );
			await speechRecognizerCaptainsLogCommand.CompileConstraintsAsync();

			while ( !cancellationSource.IsCancellationRequested )
			{
				// Listen for user to say "Captains Log"
				SpeechRecognitionResult commandResult = await speechRecognizerCaptainsLogCommand.RecognizeAsync();

				if ( commandResult.Status != SpeechRecognitionResultStatus.Success
					|| commandResult.Confidence == SpeechRecognitionConfidence.Rejected
					|| cancellationSource.IsCancellationRequested )
				{
					continue;
				}
				// Recognized user saying "Captains Log"

				// Listen for the user's dictation entry
				var captainsLogDictationRecognizer = new SpeechRecognizer();

				ISpeechRecognitionConstraint dictationConstraint = 
					new SpeechRecognitionTopicConstraint( 
						SpeechRecognitionScenario.Dictation, "LogEntry", "LogEntryDictation" );

				captainsLogDictationRecognizer.Constraints.Add( dictationConstraint );

				await captainsLogDictationRecognizer.CompileConstraintsAsync();

				captainsLogDictationRecognizer.UIOptions.ExampleText = "Boldly going where no man or woman has gone before.";
				captainsLogDictationRecognizer.UIOptions.AudiblePrompt = "Go ahead";
				captainsLogDictationRecognizer.UIOptions.IsReadBackEnabled = true;
				captainsLogDictationRecognizer.UIOptions.ShowConfirmation = true;

				try
				{
					SpeechRecognitionResult dictationResult = await captainsLogDictationRecognizer.RecognizeWithUIAsync();

					if ( dictationResult.Status != SpeechRecognitionResultStatus.Success
						|| dictationResult.Confidence == SpeechRecognitionConfidence.Rejected
						|| string.IsNullOrWhiteSpace( dictationResult.Text )
						|| cancellationSource.IsCancellationRequested )
					{
						continue;
					}
					// Recognized user's dictation entry

					AddLogEntry( dictationResult.Text );
				}
				finally
				{
					// Dispose on every exit path (the original duplicated this call
					// in both the reject path and the success path).
					captainsLogDictationRecognizer.Dispose();
				}
			}

			speechRecognizerCaptainsLogCommand.Dispose();
		}