/// <summary>
        /// When activating the scenario, ensure we have permission from the user to access their microphone, and
        /// provide an appropriate path for the user to enable access to the microphone if they haven't
        /// given explicit permission for it.
        /// Construct a recognizer with a simple list of recognized terms.
        /// </summary>
        /// <param name="e">The navigation event details</param>
        protected async override void OnNavigatedTo(NavigationEventArgs e)
        {
            bool permissionGained = await AudioCapturePermissions.RequestMicrophonePermission();

            if (permissionGained)
            {
                // enable the recognition buttons
                btnRecognizeWithUI.IsEnabled    = true;
                btnRecognizeWithoutUI.IsEnabled = true;
            }
            else
            {
                this.resultTextBlock.Visibility = Visibility.Visible;
                this.resultTextBlock.Text       = "Permission to access capture resources was not given by the user; reset the application setting in Settings->Privacy->Microphone.";
            }

            // Create an instance of SpeechRecognizer.
            speechRecognizer = new SpeechRecognizer();

            // Provide feedback to the user about the state of the recognizer.
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            // You could create any IEnumerable dynamically.
            string[] responses = { "Yes", "No" };

            // Add a list constraint to the recognizer.
            var listConstraint = new SpeechRecognitionListConstraint(responses, "yesOrNo");

            speechRecognizer.UIOptions.ExampleText = @"Ex. ""Yes"", ""No""";
            speechRecognizer.Constraints.Add(listConstraint);

            // Compile the constraint.
            await speechRecognizer.CompileConstraintsAsync();
        }
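Most of the samples on this page repeat the same basic flow: construct a SpeechRecognizer, add a SpeechRecognitionListConstraint, compile, then recognize. A condensed sketch of that shared pattern (the method name and phrases here are illustrative, not taken from any one sample):

        // Minimal list-constraint flow (illustrative; uses Windows.Media.SpeechRecognition).
        private async Task<string> RecognizeYesNoAsync()
        {
            using (var recognizer = new SpeechRecognizer())
            {
                recognizer.Constraints.Add(
                    new SpeechRecognitionListConstraint(new[] { "Yes", "No" }, "yesOrNo"));

                SpeechRecognitionCompilationResult compilation = await recognizer.CompileConstraintsAsync();
                if (compilation.Status != SpeechRecognitionResultStatus.Success)
                {
                    return null;
                }

                SpeechRecognitionResult result = await recognizer.RecognizeAsync();
                return result.Text;
            }
        }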
Example No. 2
        public async Task <bool> RegisterCortanaCommands(Dictionary <string, Action> commands)
        {
            cortanaCommands  = commands;
            SpeechRecognizer = new SpeechRecognizer();
            var constraint = new SpeechRecognitionListConstraint(cortanaCommands.Keys);

            SpeechRecognizer.Constraints.Clear();
            SpeechRecognizer.Constraints.Add(constraint);
            var result = await SpeechRecognizer.CompileConstraintsAsync();

            if (result.Status == SpeechRecognitionResultStatus.Success)
            {
                // Attach the result handler before starting the continuous session.
                SpeechRecognizer.ContinuousRecognitionSession.ResultGenerated += (s, e) =>
                {
                    if (e.Result.RawConfidence >= 0.5f)
                    {
                        Action handler;
                        if (cortanaCommands.TryGetValue(e.Result.Text, out handler))
                        {
                            Application.InvokeOnMain(handler);
                        }
                    }
                };
                await SpeechRecognizer.ContinuousRecognitionSession.StartAsync();

                return true;
            }
            return false;
        }
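A hypothetical call site for the method above; the phrases, handler bodies, and the speechService instance are placeholders:

        // Hypothetical usage; speechService, ShowSettings and CapturePhoto are placeholders.
        var commands = new Dictionary<string, Action>
        {
            { "open settings", () => ShowSettings() },
            { "take a photo",  () => CapturePhoto() }
        };
        bool started = await speechService.RegisterCortanaCommands(commands);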
Example No. 3
        /// <summary>
        /// Starts a speech recognition session that can recognize the topics on the checklist and possibly related words, if that functionality is available.
        /// Also adds the right callbacks to the speech recognizer.
        /// </summary>
        private async void startSpeechRecognition()
        {
            var constraint = new SpeechRecognitionListConstraint(app.getKeywords());

            if (speechRecognizer?.State == SpeechRecognizerState.Capturing)
            {
                await speechRecognizer.ContinuousRecognitionSession.StopAsync();
            }

            speechRecognizer = new SpeechRecognizer();
            speechRecognizer.Constraints.Add(constraint);
            var speechCompilationResult = await speechRecognizer.CompileConstraintsAsync();

            if (speechCompilationResult.Status != SpeechRecognitionResultStatus.Success)
            {
                Debug.WriteLine("Speech constraint compilation failed: " + speechCompilationResult.Status);
                return;
            }

            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += speechResultCallback;

            if (speechRecognizer.State == SpeechRecognizerState.Idle)
            {
                await speechRecognizer.ContinuousRecognitionSession.StartAsync();

                Debug.WriteLine("Started speech recognition session");
            }
            else
            {
                Debug.WriteLine("Speech recognizer is not idle, attemptint to reboot");
                await speechRecognizer.ContinuousRecognitionSession.StopAsync();

                await speechRecognizer.ContinuousRecognitionSession.StartAsync();
            }
        }
Example No. 4
        /// <summary>
        /// Initializes the speech recognizer.
        /// </summary>
        public async void Initialize()
        {
            // Local recognizer
            triggerRecognizer = new SpeechRecognizer();

            var list = new SpeechRecognitionListConstraint(activationPhrases);

            triggerRecognizer.Constraints.Add(list);
            await triggerRecognizer.CompileConstraintsAsync();

            triggerRecognizer.ContinuousRecognitionSession.Completed += localSessionCompleted;

            triggerRecognizer.ContinuousRecognitionSession.ResultGenerated +=
                LocalSessionResult;

            //triggerRecognizer.HypothesisGenerated += CommandHypothesisGenerated;

            // Command recognizer (web)
            speechRecognizer = new SpeechRecognizer();
            var result = await speechRecognizer.CompileConstraintsAsync();

            speechRecognizer.ContinuousRecognitionSession.ResultGenerated +=
                CommandResultGenerated;

            speechRecognizer.HypothesisGenerated += CommandHypothesisGenerated;

            speechRecognizer.ContinuousRecognitionSession.Completed +=
                CommandSessionCompleted;

            await StartTriggerRecognizer();

            OnResponseReceived(initText);
        }
Example No. 5
        private async Task InitializeRecognizer()
        {
            if (speechRecognizer != null)
            {
                this.speechRecognizer.Dispose();
                this.speechRecognizer = null;
            }

            // Create an instance of SpeechRecognizer.
            speechRecognizer = new SpeechRecognizer();
            // The set of responses to recognize.
            string[] responses = { "hey sanya", "what's up sanya" };

            // Add a list constraint to the recognizer.
            var listConstraint = new SpeechRecognitionListConstraint(responses, "AssistantName");

            speechRecognizer.Constraints.Add(listConstraint);

            // Compile the list constraint.
            SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();


            // Check to make sure that the constraints were in a proper format and the recognizer was able to compile it.
            if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
            {
                // Let the user know that the grammar didn't compile properly.
                resultTextBlock.Visibility = Visibility.Visible;
                resultTextBlock.Text       = "Unable to compile grammar.";
            }
        }
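Because the list constraint above carries a tag, a caller can check which constraint produced a result; a sketch (the call site is illustrative):

        // Sketch: branch on the tag of the matched constraint (illustrative call site).
        SpeechRecognitionResult result = await speechRecognizer.RecognizeAsync();
        if (result.Constraint?.Tag == "AssistantName")
        {
            // One of the wake phrases was recognized.
        }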
Example No. 6
        private async Task <SpeechRecognitionResult> RecognizeSpeech()
        {
            try
            {
                if (recognizer == null)
                {
                    recognizer = new SpeechRecognizer();
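                    // German phrases; e.g. "Welche Orte gibt es in meiner Nähe?" means "Which places are near me?"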
                    string[] possibleAnswers = { "Welche Orte gibt es in meiner Nähe?", "Welche sind diese?", "Welcher ist dieser?", "Welche Orte?", "Welche Orte genau?" };
                    var      listConstraint  = new SpeechRecognitionListConstraint(possibleAnswers, "Answer");
                    recognizer.UIOptions.ExampleText = @"Bsp. 'Welche Orte gibt es in meiner Nähe?'";
                    recognizer.Constraints.Add(listConstraint);

                    await recognizer.CompileConstraintsAsync();
                }
                SpeechRecognitionResult result = await recognizer.RecognizeWithUIAsync();

                return result;
            }
            catch (Exception exception)
            {
                const uint HResultPrivacyStatementDeclined = 0x80045509;
                if ((uint)exception.HResult == HResultPrivacyStatementDeclined)
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog("You must accept the speech privacy policy");
                    await messageDialog.ShowAsync();
                }
                else
                {
                    Debug.WriteLine("Error: " + exception.Message);
                }
            }
            return null;
        }
Example No. 7
        private void VoiceRec_Loaded(object sender, RoutedEventArgs e)
        {
            LoadTask = Task.Run(async() =>
            {
                Cancellation = new CancellationTokenSource();
                SpeechRec    = new SpeechRecognizer();
                SpeechSynth  = new SpeechSynthesizer();

                // Load the SRGS.grxml recognition grammar file.
                var GrammarFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///VoiceRec/SRGS.grxml"));

                // Create a grammar-file constraint from it and add it to the recognizer's constraint collection.
                var SRGSConstraint = new SpeechRecognitionGrammarFileConstraint(GrammarFile, "Control");
                SpeechRec?.Constraints.Add(SRGSConstraint);

                // To support playing music, pull the song names from the database and add them as a list constraint.
                var SongNames = await SQLite.GetInstance().GetAllMusicNameAsync();

                if (SongNames != null)
                {
                    // If there is music data, add the list constraint ("播放" = "play").
                    var PlayConstraint = new SpeechRecognitionListConstraint(from item in SongNames select string.Format("{0}{1}", "播放", item), "ChooseMusic");
                    SpeechRec?.Constraints.Add(PlayConstraint);
                }

                // Compile all grammar constraints.
                await SpeechRec.CompileConstraintsAsync();
            });
        }
Example No. 8

		private async void StartVoiceRecognition()
		{
			await SpeakText( "Say Captains Log at any time to create a log entry." );

			speechRecognizerCaptainsLogCommand = new SpeechRecognizer();

			// Listen for the user to say "Captains Log". Create and compile the
			// command constraint once, rather than re-adding it on every loop iteration.
			ISpeechRecognitionConstraint commandConstraint = 
				new SpeechRecognitionListConstraint( new[] { "Captains Log", "Computer Captains Log" } );
			speechRecognizerCaptainsLogCommand.Constraints.Add( commandConstraint );
			await speechRecognizerCaptainsLogCommand.CompileConstraintsAsync();

			while ( !cancellationSource.IsCancellationRequested )
			{
				SpeechRecognitionResult commandResult = await speechRecognizerCaptainsLogCommand.RecognizeAsync();

				if ( commandResult.Status != SpeechRecognitionResultStatus.Success
					|| commandResult.Confidence == SpeechRecognitionConfidence.Rejected
					|| cancellationSource.IsCancellationRequested )
				{
					continue;
				}
				// Recognized user saying "Captains Log"

				// Listen for the user's dictation entry
				var captainsLogDictationRecognizer = new SpeechRecognizer();

				ISpeechRecognitionConstraint dictationConstraint = 
					new SpeechRecognitionTopicConstraint( 
						SpeechRecognitionScenario.Dictation, "LogEntry", "LogEntryDictation" );

				captainsLogDictationRecognizer.Constraints.Add( dictationConstraint );

				await captainsLogDictationRecognizer.CompileConstraintsAsync();

				captainsLogDictationRecognizer.UIOptions.ExampleText = "Boldly going where no man or woman has gone before.";
				captainsLogDictationRecognizer.UIOptions.AudiblePrompt = "Go ahead";
				captainsLogDictationRecognizer.UIOptions.IsReadBackEnabled = true;
				captainsLogDictationRecognizer.UIOptions.ShowConfirmation = true;

				SpeechRecognitionResult dictationResult = await captainsLogDictationRecognizer.RecognizeWithUIAsync();

				if ( dictationResult.Status != SpeechRecognitionResultStatus.Success
					|| dictationResult.Confidence == SpeechRecognitionConfidence.Rejected
					|| string.IsNullOrWhiteSpace( dictationResult.Text )
					|| cancellationSource.IsCancellationRequested )
				{
					captainsLogDictationRecognizer.Dispose();

					continue;
				}
				// Recognized user's dictation entry

				AddLogEntry( dictationResult.Text );

				captainsLogDictationRecognizer.Dispose();
			}

			speechRecognizerCaptainsLogCommand.Dispose();
		}
Example No. 9

        /// <summary>
        /// Creates a SpeechRecognizer instance and initializes the grammar.
        /// </summary>
        private async Task InitializeRecognizer()
        {
            // Create an instance of SpeechRecognizer.
            speechRecognizer = new SpeechRecognizer();

            // Provide feedback to the user about the state of the recognizer.
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            // You could create any IEnumerable dynamically.
            string[] responses = { "Yes", "No" };

            // Add a list constraint to the recognizer.
            var listConstraint = new SpeechRecognitionListConstraint(responses, "yesOrNo");

            speechRecognizer.Constraints.Add(listConstraint);

            // RecognizeWithUIAsync allows developers to customize the prompts.
            speechRecognizer.UIOptions.ExampleText = @"Ex. ""Yes"", ""No""";

            // Compile the constraint.
            SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

            // Check to make sure that the constraints were in a proper format and the recognizer was able to compile it.
            if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
            {
                // Disable the recognition buttons.
                btnRecognizeWithUI.IsEnabled    = false;
                btnRecognizeWithoutUI.IsEnabled = false;

                // Let the user know that the grammar didn't compile properly.
                resultTextBlock.Visibility = Visibility.Visible;
                resultTextBlock.Text       = "Unable to compile grammar.";
            }
        }
Example No. 10
        private async Task <SpeechRecognitionResult> SpeechRecognizeAsync()
        {
            if (_speechRecognizer == null)
            {
                // Create an instance of SpeechRecognizer.
                _speechRecognizer = new SpeechRecognizer();

                // The collection of phrases we expect the user to say one of.
                var commands = new[] { "order", "product", "manage", "capture", "home" };

                // Create an instance of the constraint.
                // Pass the collection and an optional tag to identify it.
                var listConstraint = new SpeechRecognitionListConstraint(commands);

                // Add it to the recognizer.
                _speechRecognizer.Constraints.Add(listConstraint);

                // A pause/resume constraint could be added the same way:
                //var pauseConstraint = new SpeechRecognitionListConstraint(new[] { "Pause", "Resume" }, "pauseAndResume");
                //_speechRecognizer.Constraints.Add(pauseConstraint);

                // Compile the list constraint.
                await _speechRecognizer.CompileConstraintsAsync();
            }

            // Start recognition and return the result.
            return await _speechRecognizer.RecognizeWithUIAsync();
        }
Example No. 11
        private async void listen()
        {
            string[] SpeechInput    = { "Connect" };
            var      ListConstraint = new SpeechRecognitionListConstraint(SpeechInput, "connect");

            _recognizer.Constraints.Add(ListConstraint);

            // compile it
            await _recognizer.CompileConstraintsAsync();

            // Start recognition.
            SpeechRecognitionResult result = await _recognizer.RecognizeWithUIAsync();

            // Act on the result.
            Debug.WriteLine(result.Text);
            Debug.WriteLine("Listen do something done");
            if (result.Text == "Connect")
            {
                Debug.WriteLine("Trying to connect from voice commands.");
                ConnectBot();
            }

            // Alternatively, use continuous recognition:
            // _recognizer.ContinuousRecognitionSession.ResultGenerated += VoiceHandler;
            // await _recognizer.ContinuousRecognitionSession.StartAsync();
        }
Example No. 12
        private async Task InitializeSpeechRecognizer()
        {
            /* if (speechRecognizer != null)
             * {
             *   speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
             *   this.speechRecognizer.Dispose();
             *   this.speechRecognizer = null;
             * }
             */
            try
            {
                // Create an instance of the speech recognizer.
                speechRecognizer = new SpeechRecognizer();

                //speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

                //Add grammar file constraint to the recognizer.
                //  var storageFile = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///SRGSmusic.grxml"));
                //  var grammarfileConstraint = new Windows.Media.SpeechRecognition.SpeechRecognitionGrammarFileConstraint(storageFile, "music");
                string[] responses      = { "Play the song", "Introduce yourself", "Who are your creators", "Which day is it", "What is the temperature" };
                var      listConstraint = new SpeechRecognitionListConstraint(responses, "Action");
                //speechRecognizer.Constraints.Add(grammarfileConstraint);
                //resultTextBlock.Text = "Example play, pause";
                speechRecognizer.Constraints.Add(listConstraint);
                SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

                resultTextBlock.Text = "Made it this far";
                if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
                {
                    // Disable the recognition button.
                    btnContinuousRecognize.IsEnabled = false;

                    // Let the user know that the grammar didn't compile properly.
                    resultTextBlock.Text = "Unable to compile grammar.";
                }
                else
                {
                    resultTextBlock.Text = "Compilation Successful!";
                    // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
                    //speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);

                    btnContinuousRecognize.IsEnabled = true;
                }
            }
            catch (Exception ex)
            {
                if ((uint)ex.HResult == HResultRecognizerNotFound)
                {
                    btnContinuousRecognize.IsEnabled = false;

                    resultTextBlock.Visibility = Visibility.Visible;
                    resultTextBlock.Text       = "Speech Language pack for selected language not installed.";
                }
                else
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
                    await messageDialog.ShowAsync();
                }
            }
        }
Example No. 13
        private void SetupConstraints()
        {
            var onConstraint  = new SpeechRecognitionListConstraint(new[] { "lights on", "turn the lights on" }, "on");
            var offConstraint = new SpeechRecognitionListConstraint(new[] { "lights off", "turn the lights off", "off" });

            _speechRecognizer.Constraints.Add(onConstraint);
            _speechRecognizer.Constraints.Add(offConstraint);
        }
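A sketch of a ResultGenerated handler that branches on the tag defined above; the continuous-session wiring is assumed to happen elsewhere:

        // Sketch; assumes _speechRecognizer's continuous session is started elsewhere.
        private void OnResultGenerated(
            SpeechContinuousRecognitionSession sender,
            SpeechContinuousRecognitionResultGeneratedEventArgs args)
        {
            if (args.Result.Constraint?.Tag == "on")
            {
                // "lights on" / "turn the lights on" was recognized.
            }
        }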
Example No. 14
        /// <summary>
        /// Initialize the speech recognizer and compile constraints.
        /// </summary>
        /// <returns>Awaitable task.</returns>
        private async Task InitializeRecognizer()
        {
            if (speechRecognizer != null)
            {
                // cleanup prior to re-initializing this scenario.
                speechRecognizer.ContinuousRecognitionSession.Completed       -= ContinuousRecognitionSession_Completed;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;

                speechRecognizer.Dispose();
                speechRecognizer = null;
            }

            speechRecognizer = new SpeechRecognizer();

            // Provide feedback to the user about the state of the recognizer. This can be used to provide visual feedback in the form
            // of an audio indicator to help the user understand whether they're being heard.
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            // It's not valid to pause a list grammar recognizer and recompile the constraints without at least one
            // constraint in place, so create a permanent constraint.
            var goHomeConstraint = new SpeechRecognitionListConstraint(new List <string>()
            {
                "Go Home"
            }, "gohome");

            // These speech recognition constraints will be added and removed from the recognizer.
            emailConstraint = new SpeechRecognitionListConstraint(new List <string>()
            {
                "Send email"
            }, "email");
            phoneConstraint = new SpeechRecognitionListConstraint(new List <string>()
            {
                "Call phone"
            }, "phone");

            // Add some of the constraints initially, so we don't start with an empty list of constraints.
            speechRecognizer.Constraints.Add(goHomeConstraint);
            speechRecognizer.Constraints.Add(emailConstraint);

            SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();

            if (result.Status != SpeechRecognitionResultStatus.Success)
            {
                // Disable the recognition buttons.
                btnRecognize.IsEnabled = false;

                // Let the user know that the grammar didn't compile properly.
                resultTextBlock.Text = "Unable to compile grammar.";
            }

            // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
            // some recognized phrases occur, or the garbage rule is hit.
            speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
        }
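The comments above imply that emailConstraint and phoneConstraint get swapped at runtime; a sketch of that swap using the pause/recompile/resume pattern (the method name is illustrative):

        // Sketch: swap list constraints while the continuous session is paused.
        private async Task SwapToPhoneConstraintAsync()
        {
            await speechRecognizer.ContinuousRecognitionSession.PauseAsync();

            speechRecognizer.Constraints.Remove(emailConstraint);
            speechRecognizer.Constraints.Add(phoneConstraint);
            await speechRecognizer.CompileConstraintsAsync();

            speechRecognizer.ContinuousRecognitionSession.Resume();
        }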
Example No. 15

        public async Task <bool> InitializeRecognizerAsync()
        {
            Debug.WriteLine("[Speech to Text]: initializing Speech Recognizer...");
            var language = new Windows.Globalization.Language(_languageName);

            _recognizer = new SpeechRecognizer(language);
            // Set timeout settings.
            _recognizer.Timeouts.InitialSilenceTimeout = TimeSpan.FromSeconds(_recognizerInitialSilenceTimeOutInSeconds);
            _recognizer.Timeouts.BabbleTimeout         = TimeSpan.FromSeconds(_recognizerBabbleTimeoutInSeconds);
            _recognizer.Timeouts.EndSilenceTimeout     = TimeSpan.FromSeconds(_recognizerEndSilenceTimeoutInSeconds);
            // Set UI text
            _recognizer.UIOptions.AudiblePrompt = "Say what you want to do...";

            if (!this.IsOffline())
            {
                // This requires internet connection
                SpeechRecognitionTopicConstraint topicConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "Development");
                _recognizer.Constraints.Add(topicConstraint);
            }
            else
            {
                // In case of network issue
                string[] responses =
                {
                    "I would like to rent a bike",
                    "I want to rent a bike",
                    "I'd like to rent a bike",
                    "rent a bike",
                    "I would like to rent a bicycle",
                    "I want to rent a bicycle",
                    "I'd like to rent a bicycle",
                    "rent a bicycle"
                };

                // Add a list constraint to the recognizer.
                var listConstraint = new SpeechRecognitionListConstraint(responses, "rentBikePhrases");
                _recognizer.Constraints.Add(listConstraint);
            }

            SpeechRecognitionCompilationResult result = await _recognizer.CompileConstraintsAsync();   // Required

            if (result.Status != SpeechRecognitionResultStatus.Success)
            {
                Debug.WriteLine("[Speech to Text]: Grammar Compilation Failed: " + result.Status.ToString());
                return false;
            }

            _recognizer.HypothesisGenerated += Recognizer_HypothesisGenerated;
            _recognizer.StateChanged        += Recognizer_StateChanged;
            _recognizer.ContinuousRecognitionSession.ResultGenerated += (s, e) => { Debug.WriteLine($"[Speech to Text]: recognizer results: {e.Result.Text}, {e.Result.RawConfidence.ToString()}, {e.Result.Confidence.ToString()}"); };
            Debug.WriteLine("[Speech to Text]: done initializing Speech Recognizer");
            return true;
        }
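The IsOffline() helper isn't shown; one plausible UWP implementation uses NetworkInformation (an assumption, not the original code):

        // Assumed IsOffline() implementation; the original is not shown.
        private bool IsOffline()
        {
            var profile = Windows.Networking.Connectivity.NetworkInformation.GetInternetConnectionProfile();
            return profile == null ||
                   profile.GetNetworkConnectivityLevel() !=
                       Windows.Networking.Connectivity.NetworkConnectivityLevel.InternetAccess;
        }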
Example No. 16
        protected async override void OnNavigatedTo(NavigationEventArgs e)
        {
            _recognizer = new SpeechRecognizer();
            List <string> phrases = new List <string>()
            {
                "red", "yellow", "white", "blue", "green"
            };
            SpeechRecognitionListConstraint listConstraint = new SpeechRecognitionListConstraint(phrases);

            _recognizer.Constraints.Add(listConstraint);
            await _recognizer.CompileConstraintsAsync();
        }
Example No. 17

        private async void InitializeSpeechRecognition()
        {
            var speechRecognizer = new SpeechRecognizer(new Windows.Globalization.Language("en-US"));

            var navigationConstraint = new SpeechRecognitionListConstraint(new[] { nextTerm, backTerm }, "navigate");

            speechRecognizer.Constraints.Add(navigationConstraint);

            await speechRecognizer.CompileConstraintsAsync();

            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
            await speechRecognizer.ContinuousRecognitionSession.StartAsync();
        }
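The ContinuousRecognitionSession_ResultGenerated handler isn't shown; a sketch of what it might do (GoNext and GoBack are placeholders for the app's navigation actions):

        // Sketch; nextTerm/backTerm are the fields the sample assumes, GoNext/GoBack are placeholders.
        private void ContinuousRecognitionSession_ResultGenerated(
            SpeechContinuousRecognitionSession sender,
            SpeechContinuousRecognitionResultGeneratedEventArgs args)
        {
            if (args.Result.Text == nextTerm) { GoNext(); }
            else if (args.Result.Text == backTerm) { GoBack(); }
        }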
Example No. 18
        /// <summary>
        /// Runs the service.
        /// </summary>
        /// <param name="state">The state<see cref="object"/></param>
        private static async void Run(object state)
        {
            try
            {
                // Restart the listener if nothing has happened for more than 30 seconds.
                if (lastListenCylce > DateTime.Now.AddSeconds(-30))
                {
                    return;
                }

                if (recognizer != null)
                {
                    try
                    {
                        await recognizer.StopRecognitionAsync();
                    }
                    catch (Exception ex)
                    {
                        Log(ex);
                    }
                }

                recognizer = new SpeechRecognizer(new Language("de-DE"));
                recognizer.Timeouts.InitialSilenceTimeout = TimeSpan.FromSeconds(2);
                recognizer.Timeouts.EndSilenceTimeout     = TimeSpan.FromSeconds(0.5);
                recognizer.StateChanged += RecognizerStateChanged;
                recognizer.ContinuousRecognitionSession.ResultGenerated += RecognizerResultGenerated;

                // "Licht an" / "Licht aus" is German for "light on" / "light off".
                var textGrammar = new SpeechRecognitionListConstraint(new List <string> {
                    "Licht an", "Licht aus"
                });
                var webSearchGrammar = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.WebSearch, "webSearch");
                recognizer.Constraints.Add(textGrammar);
                recognizer.Constraints.Add(webSearchGrammar);
                SpeechRecognitionCompilationResult compilationResult = await recognizer.CompileConstraintsAsync();

                Log(LogLevel.Debug, "Speech recognition compile result: " + compilationResult.Status);

                if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
                {
                    await Listen();
                }
            }
            catch (Exception ex)
            {
                Log(ex);
            }
        }
Example No. 19
        private async Task InitializeRecognizer(Language recognize)
        {
            if (speechRecognizer != null)
            {
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
                speechRecognizer.ContinuousRecognitionSession.Completed       -= ContinuousRecognitionSession_Completed;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
                this.speechRecognizer.Dispose();
                this.speechRecognizer = null;
            }
            try
            {
                this.speechRecognizer = new SpeechRecognizer(recognize);
                var grammar        = new[] { "order", "product", "manage", "capture", "home", "exit", "help", "back" };
                var playConstraint = new SpeechRecognitionListConstraint(grammar);
                speechRecognizer.Constraints.Add(playConstraint);
                speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;
                SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();

                if (result.Status != SpeechRecognitionResultStatus.Success)
                {
                    btnContinuousRecognize.IsEnabled = false;


                    resultTextBlock.Visibility = Visibility.Visible;
                    resultTextBlock.Text       = "Unable to compile grammar.";
                }
                else
                {
                    btnContinuousRecognize.IsEnabled = true;
                    resultTextBlock.Visibility       = Visibility.Collapsed;
                    speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
                    speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
                }
            }
            catch (Exception ex)
            {
                if ((uint)ex.HResult == HResultRecognizerNotFound)
                {
                    btnContinuousRecognize.IsEnabled = false;
                    resultTextBlock.Visibility       = Visibility.Visible;
                    resultTextBlock.Text             = "Speech Language pack for selected language not installed.";
                }
                else
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
                    await messageDialog.ShowAsync();
                }
            }
        }
Example No. 20
        async private void OnRecognizeFromList(object sender, RoutedEventArgs e)
        {
            // A list constraint is a good fit when listening for one of a few short words.
            var recognizer = new SpeechRecognizer();

            recognizer.UIOptions.ExampleText = "To test this say one, two, or three";
            var list = new SpeechRecognitionListConstraint(new[] { "one", "two", "three" });

            recognizer.Constraints.Add(list);
            await recognizer.CompileConstraintsAsync();

            var result = await recognizer.RecognizeWithUIAsync();

            txt_dictation.Text = result.Text;
        }
Example No. 21
 async Task<bool> answerYN(string question)
 {
     var language = SpeechRecognizer.SystemSpeechLanguage;
     speakString(question);
     string[] yn = {"Yes", "No"};
     SpeechRecognizer speechRecognizer = new SpeechRecognizer();
     SpeechRecognitionListConstraint list = new SpeechRecognitionListConstraint(yn, "yesOrNo");
     speechRecognizer.Constraints.Add(list);
     await speechRecognizer.CompileConstraintsAsync();
     SpeechRecognitionResult answerResult = await speechRecognizer.RecognizeWithUIAsync();
     return answerResult.Text == "Yes";
 }
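A hypothetical caller for the helper above:

 // Hypothetical usage of answerYN:
 bool proceed = await answerYN("Do you want to continue?");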
Example No. 22

        async private void OnSayPrompt(object sender, RoutedEventArgs e)
        {
            try
            {
                var button = sender as Button;
                button.Content = "...";
                bool   failed_signin = true;
                Random random        = new Random();
                int    seed_value    = random.Next(0, 1000);
                int    selection     = seed_value % 4;
                var    tuple         = passphrases[selection];

                var prompt = (string)tuple.Phrase;
                //general dictation
                var recognizer = new SpeechRecognizer();
                var phrases    = passphrases.Select(i => (string)i.Phrase).ToList();

                var list_constraint = new SpeechRecognitionListConstraint(phrases);
                recognizer.Constraints.Add(list_constraint);

                recognizer.StateChanged         += Recognizer_StateChanged;
                recognizer.UIOptions.ExampleText = $"Repeat the phrase '{prompt}'";
                await recognizer.CompileConstraintsAsync();

                var result = await recognizer.RecognizeWithUIAsync();

                if (result.Status == SpeechRecognitionResultStatus.Success)
                {
                    if (result.Text.ToLower() == prompt.ToLower())
                    {
                        if (txt_employeecode2.Password == "employee")
                        {
                            failed_signin = false;
                            Frame.Navigate(_target_type);
                        }
                    }
                }

                if (failed_signin)
                {
                    button.Content = "Failed connection";
                }
            }
            catch (Exception ex)
            {
                await new MessageDialog(ex.Message).ShowAsync();
            }
        }
Example No. 23
        private async void InitializeSpeechRecognizer()
        {
            if (isSpeechInizialized)
            {
                return;
            }

            // init recognizer
            recognizer = new SpeechRecognizer();
            var listConstraint = new SpeechRecognitionListConstraint(new string[]
            {
                "Show", "News", "Detail", "Weather",
                "Hide", "Close", "Time", "Back", "Escape",
                "Stop", "Pause", "Radio",
                "Louder", "Quieter",
            });

            foreach (var item in new Configuration.Configuration().Radios)
            {
                listConstraint.Commands.Add(item.PhoneticName);
            }

            recognizer.Constraints.Add(listConstraint);

            recognizer.StateChanged += RecognizerStateChanged;
            recognizer.ContinuousRecognitionSession.ResultGenerated += RecognizerResultGenerated;

            // compile constraints
            SpeechRecognitionCompilationResult compilationResult = await recognizer.CompileConstraintsAsync();

            // start recognition session if successful
            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                Log.i("SR Success");

                await recognizer.ContinuousRecognitionSession.StartAsync();
            }
            else
            {
                Log.w("SR Failed {0}", compilationResult.Status);
            }

            isSpeechInizialized = true;
        }
Example No. 24
        private async void SetupSpeechRecognition()
        {
            SpeechRecognizer = new SpeechRecognizer();
            var speechOptions = new SpeechRecognitionListConstraint(new[]
            {
                "Toggle Camera",
                "Toggle Geometry",
                "Toggle Debug",
                "Export Mesh"
            });

            SpeechRecognizer.Constraints.Add(speechOptions);
            var result = await SpeechRecognizer.CompileConstraintsAsync();

            // Attach the result handler before starting the continuous session.
            SpeechRecognizer.ContinuousRecognitionSession.ResultGenerated += OnSpeechCommandDetected;

            if (result.Status == SpeechRecognitionResultStatus.Success)
            {
                await SpeechRecognizer.ContinuousRecognitionSession.StartAsync();
            }
        }
Example No. 25
 protected async override void OnNavigatedTo(NavigationEventArgs e)
 {
     dispatcher = CoreWindow.GetForCurrentThread().Dispatcher;
     //bool permissionGained = await AudioCapturePermissions.RequestMicrophoneCapture();
     if (recognizer == null)
     {
         recognizer = new SpeechRecognizer();
         var languages = SpeechRecognizer.SupportedGrammarLanguages;
         var SysSpeech = SpeechRecognizer.SystemSpeechLanguage;
     }
     string[] possibleAnswers = { "Light on", "Light off", "on", "off", "light", "dark", "bright", "next", "previous", "forward", "back", "slideshow", "stop" }; //, "start slideshow", "stop slideshow", };
     var listConstraint = new SpeechRecognitionListConstraint(possibleAnswers, "Answer");
     recognizer.Constraints.Add(listConstraint);
     listenText.Text = recognizer.CurrentLanguage.DisplayName;
     await recognizer.CompileConstraintsAsync();
     recognizer.ContinuousRecognitionSession.Completed += ContinuousRecognitionSession_Completed;
     recognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
     await recognizer.ContinuousRecognitionSession.StartAsync();
     timer = new DispatcherTimer();
 }
Example No. 26
        private async Task InitializeRecognizer(Language recognizerLanguage)
        {
            if (_speechRecognizer != null)
            {
                _speechRecognizer.ContinuousRecognitionSession.Completed       -= ContinuousRecognitionSession_Completed;
                _speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
                _speechRecognizer.Dispose();
                _speechRecognizer = null;
            }

            _speechRecognizer = new SpeechRecognizer(recognizerLanguage);

            var expectedResponses = GetMenuOptions();
            var listConstraint    = new SpeechRecognitionListConstraint(expectedResponses, "Opcije");

            _speechRecognizer.Constraints.Add(listConstraint);
            await _speechRecognizer.CompileConstraintsAsync();

            _speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
            _speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
        }
Example No. 27
        private async void ListConstraintRecognizing_OnClick(object sender, RoutedEventArgs e)
        {
            // Specify the list constraint programmatically (SpeechRecognitionListConstraint).

            var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();

            string[] responses = { "Yes", "No" };

            var list = new SpeechRecognitionListConstraint(responses, "yesOrNo");

            speechRecognizer.UIOptions.ExampleText = @"Ex. 'yes', 'no'";
            speechRecognizer.Constraints.Add(list);

            await speechRecognizer.CompileConstraintsAsync();

            var result = await speechRecognizer.RecognizeWithUIAsync();

            var dialog = new MessageDialog(result.Text, "Text spoken");

            await dialog.ShowAsync();
        }
Example No. 28
        private async void InitializeSpeechRecognition()
        {
            speechRecognizer = new SpeechRecognizer();

            var session = speechRecognizer.ContinuousRecognitionSession;

            session.ResultGenerated += HandleSpeechResult;

            var phrases = new List <string> {
                searchTriggerPhrase
            };
            var constraint = new SpeechRecognitionListConstraint(phrases);

            speechRecognizer.Constraints.Add(constraint);

            var compilationResult = await speechRecognizer.CompileConstraintsAsync();

            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await session.StartAsync();
            }
        }
Example No. 29
        private async void Button_Click_Recognize(object sender, RoutedEventArgs e)
        {
            Button button = sender as Button;

            button.IsEnabled = false;

            // Chinese commands: "开始" (start), "返回" (back), "退出" (exit), "设置" (settings), "介绍" (introduce).
            String[] array = { "开始", "返回", "退出", "设置", "介绍" };
            SpeechRecognitionListConstraint speechRecognitionListConstraint =
                new SpeechRecognitionListConstraint(array);

            using (SpeechRecognizer recognizer = new SpeechRecognizer())
            {
                try
                {
                    recognizer.Constraints.Add(speechRecognitionListConstraint);
                    SpeechRecognitionCompilationResult compilationResult = await recognizer.CompileConstraintsAsync();

                    if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
                    {
                        SpeechRecognitionResult speechRecognitionResult = await recognizer.RecognizeAsync();

                        if (speechRecognitionResult.Status == SpeechRecognitionResultStatus.Success)
                        {
                            tbDisplay.Text       = "finished";
                            textInput.Text       = speechRecognitionResult.Text;
                            this.lb.SelectedItem = speechRecognitionResult.Text;
                        }
                    }
                }
                catch (Exception exception)
                {
                    tbDisplay.Text = "Error" + exception.Message;
                    //throw;
                }
            }
            button.IsEnabled = true;
        }
Example No. 30
        // Initialize Speech Recognizer
        private async void InitializeSpeechRecognizer()
        {
            // Initialize name recognizer
            nameRecognizer = new SpeechRecognizer();

            // Create list constraint
            SpeechRecognitionListConstraint listConstraint = new SpeechRecognitionListConstraint(listOfNames);

            // Add list constraint and compile
            nameRecognizer.Constraints.Add(listConstraint);
            SpeechRecognitionCompilationResult nameResult = await nameRecognizer.CompileConstraintsAsync();

            if (nameResult.Status != SpeechRecognitionResultStatus.Success)
            {
                ListenerStatus.Text = "Unable to initialize Name listener.";
                return;
            }

            // Initialize item recognizer
            itemRecognizer = new SpeechRecognizer();

            // Create topic constraint
            SpeechRecognitionTopicConstraint topicConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.WebSearch, "Short Form");

            // Add topic constraint and compile
            itemRecognizer.Constraints.Add(topicConstraint);
            SpeechRecognitionCompilationResult itemresult = await itemRecognizer.CompileConstraintsAsync();

            if (itemresult.Status != SpeechRecognitionResultStatus.Success)
            {
                ListenerStatus.Text = "Unable to initialize Item listener.";
                return;
            }

            listeningIsEnabled = true;

            ListenerStatus.Text = "Listeners initialized correctly.";
        }
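A hypothetical two-stage listen built on the two recognizers initialized above (the method shape is illustrative):

        // Hypothetical usage of nameRecognizer and itemRecognizer from the sample above.
        private async Task<(string name, string item)> ListenOnceAsync()
        {
            SpeechRecognitionResult name = await nameRecognizer.RecognizeAsync();
            SpeechRecognitionResult item = await itemRecognizer.RecognizeAsync();
            return (name.Text, item.Text);
        }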
Example No. 32
        private async Task<SpeechRecognitionResult> RecognizeSpeech()
        {
            try
            {
                if (recognizer == null)
                {
                    recognizer = new SpeechRecognizer();
                    var languages = SpeechRecognizer.SupportedGrammarLanguages;
                    var SysSpeech = SpeechRecognizer.SystemSpeechLanguage;

                    string[] possibleAnswers = { "Light on", "Light off", "on", "off", "light", "dark", "bright", "next", "previous", "forward", "back" };
                    var listConstraint = new SpeechRecognitionListConstraint(possibleAnswers, "Answer");
                    //recognizer.UIOptions.ExampleText = @"Bsp. 'ja','nein'";
                    recognizer.Constraints.Add(listConstraint);
                    listenText.Text = recognizer.CurrentLanguage.DisplayName;
                    await recognizer.CompileConstraintsAsync();
                }
                SpeechRecognitionResult result = await recognizer.RecognizeAsync(); //.RecognizeWithUIAsync();
                return result;
            }
            catch (Exception exception)
            {
                const uint HResultPrivacyStatementDeclined = 0x80045509;
                if ((uint)exception.HResult == HResultPrivacyStatementDeclined)
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog("You must accept the speech privacy policy");
                    await messageDialog.ShowAsync();
                }
                else
                {
                    //Debug.WriteLine("Error: " + exception.Message);
                }
            }
            return null;
        }
Example No. 33
        private async Task InitializeRecognizer(Language recognize)
        {
            if (speechRecognizer != null)
            {
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
                speechRecognizer.ContinuousRecognitionSession.Completed       -= ContinuousRecognitionSession_Completed;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
                this.speechRecognizer.Dispose();
                this.speechRecognizer = null;
            }
            try
            {
                this.speechRecognizer = new SpeechRecognizer(recognize);

                if (textBox.IsEnabled == true)
                {
                    //var grammar = new[] { "1", "2", "3", "3", "4", "5", "6", "7", "8", "9", "100" };
                    var grammar = Enumerable.Range(1, 100).Select(n => n.ToString()).ToList();


                    var playConstraint = new SpeechRecognitionListConstraint(grammar);
                    speechRecognizer.Constraints.Add(playConstraint);
                    speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;
                    SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();

                    if (result.Status != SpeechRecognitionResultStatus.Success)
                    {
                        btnContinuousRecognize.IsEnabled = false;


                        resultTextBlock.Visibility = Visibility.Visible;
                        resultTextBlock.Text       = "Unable to compile grammar.";
                    }
                    else
                    {
                        btnContinuousRecognize.IsEnabled = true;
                        resultTextBlock.Visibility       = Visibility.Collapsed;
                        speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
                        speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
                    }
                }
                else
                {
                    var grammar        = new[] { "next", "undo", "home", "help" };
                    var playConstraint = new SpeechRecognitionListConstraint(grammar);
                    speechRecognizer.Constraints.Add(playConstraint);
                    speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;
                    SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();

                    if (result.Status != SpeechRecognitionResultStatus.Success)
                    {
                        btnContinuousRecognize.IsEnabled = false;


                        resultTextBlock.Visibility = Visibility.Visible;
                        resultTextBlock.Text       = "Unable to compile grammar.";
                    }
                    else
                    {
                        btnContinuousRecognize.IsEnabled = true;
                        resultTextBlock.Visibility       = Visibility.Collapsed;
                        speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
                        speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
                    }
                }
            }

            catch (Exception ex)
            {
                if ((uint)ex.HResult == HResultRecognizerNotFound)
                {
                    btnContinuousRecognize.IsEnabled = false;
                    resultTextBlock.Visibility       = Visibility.Visible;
                    resultTextBlock.Text             = "Speech Language pack for selected language not installed.";
                }
                else
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
                    await messageDialog.ShowAsync();
                }
            }
        }
Example No. 35
        public async void ListenVoiceCommand()
        {
            await this.dispatcher.RunAsync(CoreDispatcherPriority.High, () => {
                ResultText.Text = "Listening ...";
            });

            SpeechRecognizer commandRecognizer = new SpeechRecognizer();

            string[] commands = { "Turn left",    "Turn right",          "stop moving",
                                  "Move forward", "Move backward",
                                  "how are you",  "is every thing fine",
                                  "Lights on",    "Lights off",          "Red light",  "Blue light",
                                  "Green light" };
            var      commandConstraints = new SpeechRecognitionListConstraint(commands);

            commandRecognizer.Constraints.Add(commandConstraints);
            await commandRecognizer.CompileConstraintsAsync();

            commandRecognizer.HypothesisGenerated += SpeechRecognizer_HypothesisGenerated;

            SpeechRecognitionResult speechRecognitionResult = await commandRecognizer.RecognizeAsync();

            string robotCommand = speechRecognitionResult.Text;

            if (!robotCommand.Equals(""))
            {
                switch (robotCommand.ToLower())
                {
                case "how are you":
                    SayWord("I am fine, thank you");
                    break;

                case "turn left":
                    TurnLeft();
                    SayWord("Turning Left boss");
                    break;

                case "turn right":
                    TurnRight();
                    SayWord("Vehical turning right");
                    break;

                case "move backward":
                    SayWord("Moving backward");
                    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        FrontButton.IsChecked   = false;
                        ReverseButton.IsChecked = true;
                    });

                    break;

                case "move forward":
                    SayWord("moving forward");
                    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        ReverseButton.IsChecked = false;
                        FrontButton.IsChecked   = true;
                    });

                    break;

                case "stop moving":
                    Stop();
                    break;

                case "lights on":
                    LightsOn();
                    break;

                case "lights off":
                    LightsOff();
                    break;

                case "green light":
                    GreenLight();
                    break;

                case "red light":
                    RedLight();
                    break;

                case "blue light":
                    BlueLight();
                    break;

                default:
                    break;
                }
            }
            else
            {
                SayWord("No valid command specified");
            }

            // Release the recognizer; a new instance is created on each call.
            commandRecognizer.HypothesisGenerated -= SpeechRecognizer_HypothesisGenerated;
            commandRecognizer.Dispose();
        }
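        // Not part of the original snippet: a minimal sketch of the SpeechRecognizer_HypothesisGenerated
        // handler wired above, which surfaces the partial phrase while RecognizeAsync is still listening.
        // ResultText and dispatcher are the members already used in ListenVoiceCommand.
        private async void SpeechRecognizer_HypothesisGenerated(SpeechRecognizer sender, SpeechRecognitionHypothesisGeneratedEventArgs args)
        {
            string hypothesis = args.Hypothesis.Text;

            await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                // Show the in-progress recognition so the user gets incremental feedback.
                ResultText.Text = $"Listening ... {hypothesis}";
            });
        }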
Example No. 36
        /// <summary>
        /// Initialize the speech recognizer and compile its constraints.
        /// </summary>
        private async void InitializeRecognizer()
        {
            if (this.initializationFailed)
            {
                Debug.WriteLine($"VoiceCommand: Initialization failed detected. Skipping InitializeRecognizer.");
                return;
            }

            await this.startStopSemaphore.WaitAsync();
            try
            {
                if (this.speechRecognizer != null)
                {
                    // cleanup prior to re-initializing this scenario.
                    this.speechRecognizer.StateChanged -= this.SpeechRecognizer_StateChanged;
                    this.speechRecognizer.ContinuousRecognitionSession.Completed -= this.ContinuousRecognitionSession_Completed;
                    this.speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= this.ContinuousRecognitionSession_ResultGenerated;
                    this.speechRecognizer.HypothesisGenerated -= this.SpeechRecognizer_HypothesisGenerated;

                    this.speechRecognizer.Dispose();
                    this.speechRecognizer = null;
                }

                this.speechRecognizer = new SpeechRecognizer();

                // Provide feedback to the user about the state of the recognizer. This can be used to drive a visual
                // listening indicator that helps the user understand whether they're being heard.
                this.speechRecognizer.StateChanged += this.SpeechRecognizer_StateChanged;

                // Add a list constraint built from the configured keywords, so the recognizer listens only for those phrases.
                var listConstraint = new SpeechRecognitionListConstraint(this.keywords);
                this.speechRecognizer.Constraints.Add(listConstraint);
                SpeechRecognitionCompilationResult result = await this.speechRecognizer.CompileConstraintsAsync();
                if (result.Status == SpeechRecognitionResultStatus.Success)
                {
                    Debug.WriteLine("VoiceCommand: Grammar Compilation Success");
                }
                else
                {
                    Debug.WriteLine($"VoiceCommand: Grammar Compilation Failed: {result.Status}");
                    return;
                }

                // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
                // some recognized phrases occur, or the garbage rule is hit. HypothesisGenerated fires during recognition, and
                // allows us to provide incremental feedback based on what the user's currently saying.
                this.speechRecognizer.ContinuousRecognitionSession.Completed += this.ContinuousRecognitionSession_Completed;
                this.speechRecognizer.ContinuousRecognitionSession.ResultGenerated += this.ContinuousRecognitionSession_ResultGenerated;
                this.speechRecognizer.HypothesisGenerated += this.SpeechRecognizer_HypothesisGenerated;

                await this.speechRecognizer.ContinuousRecognitionSession.StartAsync();
                this.isListening = true;
            }
            catch (Exception ex)
            {
                this.initializationFailed = true;
                Debug.WriteLine($"VoiceCommand: Exception raised during InitializeRecognizer: {ex}");
            }
            finally
            {
                this.startStopSemaphore.Release();
            }
        }
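        // Not in the original: a sketch of the teardown counterpart to InitializeRecognizer,
        // reusing the same startStopSemaphore so that start and stop cannot interleave.
        // The method name is illustrative.
        private async Task StopRecognizerAsync()
        {
            await this.startStopSemaphore.WaitAsync();
            try
            {
                if (this.speechRecognizer != null && this.isListening)
                {
                    // StopAsync lets the current utterance finish; CancelAsync would abort it immediately.
                    await this.speechRecognizer.ContinuousRecognitionSession.StopAsync();
                    this.isListening = false;
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine($"VoiceCommand: Exception raised during StopRecognizerAsync: {ex}");
            }
            finally
            {
                this.startStopSemaphore.Release();
            }
        }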
Example No. 37
        public static async void StartService()
        {
            try
            {
                if (isRunning)
                {
                    // The service is already running; exit immediately.
                    return;
                }

                isRunning = true;

                var isConnected = false;
                while (!isConnected)
                {
                    // Wait until an Internet connection is available.
                    var profile = NetworkInformation.GetInternetConnectionProfile();
                    isConnected = profile?.GetNetworkConnectivityLevel() == NetworkConnectivityLevel.InternetAccess;

                    if (!isConnected)
                    {
                        await Task.Delay(200);
                    }
                }

                if (Settings.Instance == null)
                {
                    await LoadSettingsAsync();
                }

                if (directLineClient == null)
                {
                    // Obtain a token using the Direct Line secret
                    var tokenResponse = await new DirectLineClient(Settings.Instance.DirectLineSecret).Tokens.GenerateTokenForNewConversationAsync();

                    // Use token to create conversation
                    directLineClient = new DirectLineClient(tokenResponse.Token);
                    conversation     = await directLineClient.Conversations.StartConversationAsync();

                    // Connect using a WebSocket.
                    webSocketClient = new MessageWebSocket();
                    webSocketClient.MessageReceived += WebSocketClient_MessageReceived;
                    await webSocketClient.ConnectAsync(new Uri(conversation.StreamUrl));
                }

                if (assistantInvokerSpeechRecognizer == null)
                {
                    // Create an instance of SpeechRecognizer.
                    assistantInvokerSpeechRecognizer = new SpeechRecognizer(new Language(Settings.Instance.Culture));
                    assistantInvokerSpeechRecognizer.Timeouts.InitialSilenceTimeout = TimeSpan.MaxValue;
                    assistantInvokerSpeechRecognizer.Timeouts.BabbleTimeout         = TimeSpan.MaxValue;

                    // Add a list constraint to the recognizer.
                    var listConstraint = new SpeechRecognitionListConstraint(new string[] { Settings.Instance.AssistantName }, "assistant");
                    assistantInvokerSpeechRecognizer.Constraints.Add(listConstraint);
                    await assistantInvokerSpeechRecognizer.CompileConstraintsAsync();
                }

                if (commandSpeechRecognizer == null)
                {
                    commandSpeechRecognizer = new SpeechRecognizer(new Language(Settings.Instance.Culture));

                    // Apply a topic constraint (web search scenario) to optimize for short freeform commands.
                    var dictationConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.WebSearch, "dictation");
                    commandSpeechRecognizer.Constraints.Add(dictationConstraint);
                    await commandSpeechRecognizer.CompileConstraintsAsync();
                }

                // The assistant is ready to receive input.
                SoundPlayer.Instance.Play(Sounds.SpeechActive);

                while (isRunning)
                {
                    try
                    {
                        var assistantInvocationResult = await assistantInvokerSpeechRecognizer.RecognizeAsync();

                        if (assistantInvocationResult.Status == SpeechRecognitionResultStatus.Success && assistantInvocationResult.Confidence != SpeechRecognitionConfidence.Rejected)
                        {
                            OnStartRecognition?.Invoke(null, EventArgs.Empty);
                            SoundPlayer.Instance.Play(Sounds.Ready);

                            // Starts command recognition. It returns when the first utterance has been recognized.
                            var commandResult = await commandSpeechRecognizer.RecognizeAsync();

                            if (commandResult.Status == SpeechRecognitionResultStatus.Success && commandResult.Confidence != SpeechRecognitionConfidence.Rejected)
                            {
                                var command = commandResult.NormalizeText();
                                Debug.WriteLine(command);

                                OnCommandReceived?.Invoke(null, EventArgs.Empty);

                                // Send the activity to the bot; the answer arrives in the WebSocket MessageReceived event handler.
                                var userMessage = new Activity
                                {
                                    From = new ChannelAccount(Settings.Instance.UserName),
                                    Text = command,
                                    Type = ActivityTypes.Message
                                };

                                await directLineClient.Conversations.PostActivityAsync(conversation.ConversationId, userMessage);
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        OnResponseReceived?.Invoke(null, new BotEventArgs(ex.Message));
                    }
                }

                // Clean up used resources.
                SoundPlayer.Instance.Play(Sounds.SpeechStopped);
            }
            catch (Exception ex)
            {
                OnResponseReceived?.Invoke(null, new BotEventArgs(ex.Message));
                SoundPlayer.Instance.Play(Sounds.SpeechStopped);
            }

            assistantInvokerSpeechRecognizer?.Dispose();
            commandSpeechRecognizer?.Dispose();
            webSocketClient?.Dispose();
            directLineClient?.Dispose();

            assistantInvokerSpeechRecognizer = null;
            commandSpeechRecognizer          = null;
            webSocketClient  = null;
            conversation     = null;
            directLineClient = null;

            isRunning = false;
        }
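        // Not part of the original snippet: hedged sketches of the matching StopService and of
        // the WebSocketClient_MessageReceived handler wired above (its body is not shown here).
        // Assumes Newtonsoft.Json for deserialization; Direct Line pushes ActivitySet JSON over
        // the socket, including empty keep-alive frames.
        public static async void StopService()
        {
            isRunning = false;

            // Ending the pending RecognizeAsync call unblocks the recognition loop promptly
            // instead of waiting for the next utterance to be detected.
            if (assistantInvokerSpeechRecognizer != null)
            {
                await assistantInvokerSpeechRecognizer.StopRecognitionAsync();
            }
        }

        private static void WebSocketClient_MessageReceived(MessageWebSocket sender, MessageWebSocketMessageReceivedEventArgs args)
        {
            using (var reader = args.GetDataReader())
            {
                reader.UnicodeEncoding = UnicodeEncoding.Utf8;
                var json = reader.ReadString(reader.UnconsumedBufferLength);

                // Direct Line sends empty messages as keep-alives; ignore them.
                if (string.IsNullOrWhiteSpace(json))
                {
                    return;
                }

                var activitySet = JsonConvert.DeserializeObject<ActivitySet>(json);
                foreach (var activity in activitySet?.Activities ?? new List<Activity>())
                {
                    // Surface only the bot's replies, skipping the user's own echoed messages.
                    if (activity.Type == ActivityTypes.Message && activity.From?.Id != Settings.Instance.UserName)
                    {
                        OnResponseReceived?.Invoke(null, new BotEventArgs(activity.Text));
                    }
                }
            }
        }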