Example No. 1
        // Initialize Speech Recognizer and start async recognition
        private async void initializeSpeechRecognizer()
        {
            // Initialize recognizer
            recognizer = new SpeechRecognizer();

            // Set event handlers
            recognizer.StateChanged += RecognizerStateChanged;
            recognizer.ContinuousRecognitionSession.ResultGenerated += RecognizerResultGenerated;

            // Load Grammar file constraint
            string      fileName           = SRGS_FILE;
            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(fileName);

            SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            // Add to grammar constraint
            recognizer.Constraints.Add(grammarConstraint);

            // Compile grammar
            SpeechRecognitionCompilationResult compilationResult = await recognizer.CompileConstraintsAsync();

            Debug.WriteLine("Status: " + compilationResult.Status.ToString());

            // If compilation succeeded, log the result and start continuous recognition.
            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                Debug.WriteLine("Result: " + compilationResult.ToString());

                await recognizer.ContinuousRecognitionSession.StartAsync();
            }
            else
            {
                Debug.WriteLine("Status: " + compilationResult.Status);
            }
        }
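Note: the ResultGenerated handlers these examples subscribe are not shown on this page. A minimal sketch of one, with assumed names (this is not the code of the example above; the Dispatcher call assumes Page code-behind):

        // Sketch of a ResultGenerated handler (assumed names, for illustration only).
        private async void RecognizerResultGenerated(SpeechContinuousRecognitionSession session, SpeechContinuousRecognitionResultGeneratedEventArgs args)
        {
            // Ignore results the engine itself rejected.
            if (args.Result.Confidence == SpeechRecognitionConfidence.Rejected)
            {
                return;
            }

            // Marshal back to the UI thread before touching any controls.
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                // args.Result.Text holds the recognized phrase; SRGS <tag> values, if any,
                // are exposed through args.Result.SemanticInterpretation.Properties.
                Debug.WriteLine("Recognized: " + args.Result.Text);
            });
        }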
Example No. 2
        private void VoiceRec_Loaded(object sender, RoutedEventArgs e)
        {
            LoadTask = Task.Run(async() =>
            {
                Cancellation = new CancellationTokenSource();
                SpeechRec    = new SpeechRecognizer();
                SpeechSynth  = new SpeechSynthesizer();

                //Get the SRGS.grxml recognition grammar file
                var GrammarFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///VoiceRec/SRGS.grxml"));

                //Create a grammar constraint from the file and add it to the recognizer's constraint collection
                var SRGSConstraint = new SpeechRecognitionGrammarFileConstraint(GrammarFile, "Control");
                SpeechRec?.Constraints.Add(SRGSConstraint);

                //To play music, the song names must be fetched dynamically from the database and added as a grammar constraint
                var SongNames = await SQLite.GetInstance().GetAllMusicNameAsync();

                if (SongNames != null)
                {
                    //If song data exists, add a list constraint ("播放" means "play")
                    var PlayConstraint = new SpeechRecognitionListConstraint(from item in SongNames select string.Format("{0}{1}", "播放", item), "ChooseMusic");
                    SpeechRec?.Constraints.Add(PlayConstraint);
                }

                //Compile all grammar constraints
                await SpeechRec.CompileConstraintsAsync();
            });
        }
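Example No. 3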
        /// <summary>
        /// Creates a SpeechRecognizer instance and initializes the grammar.
        /// </summary>
        private async void InitializeRecognizer()
        {
            // Initialize the SRGS-compliant XML file.
            // For more information about grammars for Windows apps and how to
            // define and use SRGS-compliant grammars in your app, see
            // https://msdn.microsoft.com/en-us/library/dn596121.aspx

            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(@"SRGSColors.xml");

            // Initialize the SpeechRecognizer and add the grammar.
            recognizer = new SpeechRecognizer();

            // Provide feedback to the user about the state of the recognizer.
            recognizer.StateChanged += SpeechRecognizer_StateChanged;

            SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            recognizer.Constraints.Add(grammarConstraint);
            SpeechRecognitionCompilationResult compilationResult = await recognizer.CompileConstraintsAsync();

            if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
            {
                return;
            }

            // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
            recognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);
        }
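None of these snippets show the SRGS file itself (SRGSColors.xml above). As a sketch only, the helper below writes a minimal grammar of the same general shape to a temporary file and builds a constraint from it; the method name, file name, and rule contents are illustrative assumptions, not the sample's actual grammar.

        // Sketch: build a SpeechRecognitionGrammarFileConstraint from a minimal SRGS grammar.
        private static async Task<SpeechRecognitionGrammarFileConstraint> CreateMinimalColorConstraintAsync()
        {
            string srgsXml =
@"<?xml version='1.0' encoding='utf-8'?>
<grammar version='1.0' xml:lang='en-US' root='colors'
         xmlns='http://www.w3.org/2001/06/grammar'>
  <rule id='colors' scope='public'>
    <one-of>
      <item>red</item>
      <item>green</item>
      <item>blue</item>
    </one-of>
  </rule>
</grammar>";

            // Write the grammar to a temp file so a file-based constraint can wrap it.
            StorageFile srgsFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync("MinimalColors.xml", CreationCollisionOption.ReplaceExisting);
            await FileIO.WriteTextAsync(srgsFile, srgsXml);

            return new SpeechRecognitionGrammarFileConstraint(srgsFile);
        }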
Example No. 4
        /// <summary>
        /// Starts the speech recognition
        /// </summary>
        public async void Start()
        {
            if (m_IsDisposed)
            {
                throw new ObjectDisposedException(nameof(DateTimeProvider));
            }

            var hasPermission = await HasMicrophonePermission();

            if (!hasPermission)
            {
                throw new UnauthorizedAccessException("No access to microphone!");
            }

            var grammarFile = await Package.Current.InstalledLocation.GetFileAsync(GrammerFile);

            var grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarFile);

            m_Recognizer.Constraints.Add(grammarConstraint);

            var compilationResult = await m_Recognizer.CompileConstraintsAsync();

            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await m_Recognizer.ContinuousRecognitionSession.StartAsync();
            }
        }
Example No. 5
        private async Task InitializeRecognizer()
        {
            bool permissionGained = await RequestMicrophonePermission();

            if (!permissionGained)
            {
                stat.Text = "No mic permission";
                return;
            }
            // Create an instance of SpeechRecognizer.
            speechRecognizer = new SpeechRecognizer();
            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(@"grammar.xml");

            SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            speechRecognizer.Constraints.Add(grammarConstraint);
            SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

            if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
            {
                stat.Text = "Error:" + compilationResult.Status.ToString();
                return;
            }

            // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
            speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
            await speechRecognizer.ContinuousRecognitionSession.StartAsync();
        }
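RequestMicrophonePermission is a helper this example does not include. A common implementation is sketched below, assuming the MediaCapture-based approach; this is not the sample's actual code.

        // Sketch of a microphone permission check (assumed helper).
        private static async Task<bool> RequestMicrophonePermission()
        {
            try
            {
                // Initializing an audio-only MediaCapture triggers the consent prompt if needed.
                var settings = new Windows.Media.Capture.MediaCaptureInitializationSettings
                {
                    StreamingCaptureMode = Windows.Media.Capture.StreamingCaptureMode.Audio,
                    MediaCategory        = Windows.Media.Capture.MediaCategory.Speech
                };

                var capture = new Windows.Media.Capture.MediaCapture();
                await capture.InitializeAsync(settings);
                return true;
            }
            catch (UnauthorizedAccessException)
            {
                // The user denied microphone access.
                return false;
            }
        }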
Example No. 6
        /// <summary>
        /// Initializes MyRecognizer and loads the grammar from file 'Grammar\MyGrammar.xml'
        /// </summary>
        private async void InitializeSpeechRecognizer()
        {
            // Initialize SpeechRecognizer Object
            var myRecognizer = new SpeechRecognizer();

            // Register Event Handlers
            myRecognizer.StateChanged += MyRecognizer_StateChanged;
            myRecognizer.ContinuousRecognitionSession.ResultGenerated += MyRecognizer_ResultGenerated;

            // Create Grammar File Object
            var grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(@"Grammar\MyGrammar.xml");

            // Add Grammar Constraint from Grammar File
            var grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            myRecognizer.Constraints.Add(grammarConstraint);

            // Compile Grammar
            var compilationResult = await myRecognizer.CompileConstraintsAsync();

            // Write Debug Information
            Debug.WriteLine($"Status: {compilationResult.Status}");

            // If Compilation Successful, Start Continuous Recognition Session
            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await myRecognizer.ContinuousRecognitionSession.StartAsync();
            }
        }
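MyRecognizer_StateChanged is subscribed above but not shown. A minimal sketch of a state handler, for illustration only:

        // Sketch of a StateChanged handler (assumed; not part of the example above).
        private void MyRecognizer_StateChanged(SpeechRecognizer sender, SpeechRecognizerStateChangedEventArgs args)
        {
            // args.State reports values such as Idle, Capturing, Processing, and SpeechDetected,
            // which can drive "listening" feedback in the UI.
            Debug.WriteLine("Recognizer state: " + args.State);
        }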
Example No. 7
        /// <summary>
        /// Code for voice recognition.
        /// </summary>
        //To initialize Speech Recognizer
        public async void InitSpeechRecognizer(int n)
        {
            if (n == 0)
            {
                Rec.Dispose();
                return;
            }
            Rec = new SpeechRecognizer();
            Rec.ContinuousRecognitionSession.ResultGenerated += Rec_ResultGenerated;

            StorageFile Store = await Package.Current.InstalledLocation.GetFileAsync(@"GrammarFile.xml");

            SpeechRecognitionGrammarFileConstraint constraint = new SpeechRecognitionGrammarFileConstraint(Store);

            Rec.Constraints.Add(constraint);
            SpeechRecognitionCompilationResult result = await Rec.CompileConstraintsAsync();

            if (result.Status == SpeechRecognitionResultStatus.Success)
            {
                status.Text = "Speech Recognition started.";
                tts(status.Text);
                Rec.UIOptions.AudiblePrompt = "Speech Recognition started.";
                await Rec.ContinuousRecognitionSession.StartAsync();
            }
        }
Example No. 8
        public static async Task InitializeRecognizer(Language recognizerLanguage)
        {
            try
            {
                // determine the language code being used.
                StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(GrammarPath);

                // Initialize the SpeechRecognizer and add the grammar.
                speechRecognizer = new SpeechRecognizer(recognizerLanguage);

                // RecognizeWithUIAsync allows developers to customize the prompts.
                SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);
                speechRecognizer.Constraints.Add(grammarConstraint);
                SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

                // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
                if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
                {
                    // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
                    speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(SpeechTimespan);
                }
            }
            catch (Exception ex)
            {
                var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
                await messageDialog.ShowAsync();

                throw;
            }
        }
Example No. 9
        private async void InitializeSpeechRecognizer()
        {
            // Initialize SpeechRecognizer Object
            MyRecognizer = new SpeechRecognizer();

            // Register Event Handlers
            MyRecognizer.StateChanged += MyRecognizer_StateChanged;
            MyRecognizer.ContinuousRecognitionSession.ResultGenerated += MyRecognizer_ResultGenerated;

            // Create Grammar File Object from the predefined mygrammar.xml
            StorageFile GrammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(@"mygrammar.xml");

            // Add Grammar Constraint from Grammar File
            SpeechRecognitionGrammarFileConstraint GrammarConstraint = new SpeechRecognitionGrammarFileConstraint(GrammarContentFile);

            MyRecognizer.Constraints.Add(GrammarConstraint);

            // Compile Grammar
            SpeechRecognitionCompilationResult CompilationResult = await MyRecognizer.CompileConstraintsAsync();

            // Write Debug Information
            Debug.WriteLine("Status: " + CompilationResult.Status.ToString());

            // If Compilation Successful, Start Continuous Recognition Session
            if (CompilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await MyRecognizer.ContinuousRecognitionSession.StartAsync();
            }
        }
Example No. 10
        private async void initializeSpeechRecognizer()
        {
            // Initialize recognizer
            recognizer = new SpeechRecognizer();
            recognizer.StateChanged += Recognizer_StateChanged;
            recognizer.ContinuousRecognitionSession.ResultGenerated += RecognizerResultGenerated;

            var grammarContentFile = await Windows.ApplicationModel.Package.Current.InstalledLocation.GetFileAsync(@"Assets\grammar.xml");

            var grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            recognizer.Constraints.Add(grammarConstraint);

            var compilationResult = await recognizer.CompileConstraintsAsync();

            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                //Debug.WriteLine("Result: " + compilationResult.ToString());

                await recognizer.ContinuousRecognitionSession.StartAsync();
            }
            else
            {
                //tbMainMessage.Foreground = new SolidColorBrush(Windows.UI.Color.FromArgb(255, 255, 0, 0));
            }
        }
Example No. 11
        public static async Task <IList <ISpeechRecognitionConstraint> > GetConstraintsAsync()
        {
            var grammarList = new List <ISpeechRecognitionConstraint>();

            var commandsFileTask      = StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///VoiceControl/MoveCommands.grxml")).AsTask();
            var yesNoFileTask         = StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///VoiceControl/YesNoCommands.grxml")).AsTask();
            var pieceConfirmationTask = StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///VoiceControl/PieceConfirmation.grxml")).AsTask();
            var cancelFileTask        = StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///VoiceControl/CancelCommand.grxml")).AsTask();

            await Task.WhenAll(commandsFileTask, yesNoFileTask, pieceConfirmationTask, cancelFileTask);

            var moveGrammar              = new SpeechRecognitionGrammarFileConstraint(commandsFileTask.Result, moveCommandsGrammarTag);
            var yesNoGrammar             = new SpeechRecognitionGrammarFileConstraint(yesNoFileTask.Result, yesNoCommandsGrammarTag);
            var pieceConfirmationGrammar = new SpeechRecognitionGrammarFileConstraint(pieceConfirmationTask.Result, pieceConfirmationGrammarTag);
            var cancelGrammar            = new SpeechRecognitionGrammarFileConstraint(cancelFileTask.Result, cancelCommandGrammarTag);

            grammarList.Add(moveGrammar);
            grammarList.Add(yesNoGrammar);
            grammarList.Add(pieceConfirmationGrammar);
            grammarList.Add(cancelGrammar);

#if DEBUG
            var debugFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///VoiceControl/DebugCommands.grxml")).AsTask();

            var debugGrammar = new SpeechRecognitionGrammarFileConstraint(debugFile, debugGrammarTag);
            grammarList.Add(debugGrammar);
#endif

            return(grammarList);
        }
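A caller would typically add every returned constraint to a recognizer before compiling; a short usage sketch (the wrapper method is an assumption):

        // Sketch: consuming GetConstraintsAsync.
        private static async Task<SpeechRecognizer> CreateRecognizerAsync()
        {
            var recognizer = new SpeechRecognizer();

            foreach (var constraint in await GetConstraintsAsync())
            {
                recognizer.Constraints.Add(constraint);
            }

            await recognizer.CompileConstraintsAsync();
            return recognizer;
        }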
Example No. 12
        /// <summary>
        /// Initialize Speech Recognizer and compile constraints.
        /// </summary>
        /// <param name="recognizerLanguage">Language to use for the speech recognizer</param>
        /// <returns>Awaitable task.</returns>
        private async Task InitializeRecognizer(Language recognizerLanguage)
        {
            if (speechRecognizer != null)
            {
                await UninitializeRecognizer();
            }

            try
            {
                // Initialize the SRGS-compliant XML file.
                // For more information about grammars for Windows apps and how to
                // define and use SRGS-compliant grammars in your app, see
                // https://msdn.microsoft.com/en-us/library/dn596121.aspx

                // determine the language code being used.
                var languageTag        = recognizerLanguage.LanguageTag;
                var fileName           = String.Format("Content\\SRGS\\{0}\\SRGS.xml", languageTag);
                var grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(fileName);

                // Initialize the SpeechRecognizer and add the grammar.
                speechRecognizer = new SpeechRecognizer(recognizerLanguage);

                // Provide feedback to the user about the state of the recognizer. This can be used to provide
                // visual feedback to help the user understand whether they're being heard.
                speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

                var grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);
                speechRecognizer.Constraints.Add(grammarConstraint);
                var compilationResult = await speechRecognizer.CompileConstraintsAsync();

                // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
                if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
                {
                    // TODO HANDLE ERROR
                }
                else
                {
                    // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
                    speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);

                    // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
                    // some recognized phrases occur, or the garbage rule is hit.
                    speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
                    speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
                }
            }
            catch (Exception ex)
            {
                if ((uint)ex.HResult == HResultRecognizerNotFound)
                {
                    // TODO HANDLE ERROR
                }
                else
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
                    await messageDialog.ShowAsync();
                }
            }
        }
Example No. 13
        private async void ConvertButton_Click(object sender, RoutedEventArgs e)
        {
            // Disable button so it cannot be tapped twice.
            ConvertButton.IsEnabled = false;

            // Stop speech synthesis while recognizing.
            if (answerElement != null)
            {
                answerElement.Stop();
            }

            var speechRecognizer = new SpeechRecognizer();

            StorageFile file;

            if (viewModel.CheckForRepeat)
            {
                file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Grammars/Repeat.grxml"));
            }
            else
            {
                file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Grammars/Convert.grxml"));
            }

            var constraint = new SpeechRecognitionGrammarFileConstraint(file);

            speechRecognizer.Constraints.Add(constraint);

            await speechRecognizer.CompileConstraintsAsync();

            speechRecognizer.UIOptions.ShowConfirmation = false;

            try
            {
                var result = await speechRecognizer.RecognizeWithUIAsync();

                if (result.Status == SpeechRecognitionResultStatus.Success)
                {
                    viewModel.Convert(result);
                }
                else
                {
                    ConvertButton.IsEnabled = true;
                }
            }
            catch (Exception)
            {
                // Catch this so we don't crash when receiving a phone call.
                ConvertButton.IsEnabled = true;
            }
        }
Example No. 14
    private async Task <int> LoadGrammarFileAsync(string sFileName)
    {
        if (speechRecognizer != null)
        {
            string sUrl        = "ms-appdata:///local/" + sFileName;
            var    storageFile = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri(sUrl));

            var grammarFile = new SpeechRecognitionGrammarFileConstraint(storageFile, sFileName);

            speechRecognizer.Constraints.Add(grammarFile);
            await speechRecognizer.CompileConstraintsAsync();
        }

        return(0);
    }
Example No. 15
        private async Task <ISpeechRecognitionConstraint> LoadDynamicConstraintAsync()
        {
            // Get template file
            var templateFile = await Package.Current.InstalledLocation.GetFileAsync(GRAMMAR_FILE);

            // Create dynamic file
            var dynamicFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync("DynamicGrammar.xml", CreationCollisionOption.ReplaceExisting);

            // Copy from template to dynamic and add new rules
            using (var templateStream = await templateFile.OpenReadAsync())
            {
                // Import grammar namespace
                XNamespace xmlns = "http://www.w3.org/2001/06/grammar";

                // Load template
                XDocument dynamicDoc = XDocument.Load(templateStream.AsStreamForRead());

                // Add dynamic rules
                AddDynamicRules(xmlns, dynamicDoc.Root);

                // Write out to temp file
                using (var dynamicStream = await dynamicFile.OpenAsync(FileAccessMode.ReadWrite))
                {
                    // Customize settings to be SRGS friendly
                    XmlWriterSettings srgsSettings = new XmlWriterSettings
                    {
                        Indent              = true,
                        NewLineHandling     = NewLineHandling.Entitize,
                        NewLineOnAttributes = false
                    };

                    // Create writer for dynamic file with proper settings
                    using (var dynamicWriter = XmlWriter.Create(dynamicStream.AsStreamForWrite(), srgsSettings))
                    {
                        // Save dynamic to file
                        dynamicDoc.WriteTo(dynamicWriter);
                    }
                }
            }

            // Load constraint from dynamic file
            var constraint = new SpeechRecognitionGrammarFileConstraint(dynamicFile);

            // Return the loaded constraint
            return(constraint);
        }
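AddDynamicRules is user code that this example does not include. The sketch below shows what such a method might do, assuming the template grammar contains a rule with id="dynamic" whose <one-of> list is to be extended (it uses the same System.Xml.Linq types as the example, plus System.Linq):

        // Sketch only: extend a <one-of> list inside the loaded SRGS template.
        private void AddDynamicRules(XNamespace xmlns, XElement grammarRoot)
        {
            // Locate the rule to extend (the "dynamic" id is an assumption about the template).
            XElement oneOf = grammarRoot
                             .Elements(xmlns + "rule")
                             .First(r => (string)r.Attribute("id") == "dynamic")
                             .Element(xmlns + "one-of");

            // Real code would loop over data, e.g. names read from a database.
            oneOf.Add(new XElement(xmlns + "item", "open the main menu"));
        }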
Example No. 16
        public static async void onStart()
        {
            var storageFile = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///GrammarFileConstraint.grxml"));

            var grammarFileConstraint = new SpeechRecognitionGrammarFileConstraint(storageFile, "phonems");

            speechRecognizer.Constraints.Add(grammarFileConstraint);
            SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();

            Debug.WriteLine("result compilation :" + result.Status.ToString());
            speechRecognizer.StateChanged        += onStateChanged;
            speechRecognizer.HypothesisGenerated += speechRecognizer_HypothesisGenerated;
            if (speechRecognizer.State == SpeechRecognizerState.Idle)
            {
                await speechRecognizer.ContinuousRecognitionSession.StartAsync();
            }
            //System.Diagnostics.Debug.WriteLine("Speech recognition started");
        }
Example No. 17
        private async Task InitializeRecognizer(Language recognizerLanguage)
        {   // Initialize the recognizer
            if (speechRecognizer != null)
            {
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;

                this.speechRecognizer.Dispose();
                this.speechRecognizer = null;
            }

            try
            {
                // Load the grammar file
                string      fileName           = "SRGS\\SRGSComandos.xml";
                StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(fileName);

                // Initialize the recognizer object
                speechRecognizer = new SpeechRecognizer(recognizerLanguage);

                // Enable state feedback to the user
                speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

                // Compile the grammar
                SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);
                speechRecognizer.Constraints.Add(grammarConstraint);
                SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

                // If compilation failed...
                if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
                {
                    MostrarTexto(txbEstado, "Error compilando la gramática");
                }
                else
                {
                    // MostrarTexto(txbEstado, "Gramática compilada"); DEBUG: no longer needed
                    speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2); // Give the user time to finish speaking
                }
            }
            catch (Exception e)
            {
                var messageDialog = new Windows.UI.Popups.MessageDialog(e.Message, "Excepción inicializando el  reconocimiento");
                await messageDialog.ShowAsync();
            }
        }
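Example No. 18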
        public async Task CompileGrammar()
        {
            var grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(this.GrammarFile);

            var grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            this.SpeechRecognizer.Constraints.Add(grammarConstraint);

            var compilationResult = await this.SpeechRecognizer.CompileConstraintsAsync();

            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await this.SpeechRecognizer.ContinuousRecognitionSession.StartAsync();

                return;
            }

            throw new Exception($"Grammar file was not compiled successfully: {compilationResult.Status}");
        }
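Because this CompileGrammar throws when compilation fails, callers need to handle the exception; a short usage sketch (the wrapper method is an assumption):

        // Sketch: calling the throwing CompileGrammar shown above.
        private async Task StartSpeechAsync()
        {
            try
            {
                await CompileGrammar();
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Speech setup failed: " + ex.Message);
            }
        }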
Example No. 19
        public async Task Initialze(MotorController motorController,
                                    ServoController servoController,
                                    AutomaticDrive automaticDrive)
        {
            _motorController = motorController;
            _servoController = servoController;
            _automaticDrive  = automaticDrive;

            _speechRecognizer = new SpeechRecognizer(new Language("de-DE"));

            var grammerFile = await Package.Current.InstalledLocation.GetFileAsync(@"Audio\SpeechRecognizerGrammer.xml");

            var grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammerFile);

            _speechRecognizer.Constraints.Add(grammarConstraint);
            var compilationResult = await _speechRecognizer.CompileConstraintsAsync();

            _speechRecognizer.ContinuousRecognitionSession.ResultGenerated += RecognationResult;
            _speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
        }
Example No. 20
        private async Task InitializeRecognizer(Language recognizerLanguage)
        {
            if (speechRecognizer != null)
            {
                // cleanup prior to re-initializing this scenario.
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
                speechRecognizer.ContinuousRecognitionSession.Completed       -= ContinuousRecognitionSession_Completed;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated -=
                    ContinuousRecognitionSession_ResultGenerated;
                speechRecognizer.HypothesisGenerated -= SpeechRecognizer_HypothesisGenerated;

                this.speechRecognizer.Dispose();
                this.speechRecognizer = null;
            }

            this.speechRecognizer = new SpeechRecognizer(recognizerLanguage);

            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            Uri uri = new Uri("ms-appx:///Strings/srgs.grxml");

            var storageFile =
                await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uri);

            var grammarFileConstraint = new SpeechRecognitionGrammarFileConstraint(storageFile, "commands");

            speechRecognizer.Constraints.Add(grammarFileConstraint);

            SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();

            if (result.Status != SpeechRecognitionResultStatus.Success)
            {
                rootPage.NotifyUser("Grammar Compilation Failed: " + result.Status.ToString(), NotifyType.ErrorMessage);
            }


            speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated +=
                ContinuousRecognitionSession_ResultGenerated;
            speechRecognizer.HypothesisGenerated += SpeechRecognizer_HypothesisGenerated;
        }
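HypothesisGenerated delivers partial results while the user is still speaking. A minimal handler sketch; hypothesisText is an assumed TextBlock, not part of the example above.

        // Sketch of a HypothesisGenerated handler.
        private async void SpeechRecognizer_HypothesisGenerated(SpeechRecognizer sender, SpeechRecognitionHypothesisGeneratedEventArgs args)
        {
            string hypothesis = args.Hypothesis.Text;

            // Show the in-progress phrase on the UI thread (hypothesisText is assumed).
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                hypothesisText.Text = hypothesis + "...";
            });
        }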
Example No. 21
        private async void GrammarFileConstraintRecognizing_OnClick(object sender, RoutedEventArgs e)
        {
            // SRGS grammar (SpeechRecognitionGrammarFileConstraint)

            var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();

            var storageFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Sample.grxml"));

            var grammarFileConstraint = new SpeechRecognitionGrammarFileConstraint(storageFile, "colors");

            speechRecognizer.UIOptions.ExampleText = @"Ex. 'blue background', 'green text'";
            speechRecognizer.Constraints.Add(grammarFileConstraint);

            await speechRecognizer.CompileConstraintsAsync();

            var result = await speechRecognizer.RecognizeWithUIAsync();

            var dialog = new MessageDialog(result.Text, "Text spoken");

            await dialog.ShowAsync();
        }
Example No. 22
        private async void InitializeSpeechRecognizer()
        {
            this.spcLissener          = new SpeechRecognizer();
            spcLissener.StateChanged += spcLissener_StateChanged;
            spcLissener.ContinuousRecognitionSession.ResultGenerated += spcLissener_ResultGenerated;

            StorageFile GrammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(@"jdAffiGrammar.xml");

            SpeechRecognitionGrammarFileConstraint GrammarConstraint = new SpeechRecognitionGrammarFileConstraint(GrammarContentFile);

            spcLissener.Constraints.Add(GrammarConstraint);

            SpeechRecognitionCompilationResult CompilationResult = await spcLissener.CompileConstraintsAsync();

            RenderStatus("Sprach Status: " + CompilationResult.Status.ToString());

            if (CompilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await spcLissener.ContinuousRecognitionSession.StartAsync();
            }
        }
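Example No. 23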
        public async Task <bool> LoadVoiceControlModule(IVoiceController module)
        {
            //If the module is not loaded.
            if (!IsModuleLoaded(module))
            {
                //Create the grammar for the passed in module.
                SpeechRecognitionGrammarFileConstraint grammar = await CreateGrammarFromFile(module.GrammarFilePath,
                                                                                             module.VoiceControlKey);

                //Add the grammar file from the passed in module to the speech recognizer
                _recognizer.Constraints.Add(grammar);
                //Store the module into the activeModules Dictionary
                activeModules[module.VoiceControlKey] = module;
                //Set the voice control member variables of the module.
                module.IsVoiceControlLoaded  = true;
                module.IsVoiceControlEnabled = true;
                module.Grammar = grammar;
                return(true);
            }
            return(false);
        }
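Example No. 24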
        /// <summary>
        /// Creates a SpeechRecognizer instance and initializes the grammar.
        /// </summary>
        private async void InitializeRecognizer()
        {
            // Initialize the SRGS-compliant XML file.
            // For more information about grammars for Windows apps and how to
            // define and use SRGS-compliant grammars in your app, see
            // https://msdn.microsoft.com/en-us/library/dn596121.aspx

            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(@"SRGSColors.xml");

            // Initialize the SpeechRecognizer and add the grammar.
            speechRecognizer = new SpeechRecognizer();

            // Provide feedback to the user about the state of the recognizer. This can be used to provide
            // visual feedback to help the user understand whether they're being heard.
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            speechRecognizer.Constraints.Add(grammarConstraint);
            SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

            // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
            if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
            {
                // Disable the recognition button.
                btnContinuousRecognize.IsEnabled = false;

                // Let the user know that the grammar didn't compile properly.
                resultTextBlock.Text = "Unable to compile grammar.";
            }

            // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
            speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);

            // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
            // some recognized phrases occur, or the garbage rule is hit.
            speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
        }
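Example No. 25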
        /// <summary>
        /// Creates a SpeechRecognizer instance and initializes the grammar.
        /// </summary>
        private async Task InitializeRecognizer()
        {
            // Initialize the SRGS-compliant XML file.
            // For more information about grammars for Windows apps and how to
            // define and use SRGS-compliant grammars in your app, see
            // https://msdn.microsoft.com/en-us/library/dn596121.aspx

            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(@"SRGSColors.xml");

            // Initialize the SpeechRecognizer and add the grammar.
            speechRecognizer = new SpeechRecognizer();

            // Provide feedback to the user about the state of the recognizer.
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            // RecognizeWithUIAsync allows developers to customize the prompts.
            speechRecognizer.UIOptions.ExampleText = @"Try ""blue background, red border, green circle"".";

            SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            speechRecognizer.Constraints.Add(grammarConstraint);
            SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

            // Check to make sure that the constraints were in a proper format and the recognizer was able to compile it.
            if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
            {
                // Disable the recognition buttons.
                btnRecognizeWithUI.IsEnabled    = false;
                btnRecognizeWithoutUI.IsEnabled = false;

                // Let the user know that the grammar didn't compile properly.
                resultTextBlock.Visibility = Visibility.Visible;
                resultTextBlock.Text       = "Unable to compile grammar.";
            }

            // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
            speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);
        }
Example No. 26
        private async void ConversationButton_Click(object sender, RoutedEventArgs e)
        {
            // Set the question.
            this.SpeechDialogBox.Question = "What's your favorite color?";

            // Let the control ask the question out loud.
            await this.SpeechDialogBox.Speak("What is your favorite color?");

            // Reset the control when it answered (optional).
            this.SpeechDialogBox.TextChanged += this.SpeechInputBox_TextChanged;

            // Teach the control to recognize the colors of the rainbow in a random text.
            var storageFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets//ColorRecognizer.xml"));
            var grammarFileConstraint = new SpeechRecognitionGrammarFileConstraint(storageFile, "colors");
            this.SpeechDialogBox.Constraints.Clear();
            this.SpeechDialogBox.Constraints.Add(grammarFileConstraint);

            // Format the spoken response.
            this.SpeechDialogBox.ResponsePattern = "What a coincidence. {0} is my favorite color too.";

            // Start listening
            this.SpeechDialogBox.StartListening();
        }
Example No. 27
        private async Task <SpeechRecognitionResult> SpeechRecognizeAsync()
        {
            if (_speechRecognizer == null)
            {
                // Create an instance of SpeechRecognizer.
                _speechRecognizer = new SpeechRecognizer();

                // Get a file.
                var grammarFile = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Light.grxml"));

                // Create the constraint from the file.
                var srgsConstraint = new SpeechRecognitionGrammarFileConstraint(grammarFile, "light");

                // Add the constraint.
                _speechRecognizer.Constraints.Add(srgsConstraint);

                // Compile the constraints added above.
                await _speechRecognizer.CompileConstraintsAsync();
            }

            // Start recognition and return the result.
            return(await _speechRecognizer.RecognizeWithUIAsync());
        }
Example No. 28
        /// <summary>
        /// Starts the speech recognition
        /// </summary>
        public async void Start()
        {
            if (m_IsDisposed)
            {
                throw new ObjectDisposedException(nameof(DateTimeProvider));
            }

            var hasPermission = await HasMicrophonePermission();
            if (!hasPermission)
            {
                throw new UnauthorizedAccessException("No access to microphone!");
            }

            var grammarFile = await Package.Current.InstalledLocation.GetFileAsync(GrammerFile);
            var grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarFile);
            m_Recognizer.Constraints.Add(grammarConstraint);

            var compilationResult = await m_Recognizer.CompileConstraintsAsync();
            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await m_Recognizer.ContinuousRecognitionSession.StartAsync();
            }
        }
Example No. 29
        public SpeakingService()
        {
            _mediaElement      = new MediaElement();
            _speechSynthesizer = new SpeechSynthesizer();

            _recognizerLanguage = SpeechRecognizer.SystemSpeechLanguage;
            _speechRecognizer   = new SpeechRecognizer(_recognizerLanguage);

            Task.Factory.StartNew(async() =>
            {
                string languagetag      = _recognizerLanguage.LanguageTag;
                string filename         = string.Format("srgs\\{0}\\basegrammar.xml", languagetag);
                StorageFile grammarfile = await Package.Current.InstalledLocation.GetFileAsync(filename);

                var grammar = new SpeechRecognitionGrammarFileConstraint(grammarfile, "basegrammar");
                _speechRecognizer.Constraints.Add(grammar);
                var compileResult = await _speechRecognizer.CompileConstraintsAsync();

                if (compileResult.Status == SpeechRecognitionResultStatus.Success)
                {
                    await StartRecognizing();
                }
            });
        }
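Example No. 30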
        public async Task CompileGrammar()
        {
            try
            {
                StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(GrammarFile);

                SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

                SpeechRecognizer.Constraints.Add(grammarConstraint);

                SpeechRecognitionCompilationResult compilationResult = await SpeechRecognizer.CompileConstraintsAsync();

                if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
                {
                    return;
                }

                await SpeechRecognizer.ContinuousRecognitionSession.StartAsync();
            }
            catch (Exception)
            {
                // ignored
            }
        }
Example No. 31
        // Initialize Speech Recognizer and start async recognition
        private async void initializeSpeechRecognizer()
        {
            // Initialize recognizer
            recognizer = new SpeechRecognizer();
                            
            // Set event handlers
            recognizer.StateChanged += RecognizerStateChanged;
            recognizer.ContinuousRecognitionSession.ResultGenerated += RecognizerResultGenerated;

            // Load grammar file constraint
            string fileName = SRGS_FILE;
            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(fileName);

            SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);

            // Add grammar constraint
            recognizer.Constraints.Add(grammarConstraint);

            // Compile grammar
            SpeechRecognitionCompilationResult compilationResult = await recognizer.CompileConstraintsAsync();

            UpdateStatus("Status: " + compilationResult.Status.ToString());

            // If compilation succeeded, report the status and start continuous recognition.
            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                UpdateStatus("Result: " + compilationResult.ToString());

                await recognizer.ContinuousRecognitionSession.StartAsync();
            }
            else
            {
                UpdateStatus("Status: " + compilationResult.Status);
            }
        }
Example No. 32
        private async Task<ISpeechRecognitionConstraint> LoadDynamicConstraintAsync()
        {
            // Get template file
            var templateFile = await Package.Current.InstalledLocation.GetFileAsync(GRAMMAR_FILE);

            // Create dynamic file
            var dynamicFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync("DynamicGrammar.xml", CreationCollisionOption.ReplaceExisting);

            // Copy from template to dynamic and add new rules
            using (var templateStream = await templateFile.OpenReadAsync())
            {
                // Import grammar namespace
                XNamespace xmlns = "http://www.w3.org/2001/06/grammar";

                // Load template
                XDocument dynamicDoc = XDocument.Load(templateStream.AsStreamForRead());

                // Add dynamic rules
                AddDynamicRules(xmlns, dynamicDoc.Root);

                // Write out to temp file
                using (var dynamicStream = await dynamicFile.OpenAsync(FileAccessMode.ReadWrite))
                {
                    // Customize settings to be SRGS friendly
                    XmlWriterSettings srgsSettings = new XmlWriterSettings
                    {
                        Indent = true,
                        NewLineHandling = NewLineHandling.Entitize,
                        NewLineOnAttributes = false
                    };

                    // Create writer for dynamic file with proper settings
                    using (var dynamicWriter = XmlWriter.Create(dynamicStream.AsStreamForWrite(), srgsSettings))
                    {
                        // Save dynamic to file
                        dynamicDoc.WriteTo(dynamicWriter);
                    }
                }
            }

            // Load constraint from dynamic file
            var constraint = new SpeechRecognitionGrammarFileConstraint(dynamicFile);

            // Return the loaded constraint
            return constraint;
        }
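Example No. 33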
        /// <summary>
        /// Initialize Speech Recognizer and compile constraints.
        /// </summary>
        /// <param name="recognizerLanguage">Language to use for the speech recognizer</param>
        /// <returns>Awaitable task.</returns>
        private async Task InitializeRecognizer(Language recognizerLanguage)
        {
            if (speechRecognizer != null)
            {
                // cleanup prior to re-initializing this scenario.
                speechRecognizer.ContinuousRecognitionSession.Completed -= ContinuousRecognitionSession_Completed;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;

                this.speechRecognizer.Dispose();
                this.speechRecognizer = null;
            }

            try {
                // Initialize the SRGS-compliant XML file.
                // For more information about grammars for Windows apps and how to
                // define and use SRGS-compliant grammars in your app, see
                // https://msdn.microsoft.com/en-us/library/dn596121.aspx

                // determine the language code being used.
                string languageTag = recognizerLanguage.LanguageTag;
                string fileName = String.Format("SRGS\\{0}\\SRGSColors.xml", languageTag);
                StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(fileName);

                resultTextBlock.Text = speechResourceMap.GetValue("SRGSHelpText", speechContext).ValueAsString;

                // Initialize the SpeechRecognizer and add the grammar.
                speechRecognizer = new SpeechRecognizer(recognizerLanguage);

                // Provide feedback to the user about the state of the recognizer. This can be used to provide
                // visual feedback to help the user understand whether they're being heard.
                speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

                SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);
                speechRecognizer.Constraints.Add(grammarConstraint);
                SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

                // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
                if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
                {
                    // Disable the recognition button.
                    btnContinuousRecognize.IsEnabled = false;

                    // Let the user know that the grammar didn't compile properly.
                    resultTextBlock.Text = "Unable to compile grammar.";
                }
                else
                {

                    // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
                    speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);

                    // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
                    // some recognized phrases occur, or the garbage rule is hit.
                    speechRecognizer.ContinuousRecognitionSession.Completed += ContinuousRecognitionSession_Completed;
                    speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;


                    btnContinuousRecognize.IsEnabled = true;

                    resultTextBlock.Text = speechResourceMap.GetValue("SRGSHelpText", speechContext).ValueAsString;
                    resultTextBlock.Visibility = Visibility.Visible;
                }
            }
            catch (Exception ex)
            {
                if ((uint)ex.HResult == HResultRecognizerNotFound)
                {
                    btnContinuousRecognize.IsEnabled = false;

                    resultTextBlock.Visibility = Visibility.Visible;
                    resultTextBlock.Text = "Speech Language pack for selected language not installed.";
                }
                else
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
                    await messageDialog.ShowAsync();
                }
            }

        }
Example No. 34
        private async Task InitializeRecognizer()
        {
            bool permissionGained = await RequestMicrophonePermission();
            if (!permissionGained)
            {
                stat.Text = "No mic permission";
                return;
            }
            // Create an instance of SpeechRecognizer.
            speechRecognizer = new SpeechRecognizer();
            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(@"grammar.xml");
            SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);
            speechRecognizer.Constraints.Add(grammarConstraint);
            SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

            if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
            {
                stat.Text = "Error:" + compilationResult.Status.ToString();
                return;
            }

            // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
            speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
            await speechRecognizer.ContinuousRecognitionSession.StartAsync();
        }
Example No. 35
        // Initialize Speech Recognizer and start async recognition
        private async Task InitializeSpeechRecognizer()
        {
            // Initialize recognizer
            _recognizer = new SpeechRecognizer();


            // Set event handlers
            _recognizer.StateChanged += Recognizer_StateChanged;
            _recognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;


            // Load grammar file constraint
            string fileName = "Grammar\\Grammar.xml";
            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(fileName);

            SpeechRecognitionGrammarFileConstraint grammarConstraint =
                new SpeechRecognitionGrammarFileConstraint(grammarContentFile);


            // Add grammar constraint
            _recognizer.Constraints.Add(grammarConstraint);

            // Compile grammar
            SpeechRecognitionCompilationResult compilationResult = await _recognizer.CompileConstraintsAsync();

            Debug.WriteLine("Status: " + compilationResult.Status.ToString());

            // If compilation succeeded, start the continuous recognition session.
            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await _recognizer.ContinuousRecognitionSession.StartAsync();
            }
        }
Example No. 36
        public async Task <RecognizedSpeech> Recognize(string constraints, bool ui)
        {
            SpeechRecognitionGrammarFileConstraint grammarFileConstraint = null;
            var result  = new RecognizedSpeech();
            var isTable = false;
            Dictionary <string, string> dictionary = null;

            if (!string.IsNullOrWhiteSpace(constraints))
            {
                isTable = constraints.StartsWith("{table:");

                if (isTable)
                {
                    var name = constraints.Substring(7);
                    var i    = name.IndexOf("}", StringComparison.CurrentCultureIgnoreCase);
                    name = name.Substring(0, i);

                    var constraintBuilder = new StringBuilder();
                    dictionary = MainPage.Instance.mainDictionary[name];

                    Debug.WriteLine("table " + name + " count=" + dictionary.Count);

                    foreach (var key in dictionary.Keys)
                    {
                        constraintBuilder.Append(key.Replace(",", " "));
                        constraintBuilder.Append(",");
                    }

                    if (constraintBuilder.Length < 2)
                    {
                        result.error = -3;
                        return(result);
                    }

                    constraints = constraintBuilder.ToString(0, constraintBuilder.Length - 1);
                    constraints = constraints.Replace(";", "-").Replace("&amp", " and ").Replace("&", " and ");
                }

                // build grammar constraints
                var grammarFileTemplate =
                    await
                    StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///GrammarConstraintTemplate.grxml"));

                const string wordTemplate = "<item>{0}</item>";
                const string itemTemplate = "<item><one-of>{0}</one-of><tag>out=\"{1}\";</tag></item>";

                var    itemBuilder = new StringBuilder();
                var    items       = constraints.Split(';');
                string keyword     = null;
                foreach (var itemPart in items)
                {
                    var item = itemPart;

                    var equals = item.IndexOf('=');
                    if (equals > -1)
                    {
                        keyword = item.Substring(0, equals);
                        item    = item.Substring(equals + 1);
                    }

                    var words       = item.Split(',');
                    var wordBuilder = new StringBuilder();
                    foreach (var word in words)
                    {
                        wordBuilder.AppendFormat(wordTemplate, word);
                    }

                    if (!string.IsNullOrWhiteSpace(keyword))
                    {
                        itemBuilder.AppendFormat(itemTemplate, wordBuilder, keyword);
                    }
                    else
                    {
                        itemBuilder.Append(wordBuilder);
                    }
                }

                var localFolder = ApplicationData.Current.LocalFolder;

                var grammarTemplate = await FileIO.ReadTextAsync(grammarFileTemplate);

                var grammarFile =
                    await
                    localFolder.CreateFileAsync("GrammarConstraint.grxml", CreationCollisionOption.ReplaceExisting);

                var finalGrammarText = string.Format(grammarTemplate, itemBuilder);
                await FileIO.WriteTextAsync(grammarFile, finalGrammarText);

                grammarFileConstraint = new SpeechRecognitionGrammarFileConstraint(grammarFile, "constraints");
            }

            if (this.isRecognizing && this.recognizer != null)
            {
                await this.recognizer.StopRecognitionAsync();
            }

            this.recognizer = new SpeechRecognizer();

            if (grammarFileConstraint != null)
            {
                this.recognizer.Constraints.Add(grammarFileConstraint);
            }

            SpeechRecognitionResult recognize = null;

            try
            {
                this.isRecognizing = false;
                this.SpeechStatusChanged?.Invoke(this, new SpeechArgs { Status = SpeechStatus.None });

                await this.recognizer.CompileConstraintsAsync();

                this.isRecognizing = true;
                this.SpeechStatusChanged?.Invoke(this, new SpeechArgs { Status = SpeechStatus.Listening });

                recognize = await (ui ? this.recognizer.RecognizeWithUIAsync() : this.recognizer.RecognizeAsync());
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.GetType() + ":" + e.Message);

                if (recognize != null)
                {
                    result.status = recognize.Status;
                }

                result.confidence = 5;
                return result;
            }
            finally
            {
                this.isRecognizing = false;
                this.SpeechStatusChanged?.Invoke(
                    this,
                    new SpeechArgs { Status = this.isUserStopped ? SpeechStatus.Stopped : SpeechStatus.None });
            }

            result.status = this.isUserStopped ? SpeechRecognitionResultStatus.UserCanceled : recognize.Status;

            if (constraints == null)
            {
                result.text = recognize.Text;
                return result;
            }

            result.confidence = (int)recognize.Confidence;

            var text = recognize.Text.ToUpperInvariant();

            var    items2   = constraints.Split(';');
            string keyword2 = null;
            var    index    = 1;

            foreach (var itemPart in items2)
            {
                var item = itemPart;

                var equals = item.IndexOf('=');
                if (equals > -1)
                {
                    keyword2 = item.Substring(0, equals);
                    item     = item.Substring(equals + 1);
                }

                var words      = item.Split(',');
                var innerIndex = 1;
                foreach (var word in words)
                {
                    if (word.ToUpperInvariant().Equals(text))
                    {
                        result.text = keyword2 ?? word;
                        if (isTable)
                        {
                            result.action = dictionary[result.text];
                        }

                        result.index = items2.Length == 1 ? innerIndex : index;
                        return result;
                    }

                    innerIndex++;
                }

                index++;
            }

            result.text = recognize.Text;
            return result;
        }
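
For reference, the constraints argument is a small ad-hoc format: entries separated by ';', each optionally prefixed with "keyword=", with spoken synonyms separated by ','; the special "{table:name}" form instead pulls phrases from MainPage.Instance.mainDictionary. A hedged usage sketch follows (the calling context and the RecognizedSpeech field defaults are assumptions, not part of the sample):

        // Usage sketch only; assumes it runs in the same class as Recognize
        // and that RecognizedSpeech.error defaults to 0.
        private async Task RecognizeExampleAsync()
        {
            // Two keywords, each with spoken synonyms. Saying "lamp" yields
            // result.text == "lights" and result.index == 1 (first entry).
            RecognizedSpeech result = await this.Recognize("lights=lights,lamp;tv=tv,television", false);

            if (result.status == SpeechRecognitionResultStatus.Success)
            {
                Debug.WriteLine("keyword=" + result.text
                    + " index=" + result.index
                    + " confidence=" + result.confidence);
            }

            // The "{table:...}" form additionally fills result.action with the
            // value mapped to the recognized phrase in the named dictionary.
        }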
        /// <summary>
        /// Initialize Speech Recognizer and compile constraints.
        /// </summary>
        /// <param name="recognizerLanguage">Language to use for the speech recognizer</param>
        /// <returns>Awaitable task.</returns>
        private async Task InitializeRecognizer(Language recognizerLanguage)
        {
            if (speechRecognizer != null)
            {
                // cleanup prior to re-initializing this scenario.
                speechRecognizer.ContinuousRecognitionSession.Completed       -= ContinuousRecognitionSession_Completed;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
                speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;

                this.speechRecognizer.Dispose();
                this.speechRecognizer = null;
            }

            try
            {
                // Initialize the SRGS-compliant XML file.
                // For more information about grammars for Windows apps and how to
                // define and use SRGS-compliant grammars in your app, see
                // https://msdn.microsoft.com/en-us/library/dn596121.aspx

                // determine the language code being used.
                string      languageTag        = recognizerLanguage.LanguageTag;
                string      fileName           = String.Format("SRGS\\{0}\\SRGSColors.xml", languageTag);
                StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(fileName);

                resultTextBlock.Text = speechResourceMap.GetValue("SRGSHelpText", speechContext).ValueAsString;

                // Initialize the SpeechRecognizer and add the grammar.
                speechRecognizer = new SpeechRecognizer(recognizerLanguage);

                // Provide feedback to the user about the state of the recognizer. This can be used to provide
                // visual feedback to help the user understand whether they're being heard.
                speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

                SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);
                speechRecognizer.Constraints.Add(grammarConstraint);
                SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

                // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
                if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
                {
                    // Disable the recognition button.
                    btnContinuousRecognize.IsEnabled = false;

                    // Let the user know that the grammar didn't compile properly.
                    resultTextBlock.Text = "Unable to compile grammar.";
                }
                else
                {
                    // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
                    speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);

                    // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
                    // some recognized phrases occur, or the garbage rule is hit.
                    speechRecognizer.ContinuousRecognitionSession.Completed       += ContinuousRecognitionSession_Completed;
                    speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;


                    btnContinuousRecognize.IsEnabled = true;

                    resultTextBlock.Text       = speechResourceMap.GetValue("SRGSHelpText", speechContext).ValueAsString;
                    resultTextBlock.Visibility = Visibility.Visible;
                }
            }
            catch (Exception ex)
            {
                if ((uint)ex.HResult == HResultRecognizerNotFound)
                {
                    btnContinuousRecognize.IsEnabled = false;

                    resultTextBlock.Visibility = Visibility.Visible;
                    resultTextBlock.Text       = "Speech Language pack for selected language not installed.";
                }
                else
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
                    await messageDialog.ShowAsync();
                }
            }
        }
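
The ContinuousRecognitionSession handlers registered above are outside this excerpt; a minimal sketch of what they typically look like follows. Only the names and signatures follow from the registration code above; the bodies are assumptions.

        // Sketch only: handler bodies are assumed, not taken from this sample.
        private async void ContinuousRecognitionSession_ResultGenerated(
            SpeechContinuousRecognitionSession sender,
            SpeechContinuousRecognitionResultGeneratedEventArgs args)
        {
            // These events fire off the UI thread; marshal before touching XAML.
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                resultTextBlock.Text = args.Result.Text;
            });
        }

        private void ContinuousRecognitionSession_Completed(
            SpeechContinuousRecognitionSession sender,
            SpeechContinuousRecognitionCompletedEventArgs args)
        {
            // Completed reports timeouts and error states for the session.
            Debug.WriteLine("Continuous session completed: " + args.Status);
        }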