Example #1
        /// <summary>
        /// Handles events fired when a result is generated. A result may come from the garbage rule,
        /// which fires when general room noise or side talk is captured. Such a result typically has
        /// a confidence of Rejected, but may occasionally match a rule with low confidence.
        /// </summary>
        /// <param name="sender">The Recognition session that generated this result</param>
        /// <param name="args">Details about the recognized speech</param>
        /// <remarks>
        /// <para> This method raises the PhraseRecognized event. Keep in mind that the
        /// ContinuousRecognitionSession.ResultGenerated event is raised on an arbitrary thread
        /// from the thread pool. If a <see cref="SpeechManager"/> client has thread affinity,
        /// like in a XAML-based UI, you need to marshal the call to the client's thread.
        /// </para>
        /// <para>In a UWP app, use the <see cref="CoreDispatcher"/> to execute the call
        /// on the main UI thread.</para>
        /// </remarks>
        private void ContinuousRecognitionSession_ResultGenerated(
            SpeechContinuousRecognitionSession sender,
            SpeechContinuousRecognitionResultGeneratedEventArgs args)
        {
            if (args.Result.Status != SpeechRecognitionResultStatus.Success)
            {
#if VERBOSE_DEBUG
                Debug.WriteLine("SpeechManager: ResultGenerated: {0}", args.Result.Status);
#endif
                return;
            }

            // Unpack event arg data.
            bool   hasConstraint = args.Result.Constraint != null;
            var    confidence    = args.Result.Confidence;
            string phrase        = args.Result.Text;

            // The garbage rule doesn't have a tag associated with it, and
            // the other rules return a string matching the tag provided
            // when the grammar was compiled.
            string tag = hasConstraint ? args.Result.Constraint.Tag : "unknown";
            if (tag == "unknown")
            {
#if VERBOSE_DEBUG
                Debug.WriteLine("SpeechManager: ResultGenerated: garbage rule hit");
#endif
                return;
            }

#if VERBOSE_DEBUG
            Debug.WriteLine("SpeechManager: ResultGenerated: {0}", phrase);
#endif

            if (hasConstraint && args.Result.Constraint.Type == SpeechRecognitionConstraintType.List)
            {
                // The List constraint type represents speech from
                // a compiled grammar of commands.
                CommandVerb verb = GetPhraseIntent(phrase);

                // You may decide to use per-phrase confidence levels in order to
                // tune the behavior of your grammar based on testing.
                if (confidence == SpeechRecognitionConfidence.Medium ||
                    confidence == SpeechRecognitionConfidence.High)
                {
                    // A single TryGetValue avoids the ContainsKey-plus-indexer double lookup.
                    Person person;
                    PhraseToPersonDictionary.TryGetValue(phrase, out person);

                    // Raise the PhraseRecognized event. Clients with thread affinity,
                    // like in a XAML-based UI, need to marshal the call to the
                    // client's thread.
                    PhraseRecognizedEventArgs eventArgs = new PhraseRecognizedEventArgs(
                        person,
                        phrase,
                        verb,
                        args);
                    OnPhraseRecognized(eventArgs);
                }
            }
            else if (hasConstraint && args.Result.Constraint.Type == SpeechRecognitionConstraintType.Topic)
            {
                // The Topic constraint type represents speech from dictation.

                // Raise the PhraseRecognized event. Clients with thread affinity,
                // like in a XAML-based UI, need to marshal the call to the
                // client's thread.
                PhraseRecognizedEventArgs eventArgs = new PhraseRecognizedEventArgs(
                    null,
                    phrase,
                    CommandVerb.Dictation,
                    args);
                OnPhraseRecognized(eventArgs);
            }
        }
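
        // GetPhraseIntent isn't defined in this example. The sketch below is a
        // minimal, hypothetical version of the phrase-to-verb mapping it performs.
        // The keywords are placeholders; the real mapping depends on how the
        // grammar's command phrases are authored, and CommandVerb.None is assumed
        // to be the enum's "no match" member.
        private static readonly Dictionary<string, CommandVerb> VerbKeywords =
            new Dictionary<string, CommandVerb>
            {
                { "create", CommandVerb.Create },
                { "read",   CommandVerb.Read },
                { "edit",   CommandVerb.Edit },
                { "delete", CommandVerb.Delete },
                { "show",   CommandVerb.Show },
                { "help",   CommandVerb.Help }
            };

        private CommandVerb GetPhraseIntent(string phrase)
        {
            // Return the verb whose keyword appears in the recognized phrase.
            foreach (var pair in VerbKeywords)
            {
                if (phrase.IndexOf(pair.Key, StringComparison.OrdinalIgnoreCase) >= 0)
                {
                    return pair.Value;
                }
            }

            return CommandVerb.None;
        }
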
        /// <summary>
        /// Handles the <see cref="SpeechManager.PhraseRecognized"/> event.
        /// </summary>
        /// <param name="sender">the <see cref="SpeechManager"/> that raised the event.</param>
        /// <param name="e">The event data.</param>
        private async void speechManager_PhraseRecognized(object sender, PhraseRecognizedEventArgs e)
        {
            Person person = e.PhraseTargetPerson;
            string phrase = e.PhraseText;
            CommandVerb verb = e.Verb;

            string msg = String.Format("Heard phrase: {0}", phrase);
            Debug.WriteLine(msg);

            switch (verb)
            {
                case CommandVerb.Dictation:
                    {
                        // The phrase came from dictation, so transition speech recognition
                        // to listen for command phrases.
                        await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                        {
                            FocusedNote.NoteBusinessObject.NoteText = phrase;
                            await _speechManager.SetRecognitionMode(SpeechRecognitionMode.CommandPhrases);
                        });

                        break;
                    }
                case CommandVerb.Create:
                    {
                        // A command for creating a note was recognized.
                        await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                        {
                            _activeNote = CreateNote(person);
                            _activeNote.NoteText = "Dictate your note here!";
                            await _speechManager.SpeakAsync("Dictate your note", _media);
                            await _speechManager.SetRecognitionMode(SpeechRecognitionMode.Dictation);
                        });

                        break;
                    }
                case CommandVerb.Read:
                    {
                        // The command for reading a note was recognized.
                        bool focusedNoteAssigned = await FocusedNoteAssigned();
                        if (focusedNoteAssigned)
                        {
                            await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                            {
                                await _speechManager.SpeakAsync(
                                    FocusedNote.NoteBusinessObject.NoteText,
                                    _media);
                            });
                        }

                        break;
                    }
                case CommandVerb.Edit:
                    {
                        // The command for editing a note was recognized.
                        bool focusedNoteAssigned = await FocusedNoteAssigned();
                        if (focusedNoteAssigned)
                        {
                            await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                            {
                                 await _speechManager.SpeakAsync("Dictate your note", _media);
                                 await _speechManager.SetRecognitionMode(SpeechRecognitionMode.Dictation);
                             });
                        }

                        break;
                    }
                case CommandVerb.Delete:
                    {
                        // The command for deleting a note was recognized.
                        bool focusedNoteAssigned = await FocusedNoteAssigned();
                        if (focusedNoteAssigned)
                        {
                            await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                            {
                                FocusedNote.OnDeleteNoteEvent();
                                 await _speechManager.SpeakAsync("Note deleted", _media);
                             });
                        }

                        break;
                    }
                case CommandVerb.Show:
                    {
                        Debug.WriteLine("SpeechManager.PhraseRecognized handler: Show TBD");
                        break;
                    }
                case CommandVerb.Help:
                    {
                        // A command for spoken help was recognized.
                        await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                        {
                            await _speechManager.SpeakAsync(_helpString, _media);
                        });
                        
                        break;
                    }
                default:
                    {
                        Debug.WriteLine("SpeechManager.PhraseRecognized handler: Couldn't determine phrase intent");
                        break;
                    }
            }
        }
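
        // FocusedNoteAssigned isn't defined in this example. A plausible sketch,
        // assuming it checks whether a note currently has focus and speaks a
        // prompt when none does; the actual implementation may differ. _dispatcher
        // is assumed to be the page's CoreDispatcher, captured on the UI thread.
        private async Task<bool> FocusedNoteAssigned()
        {
            bool assigned = FocusedNote != null;

            if (!assigned)
            {
                // Tell the user why the spoken command had no effect.
                await _speechManager.SpeakAsync("No note is selected", _media);
            }

            return assigned;
        }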
Example #3
 protected virtual void OnPhraseRecognized(PhraseRecognizedEventArgs e)
 {
     PhraseRecognized?.Invoke(this, e);
 }
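
 // The raiser above pairs with an event declared on SpeechManager, which clients
 // subscribe to, for example:
 //
 //     _speechManager.PhraseRecognized += speechManager_PhraseRecognized;
 public event EventHandler<PhraseRecognizedEventArgs> PhraseRecognized;

 // PhraseRecognizedEventArgs isn't shown in these examples. Judging from the
 // constructor calls and the properties the handler reads, a minimal sketch could
 // look like the following; the property names match the usage above, and
 // RecognitionArgs is a hypothetical name for the wrapped recognition event data.
 public class PhraseRecognizedEventArgs : EventArgs
 {
     public PhraseRecognizedEventArgs(
         Person person,
         string phrase,
         CommandVerb verb,
         SpeechContinuousRecognitionResultGeneratedEventArgs args)
     {
         PhraseTargetPerson = person;
         PhraseText = phrase;
         Verb = verb;
         RecognitionArgs = args;
     }

     public Person PhraseTargetPerson { get; private set; }
     public string PhraseText { get; private set; }
     public CommandVerb Verb { get; private set; }
     public SpeechContinuousRecognitionResultGeneratedEventArgs RecognitionArgs { get; private set; }
 }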