/// <summary>
/// Wires up ROS publishing, TTS, keyword recognition and dictation.
/// Keyword mode runs by default; the "record this" phrase switches to dictation.
/// </summary>
private void Start()
{
    Advertise("VoicePub", "/hololens/audio/user_transcript", 1, out pub);
    voicebox = gameObject.GetComponent<TextToSpeech>();

    // Activation phrase for dictation
    Keywords.Add("Hello", () =>
    {
        ros.std_msgs.String msg = new ros.std_msgs.String("Hello!");
        if (pub != null)
        {
            pub.SendMessage(msg);
        }
        voicebox.StartSpeaking("Hello");
    });

    Keywords.Add("record this", () =>
    {
        // Fix: PhraseRecognitionSystem.Shutdown() completes asynchronously, and
        // keyword recognition and dictation cannot run at the same time. Starting
        // the dictation recognizer before the phrase system has fully stopped can
        // fail, so defer the start to a coroutine that waits for the shutdown.
        StartCoroutine(StartDictationAfterShutdown());
    });

    dictationRecognizer = new DictationRecognizer();
    dictationRecognizer.DictationComplete += DictationComplete;
    dictationRecognizer.DictationError += DictationError;
    dictationRecognizer.DictationHypothesis += DictationHypothesis;
    dictationRecognizer.DictationResult += DictationResult;

    keywordRecognizer = new KeywordRecognizer(Keywords.Keys.ToArray());
    keywordRecognizer.OnPhraseRecognized += KeywordRecognizer_OnPhraseRecognized;
    keywordRecognizer.Start();
}

/// <summary>
/// Shuts down phrase recognition, waits until it has actually stopped,
/// then plays the start beep and begins dictation.
/// </summary>
private IEnumerator StartDictationAfterShutdown()
{
    PhraseRecognitionSystem.Shutdown();
    while (PhraseRecognitionSystem.Status == SpeechSystemStatus.Running)
    {
        yield return null;
    }
    StartBeep.Play();
    dictationRecognizer.Start();
}
/// <summary>
/// Tears down the dictation session and hands the microphone back to
/// keyword/phrase recognition.
/// </summary>
public void keywordRestart()
{
    // Stop and release the dictation recognizer first; only one speech
    // system may be active at a time.
    dictationRecognizer.Stop();
    dictationRecognizer.Dispose();

    // Bring keyword recognition back online.
    PhraseRecognitionSystem.Restart();
    keywordRecognizer.Start();
}
/// <summary>
/// Fired when dictation ends (timeout, Stop(), or error). Releases the
/// recognizer and returns control to phrase recognition.
/// </summary>
/// <param name="cause">Why the dictation session completed (unused here).</param>
public void DictationRecognizer_DictationComplete(DictationCompletionCause cause)
{
    //Shuts off dictator and restarts keyword recognizer
    Debug.Log("Dictation Timeout");
    D_Recognizer.Dispose();
    PhraseRecognitionSystem.Restart();
}
/// <summary>
/// Coroutine that switches between dictation and keyword recognition,
/// waiting for each speech system to finish stopping before starting the other.
/// </summary>
/// <param name="state">True to enter dictation mode; false to return to keyword mode.</param>
private IEnumerator _toggleDictation(bool state)
{
    if (state)
    {
        // Entering dictation: stop keyword recognition and wait until the
        // phrase system has actually shut down before starting dictation.
        keywordRecognizer.Stop();
        PhraseRecognitionSystem.Shutdown();
        while (PhraseRecognitionSystem.Status == SpeechSystemStatus.Running)
        {
            yield return null;
        }
        dictationRecognizer.Start();
    }
    else
    {
        // Leaving dictation: stop it, wait for it to halt, then bring
        // keyword recognition back up.
        dictationRecognizer.Stop();
        while (dictationRecognizer.Status == SpeechSystemStatus.Running)
        {
            yield return null;
        }
        PhraseRecognitionSystem.Restart();
        keywordRecognizer.Start();
    }
}
/// <summary>
/// Ends the recording session: stops the microphone and dictation, waits for
/// dictation to halt, then restores phrase recognition.
/// </summary>
public static IEnumerator StopRecording()
{
#if UNITY_EDITOR || UNITY_WSA
    // Nothing to do if we are not listening or a transition is in flight.
    if (!IsListening || isTransitioning)
    {
        Debug.LogWarning("Unable to stop recording");
        yield break;
    }

    IsListening = false;
    isTransitioning = true;

    Microphone.End(DeviceName);

    if (dictationRecognizer.Status == SpeechSystemStatus.Running)
    {
        dictationRecognizer.Stop();
    }

    // Dictation stops asynchronously; spin until it is fully down.
    while (dictationRecognizer.Status == SpeechSystemStatus.Running)
    {
        yield return null;
    }

    PhraseRecognitionSystem.Restart();
    isTransitioning = false;
#else
    // No yields in this branch, so the method compiles as a plain method here.
    return null;
#endif
}
/// <summary>
/// Starts a dictation session, wiring result/hypothesis/completion/error
/// handlers to the UI text fields.
/// </summary>
public void StartDictation()
{
    // Keyword/phrase recognition and dictation cannot run simultaneously.
    if (PhraseRecognitionSystem.Status != SpeechSystemStatus.Stopped)
    {
        PhraseRecognitionSystem.Shutdown();
    }

    // Fix: dispose any recognizer left over from a previous call so repeated
    // StartDictation() invocations do not leak native recognizer instances
    // (each call previously created a new DictationRecognizer unconditionally).
    if (m_DictationRecognizer != null)
    {
        if (m_DictationRecognizer.Status == SpeechSystemStatus.Running)
        {
            m_DictationRecognizer.Stop();
        }
        m_DictationRecognizer.Dispose();
    }

    m_DictationRecognizer = new DictationRecognizer();

    m_DictationRecognizer.DictationResult += (string text, ConfidenceLevel confidence) =>
    {
        m_Recognitions.text += text + "\n";
    };

    m_DictationRecognizer.DictationHypothesis += ((string text) =>
    {
        m_Hypotheses.text += text + "\n";
    });

    m_DictationRecognizer.DictationComplete += ((DictationCompletionCause completionCause) =>
    {
        if (completionCause != DictationCompletionCause.Complete)
        {
            Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
        }
    });

    m_DictationRecognizer.DictationError += ((string error, int hresult) =>
    {
        Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
    });

    // Fix: clear the output fields BEFORE starting; clearing afterwards could
    // wipe a result or hypothesis that arrived between Start() and the clear.
    m_Recognitions.text = "";
    m_Hypotheses.text = "";

    m_DictationRecognizer.Start();
    enableUI("StartDictation");
}
/// <summary>
/// Loads the SRGS grammar from StreamingAssets and starts grammar-based
/// recognition, echoing recognized phrases into the UI.
/// </summary>
public void StartGrammer()
{
    // Grammar recognition cannot run while the phrase system is active.
    if (PhraseRecognitionSystem.Status != SpeechSystemStatus.Stopped)
    {
        PhraseRecognitionSystem.Shutdown();
    }

    try
    {
        m_GrammerRecognizer = new GrammarRecognizer(Application.streamingAssetsPath + "/SRGS/AmexGrammer.xml", ConfidenceLevel.Low);
    }
    catch (Exception ex)
    {
        Debug.Log(ex.Message);
    }

    // Fix: if the grammar file failed to load, the constructor threw and
    // m_GrammerRecognizer is still null; every statement below would raise a
    // NullReferenceException. Bail out instead.
    if (m_GrammerRecognizer == null)
    {
        return;
    }

    m_GrammerRecognizer.OnPhraseRecognized += ((PhraseRecognizedEventArgs args) =>
    {
        Debug.Log("Phrase recognized..");
        StringBuilder builder = new StringBuilder();
        builder.AppendFormat("{0} ({1}){2}", args.text, args.confidence, Environment.NewLine);
        m_Recognitions.text += builder.ToString();
    });

    m_Recognitions.text = "";
    m_GrammerRecognizer.Start();

    // Show running state and the resolved grammar path for debugging.
    m_Hypotheses.text = m_GrammerRecognizer.IsRunning ? "True" : "False";
    m_Hypotheses.text += "\n" + m_GrammerRecognizer.GrammarFilePath;
    enableUI("StartGrammer");
}
/// <summary>
/// Ends the recording session: stops the microphone and dictation, waits for
/// dictation to fully stop, then restarts phrase recognition.
/// </summary>
public static async Task StopRecordingAsync()
{
#if UNITY_STANDALONE_WIN || UNITY_WSA || UNITY_EDITOR_WIN
    if (!IsListening || isTransitioning)
    {
        Debug.LogWarning("Unable to stop recording");
        return;
    }

    IsListening = false;
    isTransitioning = true;

    if (hasListener)
    {
        InputSystem.PopModalInputHandler();
        hasListener = false;
    }

    Microphone.End(DeviceName);

    if (dictationRecognizer.Status == SpeechSystemStatus.Running)
    {
        dictationRecognizer.Stop();
    }

    // Fix: wait for the DICTATION recognizer to stop, not the phrase system.
    // The phrase system was already shut down when recording started, so the
    // original condition was immediately true and Restart() could race the
    // still-running dictation recognizer.
    await new WaitUntil(() => dictationRecognizer.Status != SpeechSystemStatus.Running);

    PhraseRecognitionSystem.Restart();
    await new WaitUntil(() => PhraseRecognitionSystem.Status == SpeechSystemStatus.Running);

    isTransitioning = false;
#endif // UNITY_STANDALONE_WIN || UNITY_WSA || UNITY_EDITOR_WIN
}
/// <inheritdoc />
public async Task StartRecordingAsync(GameObject listener = null, float initialSilenceTimeout = 5f, float autoSilenceTimeout = 20f, int recordingTime = 10, string micDeviceName = "")
{
#if UNITY_STANDALONE_WIN || UNITY_WSA || UNITY_EDITOR_WIN
    IMixedRealityInputSystem inputSystem = Service as IMixedRealityInputSystem;

    // Refuse to start while already listening, mid-transition, or outside play mode.
    if (IsListening || isTransitioning || inputSystem == null || !Application.isPlaying)
    {
        Debug.LogWarning("Unable to start recording");
        return;
    }

    // With ManualStart behavior the recognizer was never auto-created; build it now.
    if (dictationRecognizer == null && InputSystemProfile.SpeechCommandsProfile.SpeechRecognizerStartBehavior == AutoStartBehavior.ManualStart)
    {
        InitializeDictationRecognizer();
    }

    hasFailed = false;
    IsListening = true;
    isTransitioning = true;

    if (listener != null)
    {
        hasListener = true;
        inputSystem.PushModalInputHandler(listener);
    }

    // Phrase recognition and dictation cannot run at the same time.
    if (PhraseRecognitionSystem.Status == SpeechSystemStatus.Running)
    {
        PhraseRecognitionSystem.Shutdown();
    }
    await waitUntilPhraseRecognitionSystemHasStopped;
    Debug.Assert(PhraseRecognitionSystem.Status == SpeechSystemStatus.Stopped);

    // Query the maximum frequency of the default microphone.
    int minSamplingRate; // Not used.
    deviceName = micDeviceName;
    Microphone.GetDeviceCaps(deviceName, out minSamplingRate, out samplingRate);

    dictationRecognizer.InitialSilenceTimeoutSeconds = initialSilenceTimeout;
    dictationRecognizer.AutoSilenceTimeoutSeconds = autoSilenceTimeout;
    dictationRecognizer.Start();

    await waitUntilDictationRecognizerHasStarted;
    Debug.Assert(dictationRecognizer.Status == SpeechSystemStatus.Running);

    if (dictationRecognizer.Status == SpeechSystemStatus.Failed)
    {
        inputSystem.RaiseDictationError(inputSource, "Dictation recognizer failed to start!");
        // Fix: reset state on the failure path; the original returned with
        // IsListening == true and isTransitioning == true, permanently blocking
        // every subsequent StartRecordingAsync/StopRecordingAsync call.
        IsListening = false;
        isTransitioning = false;
        return;
    }

    // Start recording from the microphone.
    dictationAudioClip = Microphone.Start(deviceName, false, recordingTime, samplingRate);
    textSoFar = new StringBuilder();
    isTransitioning = false;
#else
    await Task.CompletedTask;
#endif
}
/// <summary>
/// Creates the dictation recognizer, registers its events, switches off
/// phrase recognition, and begins recording from the microphone.
/// </summary>
private void Awake()
{
    // Fix: the original constructed and configured the DictationRecognizer
    // TWICE back-to-back (identical code duplicated), leaking the first
    // instance and its event subscriptions. Build it exactly once.
    dictationRecognizer = new DictationRecognizer();
    dictationRecognizer.AutoSilenceTimeoutSeconds = TIMEOUT;

    // 3.a: Register for dictationRecognizer.DictationHypothesis and implement DictationHypothesis below
    // This event is fired while the user is talking. As the recognizer listens, it provides text of what it's heard so far.
    dictationRecognizer.DictationHypothesis += DictationRecognizer_DictationHypothesis;

    // 3.a: Register for dictationRecognizer.DictationComplete and implement DictationComplete below
    // This event is fired when the recognizer stops, whether from Stop() being called, a timeout occurring, or some other error.
    dictationRecognizer.DictationComplete += DictationRecognizer_DictationComplete;

    // Dictation and phrase recognition cannot run at the same time.
    PhraseRecognitionSystem.Shutdown();
    dictationRecognizer.Start();

    // Query the maximum frequency of the default microphone. Use 'unused' to ignore the minimum frequency.
    int unused;
    Microphone.GetDeviceCaps(deviceName, out unused, out samplingRate);
    Microphone.Start(deviceName, false, messageLength, samplingRate);
}
/// <summary>
/// Ends the recording session: stops the microphone, stops dictation, waits
/// for it to halt, then restores phrase recognition.
/// </summary>
private IEnumerator StopListeningInternal()
{
#if UNITY_WSA || UNITY_STANDALONE_WIN
    // Bail out if we are not listening or another transition is underway.
    if (!isListening || isTransitioning)
    {
        LogWarn("Unable to stop recording");
        yield break;
    }

    isListening = false;
    isTransitioning = true;

    LogInfo("Stopping microphone");
    Microphone.End(deviceName);

    LogInfo("Stopping dictation");
    if (dictationRecognizer.Status == SpeechSystemStatus.Running)
    {
        dictationRecognizer.Stop();
    }

    // The recognizer stops asynchronously; poll until it is down.
    LogInfo("Waiting for speech to stop");
    while (dictationRecognizer.Status == SpeechSystemStatus.Running)
    {
        yield return null;
    }

    PhraseRecognitionSystem.Restart();
    isTransitioning = false;
    LogInfo("Stopped listening");
#else
    // No yields in this branch, so a plain return is legal here.
    return null;
#endif
}
/// <summary> ############################################################
/// --------------------- Dictation Methods below ----------------------
/// </summary> ###########################################################
public void noteTaking()
{
    // stop keyword recognizer to prevent dictation recognition conflict
    PhraseRecognitionSystem.Shutdown();
    Debug.Log("Shutting down Phrase Recognition");

    // Fix: each call created a brand-new DictationRecognizer without releasing
    // the previous one, leaking a native recognizer per invocation. Dispose
    // the old instance (its event subscriptions die with it) before rebuilding.
    if (dictationRecognizer != null)
    {
        dictationRecognizer.Dispose();
    }

    dictationRecognizer = new DictationRecognizer();
    dictationRecognizer.InitialSilenceTimeoutSeconds = 6f;
    dictationRecognizer.AutoSilenceTimeoutSeconds = 6f;

    dictationRecognizer.DictationResult += dictationRecognizer_DictationResult;
    dictationRecognizer.DictationHypothesis += dictationRecognizer_DictationHypothesis;
    dictationRecognizer.DictationComplete += dictationRecognizer_DictationComplete;
    dictationRecognizer.DictationError += dictationRecognizer_DictationError;

    // Used for debugging to show dictation parameters has been activated.
    // So, dictation can be used in App.
    Debug.Log("Initiliazed Dictation Recognizer");

    // Start dictation recogntion
    dictationRecognizer.Start();

    // Change bool to true for dictation control
    IsRunning = true;
    checkDictationOn();

    // Used for debugging to show dictation recognizer has started.
    Debug.Log("Dictation started");
}
/* Dictation complete event handler
 * Converts the user input array to a single string. If no user input was recognized,
 * uses TTS to tell the user that no input was recognized. Otherwise, tells the user
 * what was heard and tries to convert it to a location. If conversion was successful
 * then the user will be guided to the route, otherwise use TTS to inform the user that
 * a route could not be found. Restarts the keyword recognizer whether a route was found
 * or not incase the user wants to change their location.
 */
public void DictationRecognizer_DictationComplete(DictationCompletionCause cause)
{
    userInputLength = 0;
    Debug.Log("SI: Dictation complete, stopped listening");

    // Collapse the captured word array into one destination string.
    userInputStr = string.Join(" ", userInputArr).TrimEnd();
    Debug.Log("SI: You said " + userInputStr);

    if (emptyInput(userInputStr))
    {
        Say("Sorry, I did not catch that. Please try again.");
    }
    else
    {
        string StrToSend = "Your chosen destination is " + userInputStr;
        Say(StrToSend);
        try
        {
            router.PlaceNameToCoords(userInputStr);
        }
        catch
        {
            // Best-effort: lookup failures are reported to the user via TTS.
            Say("Sorry, I cannot find a route to " + userInputStr + ", please choose another destination.");
        }
    }

    // Reset the captured words for the next dictation session.
    Array.Clear(userInputArr, 0, userInputArr.Length);

    PhraseRecognitionSystem.Restart();
}
/// <summary>
/// Stops microphone capture and dictation, detaches the supplied result
/// handler, and restores phrase recognition.
/// </summary>
/// <param name="dictationResultDelegate">The handler previously attached by StartRecording.</param>
public void StopRecording(DictationRecognizer.DictationResultDelegate dictationResultDelegate)
{
    Microphone.End(deviceName);
    dictationRecognizer.Stop();
    // Unsubscribe so handlers do not accumulate across start/stop cycles.
    dictationRecognizer.DictationResult -= dictationResultDelegate;
    PhraseRecognitionSystem.Restart();
}
/// <summary>
/// Handles DirectLine bot events: speaks connection status, polls for replies
/// after sending, speaks received messages, and tears down dictation afterwards.
/// </summary>
private void OnBotResponse(object sender, Assets.BotDirectLine.BotResponseEventArgs e)
{
    Debug.Log($"OnBotResponse: {e.ToString()}");

    switch (e.EventType)
    {
        case EventTypes.ConversationStarted:
            if (!string.IsNullOrWhiteSpace(e.ConversationId))
            {
                // Store the ID
                textToSpeech.SpeakSsml("<?xml version=\"1.0\"?><speak speed=\"80%\" version=\"1.0\" xmlns=\"http://www.w3.org/2001/10/synthesis\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.w3.org/2001/10/synthesis http://www.w3.org/TR/speech-synthesis/synthesis.xsd\" xml:lang=\"en-US\">Bot connection established!</speak>");
                conversationId = e.ConversationId;
            }
            else
            {
                textToSpeech.SpeakSsml("<?xml version=\"1.0\"?><speak speed=\"80%\" version=\"1.0\" xmlns=\"http://www.w3.org/2001/10/synthesis\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.w3.org/2001/10/synthesis http://www.w3.org/TR/speech-synthesis/synthesis.xsd\" xml:lang=\"en-US\">Error while connecting to Bot!</speak>");
            }
            break;

        case EventTypes.MessageSent:
            if (!string.IsNullOrEmpty(conversationId))
            {
                // Get the bot's response(s).
                // NOTE(review): blocking on an async call with .Wait() risks
                // deadlock/stalls on the Unity main thread — consider awaiting
                // this from an async flow instead.
                BotDirectLineManager.Instance.GetMessagesAsync(conversationId).Wait();
            }
            break;

        case EventTypes.MessageReceived:
            // Handle the received message(s)
            if (!string.IsNullOrWhiteSpace(conversationId))
            {
                var messageActivity = e.Messages.LastOrDefault();

                // Fix: LastOrDefault() returns null when no messages arrived;
                // the original dereferenced .Text unconditionally and threw.
                if (messageActivity != null)
                {
                    Debug.Log(messageActivity.Text);
                    textToSpeech.SpeakSsml("<?xml version=\"1.0\"?><speak speed=\"80%\" version=\"1.0\" xmlns=\"http://www.w3.org/2001/10/synthesis\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.w3.org/2001/10/synthesis http://www.w3.org/TR/speech-synthesis/synthesis.xsd\" xml:lang=\"en-US\"> " + messageActivity.Text + "</speak>");
                }

                // One bot exchange per dictation session: tear dictation down
                // and return to phrase recognition.
                dictationRecognizer.Stop();
                dictationRecognizer.DictationResult -= DictationRecognizer_DictationResult;
                dictationRecognizer.DictationHypothesis -= DictationRecognizer_DictationHypothesis;
                dictationRecognizer.DictationComplete -= DictationRecognizer_DictationComplete;
                dictationRecognizer.DictationError -= DictationRecognizer_DictationError;
                dictationRecognizer.Dispose();

                PhraseRecognitionSystem.Restart();
            }
            break;

        case EventTypes.Error:
            // Handle the error
            break;
    }
}
/// <summary>
/// Fired when dictation fails: shows the error on the billboard, releases the
/// recognizer, and brings phrase recognition back.
/// </summary>
/// <param name="error">Human-readable error description.</param>
/// <param name="hresult">Platform error code (unused).</param>
public void DictationRecognizer_DictationError(string error, int hresult)
{
    ShowOnBillboard(WrapText(error, 40));
    D_Recognizer.Dispose();

    // For some reason phrase recognition is broken
    // after dictation error unless we restart it.
    PhraseRecognitionSystem.Restart();
}
/// <summary>
/// Ends dictation, updates the dictation-state flag/UI, and switches the
/// speech pipeline back to keyword recognition.
/// </summary>
public void keywordRestart()
{
    // Release the dictation recognizer.
    dictationRecognizer.Stop();
    dictationRecognizer.Dispose();

    // Record that dictation is no longer active and refresh the indicator.
    IsRunning = false;
    checkDictationOn();

    // Resume keyword recognition.
    PhraseRecognitionSystem.Restart();
    keywordRecognizer.Start();
}
/// <summary>
/// Restarts the phrase recognition system, doing nothing if it is already running.
/// </summary>
private void RestartPhraseRecognitionSystem()
{
    if (PhraseRecognitionSystem.Status != SpeechSystemStatus.Running)
    {
        PhraseRecognitionSystem.Restart();
    }
}
/// <summary>
/// Shuts down the phrase recognition system, doing nothing if it is not running.
/// </summary>
private void ShutdownPhraseRecognitionSystem()
{
    if (PhraseRecognitionSystem.Status == SpeechSystemStatus.Running)
    {
        PhraseRecognitionSystem.Shutdown();
    }
}
/// <summary>
/// Maps a dictated phrase to a menu and asks the MenuController to show it,
/// then releases the recognizer and resumes phrase recognition.
/// </summary>
/// <param name="text">The phrase the recognizer heard.</param>
/// <param name="confidence">Recognition confidence (unused).</param>
private void DictationRecognizer_DictationResult(string text, ConfidenceLevel confidence)
{
    GameObject holoMenu = null;
    Debug.Log("String heard: " + text);
    dictationRecognizer.Stop();

    // Normalize once and dispatch; the original recomputed text.ToLower()
    // in every branch of a long if/else chain.
    switch (text.ToLower())
    {
        // "maine"/"mean" cover common misrecognitions of "main".
        case "main":
        case "main menu":
        case "maine":
        case "mean":
            holoMenu = mc.m_mainMenu;
            break;
        case "biometrics":
            holoMenu = mc.m_biometricsMenu;
            break;
        case "help":
            holoMenu = mc.m_helpMenu;
            break;
        case "music":
            holoMenu = mc.m_musicMenu;
            break;
        case "settings":
            holoMenu = mc.m_settingsMenu;
            break;
        case "brightness":
            holoMenu = mc.m_brightnessMenu;
            break;
        case "volume":
            holoMenu = mc.m_volumeMenu;
            break;
        case "procedure":
            holoMenu = mc.m_blankTaskMenu;
            break;
        default:
            Debug.Log("Cmd not recognized."); // This does not fail eloquently
            break;
    }

    // call function in MenuController to retrieve the specific menu
    if (holoMenu != null)
    {
        mc.Retrieve(holoMenu);
    }

    dictationRecognizer.Dispose();
    PhraseRecognitionSystem.Restart();
}
/// <summary>
/// Builds a low-confidence dictation recognizer, turns off phrase recognition,
/// and starts listening, updating the on-screen status text.
/// </summary>
void OnEnable()
{
    wordRecog = new DictationRecognizer(ConfidenceLevel.Low, DictationTopicConstraint.Dictation);
    wordRecog.DictationResult += WordRecog_DictationResult;

    // NOTE(review): Shutdown() completes asynchronously; starting dictation
    // immediately afterwards may race it — confirm this works on device.
    PhraseRecognitionSystem.Shutdown();
    wordRecog.Start();

    Debug.Log("Enabled");
    textMeshPro.GetComponent<TextMeshPro>().text = "Listening...";
}
//where and when do we call this
/// <summary>
/// Shuts down the phrase recognition system and, if a keyword recognizer
/// exists, stops and disposes it.
/// </summary>
public void stopKeywordRecognizer()
{
    PhraseRecognitionSystem.Shutdown();

    if (keywordRecognizer != null)
    {
        keywordRecognizer.Stop();
        keywordRecognizer.Dispose();
    }
}
/// <summary>
/// IDictationHandler error callback: records the error text as the output,
/// stops the recording session, and resumes phrase recognition.
/// </summary>
/// <param name="eventData">Dictation event payload; DictationResult carries the error text here.</param>
void IDictationHandler.OnDictationError(DictationEventData eventData)
{
    isRecording = false;

    // On the error path DictationResult holds the error description.
    speechToTextOutput = eventData.DictationResult;
    Debug.LogError(eventData.DictationResult);

    StartCoroutine(DictationInputManager.StopRecording());
    PhraseRecognitionSystem.Restart();
}
/// <summary>
/// Switches from keyword recognition to dictation for note taking, wiring
/// inline handlers that display results and restart keyword mode on completion.
/// </summary>
public void noteTaking()
{
    // stop keyword recognizer to prevent dictation recognition conflict
    PhraseRecognitionSystem.Shutdown();
    Debug.Log("Shutting down Phrase Recognition");

    // Fix: each call allocated a fresh DictationRecognizer without releasing
    // the previous one, leaking a native recognizer per invocation.
    if (dictationRecognizer != null)
    {
        dictationRecognizer.Dispose();
    }

    dictationRecognizer = new DictationRecognizer();
    dictationRecognizer.InitialSilenceTimeoutSeconds = 6f;
    dictationRecognizer.AutoSilenceTimeoutSeconds = 6f;

    dictationRecognizer.DictationResult += (string text, ConfidenceLevel confidence) =>
    {
        // Displays what the App belives was spoken and displays it in console
        Debug.LogFormat("Dictation result: {0}", text);

        // Displays what was said to the UI
        dictationDisplay.text = text;
    };

    dictationRecognizer.DictationHypothesis += (text) =>
    {
        // Displays what the App processed was spoken
        Debug.LogFormat("Dictation hypothesis: {0}", text);
    };

    dictationRecognizer.DictationComplete += (completionCause) =>
    {
        if (completionCause != DictationCompletionCause.Complete)
        {
            Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
        }
        Debug.Log("Dictation complete");

        // Hand control back to the keyword recognizer when dictation ends.
        keywordRestart();
    };

    dictationRecognizer.DictationError += (error, hresult) =>
    {
        Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
    };

    // Used for debugging to show dictation parameters has been activated.
    // So, dictation can be used in App.
    Debug.Log("Initiliazed Dictation Recognizer");

    // Start dictation recogntion
    dictationRecognizer.Start();

    // Change bool to true for dictation control
    IsRunning = true;

    // Used for debugging to show dictation recognizer has started.
    Debug.Log("Dictation started");
}
/// <summary>
/// Hides the overlap message if showing, tears down the dictation recognizer,
/// and resumes phrase recognition.
/// </summary>
void destroyDictationRecognizer()
{
    // Dismiss the overlap message if it is currently displayed.
    if (mc.m_overlapMessage.gameObject.activeSelf)
    {
        mc.toggleDisplay(mc.m_overlapMessage);
    }

    dictationRecognizer.Stop();
    dictationRecognizer.Dispose();
    PhraseRecognitionSystem.Restart();
}
/// <summary>
/// Coroutine that waits for the dictation recognizer to fully stop, then
/// restarts phrase recognition and clears the transition flag.
/// </summary>
private IEnumerator FinishStopRecording()
{
    // The recognizer stops asynchronously; poll until it is down.
    while (dictationRecognizer.Status == SpeechSystemStatus.Running)
    {
        yield return null;
    }

    PhraseRecognitionSystem.Restart();
    isTransitioning = false;
}
//method to restart game
private void ResetGame()
{
    //set movement state false
    startgame.SetMovementState(false);

    //Shut down phraze recognition system, as cannot have both dictation and phraze active at same time - https://docs.microsoft.com/en-us/windows/mixed-reality/voice-input-in-unity
    PhraseRecognitionSystem.Shutdown();

    // Fix: GameObject.Find can return null (e.g. the manager was renamed or
    // already destroyed during teardown); the original unguarded call chain
    // would throw NullReferenceException and prevent the scene reload below.
    GameObject kinectManagerObject = GameObject.Find("Kinect Manager");
    if (kinectManagerObject != null)
    {
        KManager kManager = kinectManagerObject.GetComponent<KManager>();
        if (kManager != null && kManager._kinect != null)
        {
            kManager._kinect.Close();
        }
    }

    //reload scene
    SceneManager.LoadScene(0);
}
/// <summary>
/// Switches from phrase recognition to dictation, attaches the supplied
/// result handler, and begins recording from the microphone.
/// </summary>
/// <param name="dictationResultDelegate">Handler invoked for each dictation result.</param>
/// <returns>The AudioClip being recorded from the microphone.</returns>
public AudioClip StartRecording(DictationRecognizer.DictationResultDelegate dictationResultDelegate)
{
    // Only one speech system may be active; stop phrase recognition first.
    if (PhraseRecognitionSystem.Status == SpeechSystemStatus.Running)
    {
        PhraseRecognitionSystem.Shutdown();
    }

    dictationRecognizer.DictationResult += dictationResultDelegate;
    dictationRecognizer.Start();

    return Microphone.Start(deviceName, false, RECORD_MAX_TIME_IN_SECONDS, samplingRate);
}
/// <summary>
/// Turns on the dictation recognizer and begins recording audio from the default microphone.
/// </summary>
/// <returns>The audio clip recorded from the microphone.</returns>
public AudioClip StartRecording()
{
    // NOTE(review): Shutdown() completes asynchronously; starting dictation
    // immediately afterwards may race it — confirm behavior on device.
    PhraseRecognitionSystem.Shutdown();
    dictationRecognizer.Start();

    dictationDisplay.text = "Dictation is starting. It may take time to display your text the first time, but begin speaking now...";
    hasRecordingStarted = true;

    // Loop the recording so sessions longer than messageLength keep capturing.
    return Microphone.Start(deviceName, true, messageLength, samplingRate);
}
/// <summary>
/// This event is fired when the recognizer stops, whether from Stop() being called, a timeout occurring, or some other error.
/// Typically, this will simply return "Complete". In this case, we check to see if the recognizer timed out.
/// </summary>
/// <param name="cause">An enumerated reason for the session completing.</param>
private void DictationRecognizer_DictationComplete(DictationCompletionCause cause)
{
    Debug.Log("MICROPHONE COMPLETE: " + cause);

    // On timeout, keep dictating instead of returning to keyword mode.
    if (cause == DictationCompletionCause.TimeoutExceeded)
    {
        dictationRecognizer.Start();
        return;
    }

    PhraseRecognitionSystem.Restart();
}