/// <summary>
/// Handles a recognized utterance: logs it, mirrors it to the serial interface,
/// asks Cleverbot for a reply, builds one 4-byte mouth-servo frame per
/// vowel-delimited chunk of the reply, then speaks the reply aloud.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Recognition result carrying the transcribed text.</param>
private void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    Console.WriteLine("Speech recognized: " + e.Result.Text);
    Serial_Interface.Input_Value = e.Result.Text;

    var response = Cleverbot.getChatResponse(session, e.Result.Text);
    Console.WriteLine("Cleverbot: " + response);
    Serial_Interface.Output_Value = response;

    // Rough syllable segmentation: split the reply on vowels; each chunk's
    // length scales the mouth-movement duration byte.
    string[] syllables = response.Split('a', 'e', 'i', 'o', 'u', 'y');
    List<byte[]> data = new List<byte[]>();
    for (int i = 0; i < syllables.Length; i++)
    {
        // BUG FIX: the original wrote data[i] = new byte[4] into an EMPTY
        // list, which throws ArgumentOutOfRangeException on the first
        // iteration — frames must be appended with Add().
        byte[] frame = new byte[4];
        frame[0] = 0;
        frame[1] = 255;
        // BUG FIX: clamp instead of a raw (byte) cast, which silently
        // truncated (mod 256) for chunks longer than 17 characters.
        frame[2] = (byte)Math.Min(syllables[i].Length * 15, byte.MaxValue);
        frame[3] = 0;
        data.Add(frame);
    }
    // NOTE(review): `data` is built but never transmitted in this revision —
    // presumably a later version writes it to the serial link; confirm intent.

    synth.Speak(response);
}
/// <summary>
/// Handles a recognized utterance: shows it in the UI, gets a Cleverbot
/// reply, streams mouth-servo frames over the serial link (one frame per
/// vowel-delimited chunk, plus a closing frame), speaks the reply, and
/// re-arms recognition if listening is enabled.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Recognition result carrying the transcribed text.</param>
private void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    string heard = e.Result.Text;
    Console.WriteLine("Speech recognized: " + heard);
    Main_UI.set_Input_Text(heard);

    var response = Cleverbot.getChatResponse(session, heard);
    Main_UI.set_Output_Text(response);

    // One frame per vowel-delimited chunk of the reply.
    // Frame layout: [0, position, duration (length * 15, truncated to byte), 0].
    List<byte[]> frames = new List<byte[]>();
    foreach (string chunk in response.Split('a', 'e', 'i', 'o', 'u', 'y'))
    {
        frames.Add(new byte[] { 0, 200, (byte)(chunk.Length * 15), 0 });
    }
    // One final frame so the mouth ends closed when the reply finishes.
    frames.Add(new byte[] { 0, 255, 0, 0 });

    // Only transmit when the COM port is actually open.
    if (!Main_UI.link.IsOpen)
    {
        MessageBox.Show("Error Com Port not open...", "Error", MessageBoxButtons.OK);
    }
    else
    {
        foreach (byte[] frame in frames)
        {
            Main_UI.link.Write(frame, 0, frame.Length);
        }
    }

    synth.Speak(response);

    // Re-arm recognition for the next utterance.
    if (should_listen)
    {
        recognizer.RecognizeAsync();
    }
}
/// <summary>
/// Handles a recognized utterance: shows it in the UI, gets and displays a
/// Cleverbot reply, speaks it, and re-arms recognition if listening is on.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Recognition result carrying the transcribed text.</param>
private void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    string heard = e.Result.Text;
    Main_UI.set_Input_Text(heard);

    var reply = Cleverbot.getChatResponse(session, heard);
    Main_UI.set_Output_Text(reply);
    Console.WriteLine("Cleverbot: " + reply);

    synth.Speak(reply);

    // Re-arm recognition for the next utterance.
    if (should_listen)
    {
        recognizer.RecognizeAsync();
    }
}
/// <summary>
/// Creates the Cleverbot session, configures the en-US speech recognizer
/// for free dictation on the default microphone, routes synthesis to the
/// default audio output, and wires the recognized-speech callback.
/// </summary>
public SpeechHandler()
{
    session = Cleverbot.createSession();

    recognizer = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("en-US"));
    recognizer.SetInputToDefaultAudioDevice();

    synth = new SpeechSynthesizer();
    synth.SetOutputToDefaultAudioDevice();

    var dictation = new DictationGrammar
    {
        Name = "default dictation",
        Enabled = true
    };
    recognizer.LoadGrammar(dictation);

    // Method-group subscription — equivalent to new EventHandler<...>(...).
    recognizer.SpeechRecognized += sre_SpeechRecognized;
}
/// <summary>
/// Handles a recognized utterance: shows it in the UI, gets a Cleverbot
/// reply, streams mouth-servo frames (one per vowel-delimited chunk) over
/// the serial link, speaks the reply, and re-arms recognition if enabled.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Recognition result carrying the transcribed text.</param>
private void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    Console.WriteLine("Speech recognized: " + e.Result.Text);
    Main_UI.set_Input_Text(e.Result.Text);

    var response = Cleverbot.getChatResponse(session, e.Result.Text);
    Main_UI.set_Output_Text(response);

    // Rough syllable segmentation: split the reply on vowels; each chunk's
    // length scales the mouth-movement duration byte.
    string[] syllables = response.Split('a', 'e', 'i', 'o', 'u', 'y');
    List<byte[]> data = new List<byte[]>();
    for (int i = 0; i < syllables.Length; i++)
    {
        // BUG FIX: the original wrote data[i] = new byte[4] into an EMPTY
        // list, which throws ArgumentOutOfRangeException on the first
        // iteration — frames must be appended with Add().
        byte[] frame = new byte[4];
        frame[0] = 0;
        frame[1] = 255;
        // BUG FIX: clamp instead of a raw (byte) cast, which silently
        // truncated (mod 256) for chunks longer than 17 characters.
        frame[2] = (byte)Math.Min(syllables[i].Length * 15, byte.MaxValue);
        frame[3] = 0;
        data.Add(frame);
    }

    // FIX: guard the port state before writing (consistent with the other
    // revision of this handler) — SerialPort.Write throws
    // InvalidOperationException when the port is closed.
    if (!Main_UI.link.IsOpen)
    {
        MessageBox.Show("Error Com Port not open...", "Error", MessageBoxButtons.OK);
    }
    else
    {
        for (int j = 0; j < data.Count; j++)
        {
            Main_UI.link.Write(data[j], 0, data[j].Length);
        }
    }

    synth.Speak(response);

    // Re-arm recognition for the next utterance.
    if (should_listen)
    {
        recognizer.RecognizeAsync();
    }
}