/// <summary>
/// Console entry point: captures audio from the first connected Kinect sensor
/// and runs continuous dictation against it, echoing recognized text until the
/// user presses ENTER.
/// </summary>
/// <param name="args">Command-line arguments (unused).</param>
public static void Main(string[] args)
{
    // Obtain a KinectSensor if any are available.
    KinectSensor sensor = (from sensorToCheck in KinectSensor.KinectSensors
                           where sensorToCheck.Status == KinectStatus.Connected
                           select sensorToCheck).FirstOrDefault();
    if (sensor == null)
    {
        Console.WriteLine(
            "No Kinect sensors are attached to this computer or none of the ones that are\n" +
            "attached are \"Connected\".\n" +
            "Attach the KinectSensor and restart this application.\n" +
            "If that doesn't work run SkeletonViewer-WPF to better understand the Status of\n" +
            "the Kinect sensors.\n\n" +
            "Press any key to continue.\n");

        // Give a chance for user to see console output before it is dismissed.
        Console.ReadKey(true);
        return;
    }

    sensor.Start();

    // Obtain the KinectAudioSource to do audio capture.
    KinectAudioSource source = sensor.AudioSource;
    source.EchoCancellationMode = EchoCancellationMode.None; // No AEC for this sample
    source.AutomaticGainControlEnabled = false; // Important to turn this off for speech recognition

    System.Speech.Recognition.RecognizerInfo ri =
        System.Speech.Recognition.SpeechRecognitionEngine.InstalledRecognizers().FirstOrDefault();

    // FIX: the original dereferenced ri.Id unconditionally; on a machine with no
    // installed recognizers FirstOrDefault() returns null and this crashed with a
    // NullReferenceException. Guard the same way the sensor is guarded above.
    if (ri == null)
    {
        Console.WriteLine(
            "No speech recognizers are installed on this computer.\n\n" +
            "Press any key to continue.\n");
        Console.ReadKey(true);
        sensor.Stop();
        return;
    }

    using (var recoEngine = new System.Speech.Recognition.SpeechRecognitionEngine(ri.Id))
    {
        // Create the question dictation grammar.
        System.Speech.Recognition.DictationGrammar customDictationGrammar =
            new System.Speech.Recognition.DictationGrammar();
        customDictationGrammar.Name = "Dictation";
        customDictationGrammar.Enabled = true;

        // Add the grammar to the recognition engine and echo everything heard.
        recoEngine.LoadGrammar(customDictationGrammar);
        recoEngine.SpeechRecognized += (s, sargs) => Console.Write(sargs.Result.Text);

        using (Stream s = source.Start())
        {
            // Kinect audio format: 16 kHz, 16-bit, mono PCM
            // (32000 bytes/sec average, 2-byte block alignment).
            recoEngine.SetInputToAudioStream(
                s,
                new System.Speech.AudioFormat.SpeechAudioFormatInfo(
                    System.Speech.AudioFormat.EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));

            // FIX: this literal was split across a raw newline in the source,
            // which is invalid in a regular C# string literal.
            Console.WriteLine("Dictating. Press ENTER to stop");

            recoEngine.RecognizeAsync(System.Speech.Recognition.RecognizeMode.Multiple);
            Console.ReadLine();
            Console.WriteLine("Stopping recognizer ...");
            recoEngine.RecognizeAsyncStop();
        }
    }

    sensor.Stop();
}
public bool StopRecognizer() { if (_recognizerRunning) { try { _recognizer.RecognizeAsyncStop(); _recognizerRunning = false; return(true); } catch { return(false); } } else { return(true); } }
/// <summary>
/// Handles a recognized voice command: speaks a spoken response, performs the
/// matching server action, then resets the voice-command UI state and stops
/// listening.
/// </summary>
/// <param name="sender">The recognition engine that raised the event (unused).</param>
/// <param name="e">Recognition result containing the recognized phrase.</param>
void engine_SpeechRecognized(object sender, System.Speech.Recognition.SpeechRecognizedEventArgs e)
{
    System.Speech.Synthesis.SpeechSynthesizer reader = new System.Speech.Synthesis.SpeechSynthesizer();

    // FIX: the original allocated a SpeechRecognitionEngine at the declaration
    // and then immediately re-allocated it inside the try block, leaking the
    // first instance. Allocate exactly once.
    // NOTE(review): this is a fresh engine, not the engine that raised this
    // event, so RecognizeAsyncStop()/Dispose() below do not affect the active
    // recognizer. Preserved as-is — confirm whether `sender` was the intended
    // target before changing it.
    System.Speech.Recognition.SpeechRecognitionEngine engine =
        new System.Speech.Recognition.SpeechRecognitionEngine();
    try
    {
        String message = "";
        String results = e.Result.Text;
        if (!listening)
        {
            return;
        }

        // Per-command actions only; the common teardown that every case shared
        // in the original is hoisted below the switch (identical order/effects).
        switch (results)
        {
            case "help":
                reader.Speak("The available commands are restart, shutdown, status report, and players.");
                Logger.Log(LogType.ConsoleOutput, "The available commands are restart, shutdown, status report, and a players.");
                break;
            case "restart":
                reader.Speak("The server is now restarting.");
                ShutdownParams param = new ShutdownParams(ShutdownReason.Restarting, TimeSpan.FromSeconds(5), true, true, "Restarting", Player.Console);
                Server.Shutdown(param, true);
                break;
            case "shutdown":
                reader.Speak("The server is now shutting down.");
                Shutdown(ShutdownReason.ShuttingDown, true);
                break;
            case "status report":
                reader.Speak("Server has been up for " + Math.Round(DateTime.UtcNow.Subtract(Server.StartTime).TotalHours, 1, MidpointRounding.AwayFromZero) + " hours.");
                Player.Console.ParseMessage("/sinfo", true, false);
                break;
            case "players":
                // NOTE(review): names are concatenated with no separator, so the
                // synthesizer speaks e.g. "AliceBob". Kept as-is to preserve
                // behavior — consider joining with ", ".
                foreach (Player p in Server.Players)
                {
                    message += p.Name;
                }
                reader.Speak(message);
                Player.Console.ParseMessage("/players", true, false);
                break;
            default:
                break;
        }

        // Common teardown (previously duplicated in every case).
        bVoice.ForeColor = System.Drawing.Color.Black;
        engine.RecognizeAsyncStop();
        engine.Dispose();
        listening = false;
    }
    catch (Exception)
    {
        // Audio Device is either missing or damaged, actual Exception is
        // System.Speech.Internal.Synthesis.AudioException.
        engine.RecognizeAsyncStop();
        engine.Dispose();
        return;
    }
    finally
    {
        // FIX: SpeechSynthesizer is IDisposable and was leaked in the original.
        reader.Dispose();
    }
}
public void Stop() { recEngine.RecognizeAsyncStop(); }
/// <summary>
/// Handles a recognized voice command: speaks a spoken response, performs the
/// matching server action, then resets the voice-command UI state and stops
/// listening.
/// </summary>
/// <param name="sender">The recognition engine that raised the event (unused).</param>
/// <param name="e">Recognition result containing the recognized phrase.</param>
void engine_SpeechRecognized(object sender, System.Speech.Recognition.SpeechRecognizedEventArgs e)
{
    System.Speech.Synthesis.SpeechSynthesizer reader = new System.Speech.Synthesis.SpeechSynthesizer();

    // FIX: the original allocated a SpeechRecognitionEngine at the declaration
    // and then immediately re-allocated it inside the try block, leaking the
    // first instance. Allocate exactly once.
    // NOTE(review): this is a fresh engine, not the engine that raised this
    // event, so RecognizeAsyncStop()/Dispose() below do not affect the active
    // recognizer. Preserved as-is — confirm whether `sender` was the intended
    // target before changing it.
    System.Speech.Recognition.SpeechRecognitionEngine engine =
        new System.Speech.Recognition.SpeechRecognitionEngine();
    try
    {
        String message = "";
        String results = e.Result.Text;
        if (!listening)
        {
            return;
        }

        // Per-command actions only; the common teardown that every case shared
        // in the original is hoisted below the switch (identical order/effects).
        switch (results)
        {
            case "help":
                reader.Speak("The available commands are restart, shutdown, status report, and players.");
                Logger.Log(LogType.ConsoleOutput, "The available commands are restart, shutdown, status report, and a players.");
                break;
            case "restart":
                reader.Speak("The server is now restarting.");
                ShutdownParams param = new ShutdownParams(ShutdownReason.Restarting, TimeSpan.FromSeconds(5), true, true, "Restarting", Player.Console);
                Server.Shutdown(param, true);
                break;
            case "shutdown":
                reader.Speak("The server is now shutting down.");
                Shutdown(ShutdownReason.ShuttingDown, true);
                break;
            case "status report":
                reader.Speak("Server has been up for " + Math.Round(DateTime.UtcNow.Subtract(Server.StartTime).TotalHours, 1, MidpointRounding.AwayFromZero) + " hours.");
                Player.Console.ParseMessage("/sinfo", true, false);
                break;
            case "players":
                // NOTE(review): names are concatenated with no separator, so the
                // synthesizer speaks e.g. "AliceBob". Kept as-is to preserve
                // behavior — consider joining with ", ".
                foreach (Player p in Server.Players)
                {
                    message += p.Name;
                }
                reader.Speak(message);
                Player.Console.ParseMessage("/players", true, false);
                break;
            default:
                break;
        }

        // Common teardown (previously duplicated in every case).
        bVoice.ForeColor = System.Drawing.Color.Black;
        engine.RecognizeAsyncStop();
        engine.Dispose();
        listening = false;
    }
    catch (Exception)
    {
        // Audio Device is either missing or damaged, actual Exception is
        // System.Speech.Internal.Synthesis.AudioException.
        engine.RecognizeAsyncStop();
        engine.Dispose();
        return;
    }
    finally
    {
        // FIX: SpeechSynthesizer is IDisposable and was leaked in the original.
        reader.Dispose();
    }
}