// Entry point: loads a list of keyphrases from a text file (path may be
// given as the first command-line argument, default "keyphrases.txt") and
// runs continuous keyword recognition on the default audio device using the
// first installed English recognizer. Blocks forever reading console input
// so the async recognition keeps running.
static void Main(string[] args)
{
    var keyphrasesFile = args.Length != 0 ? args[0] : "keyphrases.txt";

    // Find the first installed recognizer whose culture is English.
    RecognizerInfo englishRecognizer = null;
    foreach (var candidate in SpeechRecognitionEngine.InstalledRecognizers())
    {
        if (candidate.Culture.TwoLetterISOLanguageName.Equals("en"))
        {
            englishRecognizer = candidate;
            break;
        }
    }

    // No English recognizer installed — nothing to do.
    if (englishRecognizer == null)
    {
        return;
    }

    using (_recognizer = new SpeechRecognitionEngine(englishRecognizer))
    {
        // Build a single-rule grammar from the keyphrase file.
        var keyphraseChoices = new Choices(getChoices(keyphrasesFile));
        var builder = new GrammarBuilder(keyphraseChoices)
        {
            Culture = englishRecognizer.Culture
        };

        // Create the Grammar instance.
        var keyphraseGrammar = new Grammar(builder) { Name = "Keyphrases" };

        _recognizer.RequestRecognizerUpdate();
        _recognizer.LoadGrammar(keyphraseGrammar);
        _recognizer.SpeechRecognized += recognizer_SpeechRecognized;
        _recognizer.SpeechRecognitionRejected += recognizer_SpeechNotRecognized;
        _recognizer.SetInputToDefaultAudioDevice();
        _recognizer.RecognizeAsync(RecognizeMode.Multiple);

        // Keep the process alive while recognition runs in the background.
        while (true)
        {
            Console.ReadLine();
        }
    }
}
// Builds the recognition grammar from the `words` list and starts continuous
// asynchronous recognition using the culture `ci`, with input/output routed
// to the default audio devices. Errors are reported to the user via
// MessageBox.
//
// BUG FIX: the original showed an error box when engine creation failed but
// then kept going, so `sr` was still null and the very next call
// (sr.RequestRecognizerUpdate()) threw NullReferenceException. We now return
// immediately on engine-creation failure. The redundant nested try/catch
// (two identical handlers) was collapsed into one.
private void Gramatica()
{
    try
    {
        sr = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(ci);
    }
    catch (Exception ex)
    {
        MessageBox.Show("Erro: " + ex.Message);
        return; // engine unavailable; continuing would NRE on `sr` below
    }

    // One flat choice list containing every recognizable word/phrase.
    var gramatica = new Microsoft.Speech.Recognition.Choices();
    gramatica.Add(words);
    var gb = new Microsoft.Speech.Recognition.GrammarBuilder();
    gb.Append(gramatica);

    try
    {
        var g = new Microsoft.Speech.Recognition.Grammar(gb);
        sr.RequestRecognizerUpdate();
        sr.LoadGrammarAsync(g);
        sr.SpeechRecognized += Sr_SpeechRecognized;
        sr.SetInputToDefaultAudioDevice();
        ss.SetOutputToDefaultAudioDevice();
        sr.RecognizeAsync(Microsoft.Speech.Recognition.RecognizeMode.Multiple);
    }
    catch (Exception ex)
    {
        MessageBox.Show("Erro: " + ex.Message);
    }
}
// Parses the plain-text configuration file and (re)initializes speech
// recognition and synthesis from it: name/greeting, confidence threshold,
// languages, day-of-week names, response phrases, and the voice-command
// list that drives the recognition grammar.
//
// BUG FIX: the confidence threshold was parsed with the CURRENT culture
// after replacing '.' with ',', which only worked on comma-decimal OS
// cultures (e.g. ru-RU) and broke elsewhere. We now normalize any ',' to
// '.' and parse with CultureInfo.InvariantCulture, which is deterministic
// for both "0.7" and "0,7" on every machine.
void LoadSettings(string configFileName)
{
    var settings = File.ReadAllText(configFileName);

    jarvisName = Regex.Match(settings, "name: \"(.+?)\"").Groups[1].Value;
    helloMessage = Regex.Match(settings, "hello_message: \"(.+?)\"").Groups[1].Value;
    // Culture-independent decimal parse (see BUG FIX note above).
    threshold = double.Parse(
        Regex.Match(settings, "confidence_threshold: (.+?) ", RegexOptions.Compiled).Groups[1].Value.Replace(',', '.'),
        System.Globalization.CultureInfo.InvariantCulture);
    recognize_language = Regex.Match(settings, "recognize_language: \"(.+?)\"").Groups[1].Value;
    speech_language = Regex.Match(settings, "speech_language: \"(.+?)\"").Groups[1].Value;

    #region DaysOfWeek
    // "daysofweek:" is followed by a whitespace-separated list of quoted names.
    var daysMatch = Regex.Match(settings, "daysofweek:((\\s\"(?<DayName>.+?)\")+)");
    var daysCaptures = daysMatch.Groups["DayName"].Captures;
    var daysList = new List<string>();
    foreach (Capture match in daysCaptures)
    {
        daysList.Add(match.Value);
    }
    daysofweek = daysList.ToArray();
    #endregion

    x_hours = Regex.Match(settings, "x_hours: \"(.+?)\"").Groups[1].Value;
    x_minutes = Regex.Match(settings, "x_minutes: \"(.+?)\"").Groups[1].Value;
    recognized = Regex.Match(settings, "recognized: \"(.+?)\"").Groups[1].Value;
    ignored = Regex.Match(settings, "ignored: \"(.+?)\"").Groups[1].Value;

    // Command lines look like: "<voice phrase>" "<type>" ["<option>"]...
    // Note: with a repeated group, Groups["Option"].Value is only the LAST
    // option captured — earlier options on the same line are dropped.
    commandsList.Clear();
    var commandsMatches = Regex.Matches(settings, "^\"(?<VoiceCommand>.+?)\"\\s*\"(?<CommandType>.+?)\"(?: \"(?<Option>.+?)\")*", RegexOptions.Multiline);
    foreach (Match m in commandsMatches)
    {
        var cmd = new Command();
        cmd.VoiceCommand = m.Groups["VoiceCommand"].Value;
        cmd.Type = m.Groups["CommandType"].Value;
        cmd.Option = m.Groups["Option"].Value;
        commandsList.Add(cmd);
    }

    // Grammar contains exactly the configured voice phrases.
    var allVoiceCommands = commandsList.Select(c => c.VoiceCommand).ToArray();
    commands = new Choices();
    commands.Add(allVoiceCommands);
    gBuilder = new GrammarBuilder();
    gBuilder.Append(commands);
    gBuilder.Culture = new System.Globalization.CultureInfo(recognize_language);

    // Rebuild the recognition engine only when the language actually changed.
    if (prev_recognize_language != recognize_language)
    {
        recEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(recognize_language));
        try
        {
            recEngine.SetInputToDefaultAudioDevice();
        }
        catch
        {
            RecognizedText_TextBox.Text += "В системе не выбрано устройство записи звука по-умолчанию. Выберите и перезапустите программу.";
            return;
        }
        prev_recognize_language = recognize_language;
        grammar = new Grammar(gBuilder);
        recEngine.LoadGrammarAsync(grammar);
        recEngine.SpeechRecognized += RecEngine_SpeechRecognized;
        recEngine.SpeechHypothesized += RecEngine_SpeechHypothesized;
        recEngine.RecognizeAsync(RecognizeMode.Multiple);
    }

    // Pick the first installed synthesizer voice whose name mentions the
    // configured speech language; like the original, this throws if no voice
    // matches ([0] on an empty array).
    var voice = new System.Speech.Synthesis.SpeechSynthesizer().GetInstalledVoices().Where(v => v.VoiceInfo.Name.Contains(speech_language)).ToArray()[0].VoiceInfo.Name;
    synth.SelectVoice(voice);
    try
    {
        synth.SetOutputToDefaultAudioDevice();
    }
    catch (Exception)
    {
        RecognizedText_TextBox.Text += "В системе не выбрано устройство воспроизведения звука по-умолчанию. Выберите и перезапустите программу.";
        return;
    }
    synth.Volume = 100;
    Say(helloMessage);
}
// Constructs a SpeechRecognizer for the given transcript recorder session:
// creates the speech recognition connector, resolves the recognition locale
// from app settings (falling back to DefaultLocale), creates the
// Microsoft.Speech recognition engine, wires its events, and queues two
// grammars (an optional locale grammar file plus a basic keyword grammar)
// for asynchronous loading via LoadSpeechGrammarAsync().
public SpeechRecognizer(TranscriptRecorderSession transcriptRecorder)
{
    _transcriptRecorder = transcriptRecorder;
    _speechTranscript = new List<Microsoft.Speech.Recognition.RecognitionResult>();
    _isActive = false;
    _isRecognizing = false;
    // Create a speech recognition connector
    _speechRecognitionConnector = new SpeechRecognitionConnector();
    // Locale comes from configuration; fall back to the default when absent.
    _currentSRLocale = ConfigurationManager.AppSettings[SpeechRecogLocaleKey];
    if (String.IsNullOrEmpty(_currentSRLocale))
    {
        NonBlockingConsole.WriteLine("No locale specified, using default locale for speech recognition: " + DefaultLocale);
        _currentSRLocale = DefaultLocale;
    }
    // Create speech recognition engine and start recognizing by attaching connector to engine
    try
    {
        // NOTE(review): the engine is created WITHOUT a locale here (the
        // locale-specific construction is commented out below), so the
        // catch-block fallback to DefaultLocale is effectively dead code —
        // confirm whether locale selection should be re-enabled.
        _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine();
        /* System.Globalization.CultureInfo localeCultureInfo = new System.Globalization.CultureInfo(_currentSRLocale); foreach (RecognizerInfo r in Microsoft.Speech.Recognition.SpeechRecognitionEngine.InstalledRecognizers()) { if (r.Culture.Equals(localeCultureInfo)) { _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(r); break; } } if (_speechRecognitionEngine == null) { _speechRecognitionEngine = new SpeechRecognitionEngine(); } */
        //_speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(_currentSRLocale));
    }
    catch (Exception e)
    {
        // NOTE(review): this string literal spans a raw line break in the
        // original source — likely formatting corruption; verify upstream.
        NonBlockingConsole.WriteLine("Error: Unable to load SpeechRecognition locale: " + _currentSRLocale + ". 
Exception: " + e.ToString());
        // Use default locale
        NonBlockingConsole.WriteLine("Falling back to default locale for SpeechRecognitionEngine: " + DefaultLocale);
        _currentSRLocale = DefaultLocale;
        _speechRecognitionEngine = new SpeechRecognitionEngine();
        //_speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(_currentSRLocale));
    }
    // Wire up engine lifecycle events before any grammar is loaded.
    _speechRecognitionEngine.SpeechDetected += new EventHandler<Microsoft.Speech.Recognition.SpeechDetectedEventArgs>(SpeechRecognitionEngine_SpeechDetected);
    _speechRecognitionEngine.RecognizeCompleted += new EventHandler<Microsoft.Speech.Recognition.RecognizeCompletedEventArgs>(SpeechRecognitionEngine_RecognizeCompleted);
    _speechRecognitionEngine.LoadGrammarCompleted += new EventHandler<Microsoft.Speech.Recognition.LoadGrammarCompletedEventArgs>(SpeechRecognitionEngine_LoadGrammarCompleted);
    _grammars = new List<Microsoft.Speech.Recognition.Grammar>();
    // TODO: Add default installed speech recognizer grammar
    // TODO: Might already be done via compiling with Recognition Settings File?
    // Add default locale language grammar file (if it exists)
    String localLanguageGrammarFilePath = Path.Combine(Environment.CurrentDirectory, @"en-US.cfgpp");
    if (File.Exists(localLanguageGrammarFilePath))
    {
        // NOTE(review): mangled line break inside this literal too — verify.
        NonBlockingConsole.WriteLine("SpeechRecognizer(). 
Adding locale language file at path: " + localLanguageGrammarFilePath);
        GrammarBuilder builder = new GrammarBuilder();
        builder.AppendRuleReference(localLanguageGrammarFilePath);
        Grammar localeLanguageGrammar = new Grammar(builder);
        localeLanguageGrammar.Name = "Local language grammar";
        //localeLanguageGrammar.Priority = 1;
        _grammars.Add(localeLanguageGrammar);
    }
    // Basic built-in keyword grammar: greetings, yes/no, digits, exit.
    string[] recognizedString = { "hello", "bye", "yes", "no", "help", "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "exit" };
    Choices numberChoices = new Choices(recognizedString);
    Grammar basicGrammar = new Grammar(new GrammarBuilder(numberChoices));
    basicGrammar.Name = "Basic Grammar";
    //basicGrammar.Priority = 2;
    _grammars.Add(basicGrammar);
    // Kick off asynchronous loading of all queued grammars.
    LoadSpeechGrammarAsync();
}
// Constructs the Jarvis assistant: instantiates the configured IJModules,
// starts the alert thread, collects each module's grammar file into a
// Microsoft.Speech grammar list, sets up a System.Speech dictation engine on
// the default audio device, then locates a connected Kinect sensor and runs
// keyword recognition on its audio stream until ENTER is pressed.
//
// BUG FIX: the original called Debug.WriteLine(ri.Id) BEFORE checking
// `ri == null`, so a missing Kinect speech recognizer crashed with
// NullReferenceException instead of printing the intended error message.
// The null check now comes first. Also hoisted the repeated
// module.getGrammarFile() call into a local.
public Jarvis()
{
    modules = new LinkedList<IJModule>();

    /*************** IJModule Instatiation Stuff ****************/
    modules.AddLast(new MusicControl(preferences.mediaplayerprocess, preferences.initialvolume, preferences.volumeincrements));
    if (preferences.usegooglevoice)
        modules.AddLast(new GoogleVoice(preferences.googleemail, preferences.googlepassword, preferences.googleaddressbook));
    if (preferences.facebookrssfeed != null)
        modules.AddLast(new Facebook(preferences.facebookrssfeed));
    if (preferences.usegooglecalendar)
        modules.AddLast(new GoogleCalendar(preferences.googleemail, preferences.googlepassword, preferences.googlecalendaralerttime));

    // Background thread that services module alerts.
    alertThread = new Thread(new ThreadStart(alertFunction));
    alertThread.Name = "Alert Thread";
    alertThread.Start();

    /****************Get Grammar From Modules*********************/
    var grammars = new LinkedList<Microsoft.Speech.Recognition.Grammar>();
    foreach (IJModule module in modules)
    {
        var grammarFile = module.getGrammarFile(); // hoisted: was called three times
        if (grammarFile != null)
        {
            var grammarUri = "file://" + System.Environment.CurrentDirectory + "\\" + grammarFile;
            var gb = new Microsoft.Speech.Recognition.GrammarBuilder();
            gb.AppendRuleReference(grammarUri);
            Console.WriteLine(grammarUri);
            grammars.AddLast(new Microsoft.Speech.Recognition.Grammar(gb));
        }
    }

    /************ Speech Recognition Stuff **********************/
    // Free-form dictation runs on the default microphone via System.Speech.
    dictation = new System.Speech.Recognition.SpeechRecognitionEngine();
    dictation.SetInputToDefaultAudioDevice();
    dictation.LoadGrammar(new DictationGrammar());
    dictation.SpeechRecognized += SreSpeechRecognized;

    // Find the first connected Kinect sensor, if any.
    sensor = (from sensorToCheck in KinectSensor.KinectSensors where sensorToCheck.Status == KinectStatus.Connected select sensorToCheck).FirstOrDefault();
    if (sensor == null)
    {
        Console.WriteLine(
            "No Kinect sensors are attached to this computer or none of the ones that are\n" +
            "attached are \"Connected\".\n" +
            "Press any key to continue.\n");
        Console.ReadKey(true);
        return;
    }
    sensor.Start();

    KinectAudioSource source = sensor.AudioSource;
    source.EchoCancellationMode = EchoCancellationMode.CancellationOnly;
    source.AutomaticGainControlEnabled = false;

    Microsoft.Speech.Recognition.RecognizerInfo ri = GetKinectRecognizer();
    // Null check must precede any use of `ri` (see BUG FIX note above).
    if (ri == null)
    {
        Console.WriteLine("Could not find Kinect speech recognizer. Please refer to the sample requirements.");
        return;
    }
    Debug.WriteLine(ri.Id);

    // Give the device a few seconds to become ready before recognizing.
    int wait = 4;
    while (wait > 0)
    {
        Console.Write("Device will be ready for speech recognition in {0} second(s).\r", wait--);
        Thread.Sleep(1000);
    }

    //sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
    sre = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(ri.Id);
    foreach (Microsoft.Speech.Recognition.Grammar g in grammars)
    {
        sre.LoadGrammar(g);
    }
    sre.SpeechRecognized += SreSpeechRecognized;

    using (Stream s = source.Start())
    {
        // 16 kHz, 16-bit, mono PCM — the format the Kinect audio source emits.
        sre.SetInputToAudioStream(s, new Microsoft.Speech.AudioFormat.SpeechAudioFormatInfo(Microsoft.Speech.AudioFormat.EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        Console.WriteLine("Recognizing speech. Say: 'red', 'green' or 'blue'. Press ENTER to stop");
        sre.RecognizeAsync(Microsoft.Speech.Recognition.RecognizeMode.Multiple);
        Console.ReadLine();
        Console.WriteLine("Stopping recognizer ...");
        sre.RecognizeAsyncStop();
    }
    source.Stop();
    alertThread.Abort();
}
// Constructs a SpeechRecognizer for the given transcript recorder session
// (near-duplicate of the earlier copy of this constructor): creates the
// speech recognition connector, resolves the recognition locale from app
// settings (falling back to DefaultLocale), creates the Microsoft.Speech
// recognition engine, wires its events, and queues two grammars (an
// optional locale grammar file plus a basic keyword grammar) for
// asynchronous loading via LoadSpeechGrammarAsync().
public SpeechRecognizer(TranscriptRecorderSession transcriptRecorder)
{
    _transcriptRecorder = transcriptRecorder;
    _speechTranscript = new List <Microsoft.Speech.Recognition.RecognitionResult>();
    _isActive = false;
    _isRecognizing = false;
    // Create a speech recognition connector
    _speechRecognitionConnector = new SpeechRecognitionConnector();
    // Locale comes from configuration; fall back to the default when absent.
    _currentSRLocale = ConfigurationManager.AppSettings[SpeechRecogLocaleKey];
    if (String.IsNullOrEmpty(_currentSRLocale))
    {
        NonBlockingConsole.WriteLine("No locale specified, using default locale for speech recognition: " + DefaultLocale);
        _currentSRLocale = DefaultLocale;
    }
    // Create speech recognition engine and start recognizing by attaching connector to engine
    try
    {
        // NOTE(review): the engine is created WITHOUT a locale here (the
        // locale-specific construction is commented out below), so the
        // catch-block fallback to DefaultLocale is effectively dead code —
        // confirm whether locale selection should be re-enabled.
        _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine();
        /*
         * System.Globalization.CultureInfo localeCultureInfo = new System.Globalization.CultureInfo(_currentSRLocale);
         * foreach (RecognizerInfo r in Microsoft.Speech.Recognition.SpeechRecognitionEngine.InstalledRecognizers())
         * {
         * if (r.Culture.Equals(localeCultureInfo))
         * {
         * _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(r);
         * break;
         * }
         * }
         * if (_speechRecognitionEngine == null)
         * {
         * _speechRecognitionEngine = new SpeechRecognitionEngine();
         * }
         */
        //_speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(_currentSRLocale));
    }
    catch (Exception e)
    {
        // NOTE(review): this string literal spans a raw line break in the
        // original source — likely formatting corruption; verify upstream.
        NonBlockingConsole.WriteLine("Error: Unable to load SpeechRecognition locale: " + _currentSRLocale + ". 
Exception: " + e.ToString());
        // Use default locale
        NonBlockingConsole.WriteLine("Falling back to default locale for SpeechRecognitionEngine: " + DefaultLocale);
        _currentSRLocale = DefaultLocale;
        _speechRecognitionEngine = new SpeechRecognitionEngine();
        //_speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(_currentSRLocale));
    }
    // Wire up engine lifecycle events before any grammar is loaded.
    _speechRecognitionEngine.SpeechDetected += new EventHandler <Microsoft.Speech.Recognition.SpeechDetectedEventArgs>(SpeechRecognitionEngine_SpeechDetected);
    _speechRecognitionEngine.RecognizeCompleted += new EventHandler <Microsoft.Speech.Recognition.RecognizeCompletedEventArgs>(SpeechRecognitionEngine_RecognizeCompleted);
    _speechRecognitionEngine.LoadGrammarCompleted += new EventHandler <Microsoft.Speech.Recognition.LoadGrammarCompletedEventArgs>(SpeechRecognitionEngine_LoadGrammarCompleted);
    _grammars = new List <Microsoft.Speech.Recognition.Grammar>();
    // TODO: Add default installed speech recognizer grammar
    // TODO: Might already be done via compiling with Recognition Settings File?
    // Add default locale language grammar file (if it exists)
    String localLanguageGrammarFilePath = Path.Combine(Environment.CurrentDirectory, @"en-US.cfgpp");
    if (File.Exists(localLanguageGrammarFilePath))
    {
        // NOTE(review): mangled line break inside this literal too — verify.
        NonBlockingConsole.WriteLine("SpeechRecognizer(). 
Adding locale language file at path: " + localLanguageGrammarFilePath);
        GrammarBuilder builder = new GrammarBuilder();
        builder.AppendRuleReference(localLanguageGrammarFilePath);
        Grammar localeLanguageGrammar = new Grammar(builder);
        localeLanguageGrammar.Name = "Local language grammar";
        //localeLanguageGrammar.Priority = 1;
        _grammars.Add(localeLanguageGrammar);
    }
    // Basic built-in keyword grammar: greetings, yes/no, digits, exit.
    string[] recognizedString = { "hello", "bye", "yes", "no", "help", "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "exit" };
    Choices numberChoices = new Choices(recognizedString);
    Grammar basicGrammar = new Grammar(new GrammarBuilder(numberChoices));
    basicGrammar.Name = "Basic Grammar";
    //basicGrammar.Priority = 2;
    _grammars.Add(basicGrammar);
    // Kick off asynchronous loading of all queued grammars.
    LoadSpeechGrammarAsync();
}