private void InitializeSpeechEngine()
{
    // Use the last installed recognizer, if any; otherwise fall back to the default engine.
    foreach (var el in SpeechRecognitionEngine.InstalledRecognizers())
    {
        SpeechEngine = new SpeechRecognitionEngine(el.Id);
    }
    if (SpeechEngine == null)
    {
        SpeechEngine = new SpeechRecognitionEngine();
    }

    Choices commands = new Choices();
    commands.Add(new string[] { "play", "pause", "stop" });

    GrammarBuilder gb = new GrammarBuilder();
    gb.Append(commands);

    // Create the Grammar instance.
    Grammar g = new Grammar(gb);

    try
    {
        SpeechEngine.LoadGrammar(g);
        SpeechEngine.RequestRecognizerUpdate();
        SpeechEngine.SetInputToDefaultAudioDevice();

        // Subscribe before starting continuous recognition so no results are missed.
        SpeechEngine.SpeechRecognized += SpeechEngine_SpeechRecognized;
        SpeechEngine.RecognizeAsync(RecognizeMode.Multiple);
    }
    catch
    {
        return;
    }
}
public SpeechRec()
{
    if (SpeechRecognitionEngine.InstalledRecognizers().Count == 0)
    {
        throw new Exception("InstalledRecognizers returned 0");
    }

    choicesList = new string[]
    {
        "computer", "cancel query", "add task", "list task", "time",
        "start chrome", "start notepad", "start battle", "start snip", "list choices"
    };
    choices.Add(choicesList);

    Grammar grammar = new Grammar(new GrammarBuilder(choices));
    speechRecognitionEngine.RequestRecognizerUpdate();
    speechRecognitionEngine.LoadGrammar(grammar);
    speechRecognitionEngine.SpeechRecognized += SpeechRecognitionEngine_SpeechRecognized;
    speechRecognitionEngine.SetInputToDefaultAudioDevice();
    speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

    speechSynthesizer.Speak("Ready");
}
private static Speech.Recognition.RecognizerInfo TryGetKinectRecognizer()
{
    IEnumerable<Speech.Recognition.RecognizerInfo> recognizers;

    // This is required to catch the case when an expected recognizer is not installed.
    // By default - the x86 Speech Runtime is always expected.
    try
    {
        recognizers = SpeechRecognitionEngine.InstalledRecognizers();
    }
    catch (COMException)
    {
        return null;
    }

    foreach (Speech.Recognition.RecognizerInfo recognizer in recognizers)
    {
        string value;
        recognizer.AdditionalInfo.TryGetValue("Kinect", out value);
        if ("True".Equals(value, StringComparison.OrdinalIgnoreCase) &&
            "en-US".Equals(recognizer.Culture.Name, StringComparison.OrdinalIgnoreCase))
        {
            return recognizer;
        }
    }

    return null;
}
private void PrintEngineName()
{
    foreach (var recognizer in SpeechRecognitionEngine.InstalledRecognizers())
    {
        Console.WriteLine("Starting {0}", recognizer.Description);
    }
}
public NotesWindow()
{
    InitializeComponent();

    var currentCulture = (from r in SpeechRecognitionEngine.InstalledRecognizers()
                          where r.Culture.Equals(Thread.CurrentThread.CurrentCulture)
                          select r).FirstOrDefault();
    //recognizer = new SpeechRecognitionEngine(currentCulture);

    GrammarBuilder builder = new GrammarBuilder();
    builder.AppendDictation();
    Grammar grammar = new Grammar(builder);

    //recognizer.LoadGrammar(grammar);
    //recognizer.SetInputToDefaultAudioDevice();
    //recognizer.SpeechRecognized += Recognizer_SpeechRecognize;

    var fontFamilies = Fonts.SystemFontFamilies.OrderBy(f => f.Source);
    fontFamilyComboBox.ItemsSource = fontFamilies;

    List<double> fontSizes = new List<double>() { 8, 9, 10, 11, 12, 14, 16, 28, 48, 72 };
    fontSizeComboBox.ItemsSource = fontSizes;
}
public void Init(MainWindow window)
{
    try
    {
        // Parse the CSV file
        var csvFile = this.parser.GetData("voice");
        this.PopulateResponseTable(this.parser.ParseData(csvFile));

        info = null;
        foreach (RecognizerInfo ri in SpeechRecognitionEngine.InstalledRecognizers())
        {
            if (ri.Culture.TwoLetterISOLanguageName.Equals("en"))
            {
                info = ri;
                break;
            }
        }

        if (info == null)
        {
            throw new InvalidOperationException("No English speech recognizer is installed.");
        }

        var gb = new GrammarBuilder();
        rec = new SpeechRecognitionEngine(info);
        gb.Culture = info.Culture;
        gb.Append(list);

        Grammar gr = new Grammar(gb);
        rec.RequestRecognizerUpdate();
        rec.LoadGrammar(gr);
        rec.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(Rec_SpeachRecognized);
        rec.SetInputToDefaultAudioDevice();
        rec.RecognizeAsync(RecognizeMode.Multiple);
    }
    catch (Exception ex)
    {
        MessageBox.Show(window,
            "An error occurred: " + ex.Message + "\r\n\r\n(Try running MonikAI as an administrator.)");
    }
}
public NotesWindow()
{
    InitializeComponent();

    viewModel = this.Resources["vm"] as NotesVM;
    container.DataContext = viewModel;
    viewModel.SelectedNoteChanged += ViewModel_SelectedNoteChanged;

    var currentCulture = (from r in SpeechRecognitionEngine.InstalledRecognizers()
                          where r.Culture.Equals(Thread.CurrentThread.CurrentCulture)
                          select r).FirstOrDefault();
    recognizer = new SpeechRecognitionEngine(currentCulture);

    GrammarBuilder builder = new GrammarBuilder();
    builder.AppendDictation();
    Grammar grammar = new Grammar(builder);

    recognizer.LoadGrammar(grammar);
    recognizer.SetInputToDefaultAudioDevice();
    recognizer.SpeechRecognized += Recognizer_SpeechRecognized;

    var fontFamilies = Fonts.SystemFontFamilies.OrderBy(f => f.Source);
    fontFamilyComboBox.ItemsSource = fontFamilies;

    List<double> fontSizes = new List<double>() { 8, 9, 10, 11, 12, 14, 16, 28 };
    fontSizeComboBox.ItemsSource = fontSizes;
}
public bool InitSTT(string recognizerID = null)
{
    try
    {
        Console.Write("InitSTT");
        Initialized = false;

        var RecognizerInfoLit = SpeechRecognitionEngine.InstalledRecognizers();

        _recognition = new SpeechRecognitionEngine(new CultureInfo("en-US"));
        _recognition.LoadGrammar(new Grammar(new GrammarBuilder("exit")));
        _recognition.LoadGrammar(new DictationGrammar());
        loadAdditionalGrammer(_recognition);

        //_recognition.BabbleTimeout = new TimeSpan(0);
        //_recognition.InitialSilenceTimeout = new TimeSpan(0);

        _recognition.SpeechHypothesized += recognition_SpeechHypothesized;
        _recognition.SpeechRecognized += recognition_SpeechRecognized;
        _recognition.SpeechDetected += recognition_SpeechDetected;
        _recognition.RecognizeCompleted += recognition_RecognizeCompleted;
        _recognition.SpeechRecognitionRejected += recognition_SpeechRecognizedRejected;

        _speechFormat = new SpeechAudioFormatInfo(
            _audioFormat.SampleRate,
            (AudioBitsPerSample)_audioFormat.BitRate,
            (AudioChannel)_audioFormat.Channels);

        //_recognition.UnloadAllGrammars();

        Initialized = true;
        return true;
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }

    return false;
}
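The `loadAdditionalGrammer(_recognition)` call above refers to a helper that is not part of this snippet. A minimal sketch of what such a helper might do, assuming it only adds an extra command grammar on top of the "exit" and dictation grammars, is:

// Hypothetical sketch only: the real loadAdditionalGrammer is not shown in the snippet above.
private void loadAdditionalGrammer(SpeechRecognitionEngine recognition)
{
    // Assumed command set; loaded alongside the grammars already attached to the engine.
    var commands = new Choices("yes", "no", "help");
    recognition.LoadGrammar(new Grammar(new GrammarBuilder(commands)) { Name = "commands" });
}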
public SpeechRecognizer(List<string> wordsToRecognize, EventHandler<SpeechRecognizedEventArgs> speechCallback)
{
    if (wordsToRecognize != null && wordsToRecognize.Count > 0)
    {
        string RecognizerId = "SR_MS_en-US_Kinect_10.0";

        speechSource = new KinectAudioSource();
        speechSource.FeatureMode = true;
        speechSource.AutomaticGainControl = false;
        speechSource.SystemMode = SystemMode.OptibeamArrayOnly;

        this.speechRecInfo = (from r in SpeechRecognitionEngine.InstalledRecognizers()
                              where r.Id == RecognizerId
                              select r).FirstOrDefault();

        SetGrammar(wordsToRecognize);
        SetSpeechCallback(speechCallback);

        if (speechCallback != null)
        {
            this.speechCallback = speechCallback;
            speechEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(speechCallback);
        }

        stream = speechSource.Start();
        speechEngine.SetInputToAudioStream(stream,
            new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        speechEngine.RecognizeAsync(RecognizeMode.Multiple);
    }
}
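Neither `SetGrammar` nor the creation of `speechEngine` appears in this constructor. A sketch of what `SetGrammar` might look like, assuming it creates the engine from the selected Kinect recognizer and loads the word list as a single command grammar, is:

// Hypothetical sketch only: the real SetGrammar is not shown in the snippet above.
private void SetGrammar(List<string> wordsToRecognize)
{
    // Assumes speechRecInfo was resolved to the Kinect recognizer beforehand.
    speechEngine = new SpeechRecognitionEngine(this.speechRecInfo.Id);
    var words = new Choices(wordsToRecognize.ToArray());
    var builder = new GrammarBuilder(words) { Culture = this.speechRecInfo.Culture };
    speechEngine.LoadGrammar(new Grammar(builder));
}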
static void RecognizeSpeechAndWriteToConsole()
{
    // Select a speech recognizer that supports Spanish.
    RecognizerInfo info = null;
    Console.WriteLine("Available recognizers:");
    foreach (RecognizerInfo ri in SpeechRecognitionEngine.InstalledRecognizers())
    {
        Console.WriteLine(ri.Culture.TwoLetterISOLanguageName);
        if (ri.Culture.TwoLetterISOLanguageName.Equals("es"))
        {
            info = ri;
            break;
        }
    }
    if (info == null)
    {
        return;
    }

    _recognizer = new SpeechRecognitionEngine(info);
    _recognizer.LoadGrammar(CreateGrammar()); // "test" grammar
    _recognizer.SpeechRecognized += _recognizeSpeechAndWriteToConsole_SpeechRecognized; // if speech is recognized, call the specified method
    _recognizer.SpeechRecognitionRejected += _recognizeSpeechAndWriteToConsole_SpeechRecognitionRejected; // if recognized speech is rejected, call the specified method
    _recognizer.SetInputToDefaultAudioDevice(); // set the input to the default audio device
    _recognizer.RecognizeAsync(RecognizeMode.Multiple); // recognize speech asynchronously
}
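`CreateGrammar()` above is only described as a "test" grammar and is not included in the sample. A minimal sketch, assuming a single-phrase command grammar whose culture matches the Spanish recognizer selected above, could be:

// Hypothetical sketch only: the real CreateGrammar is not shown in the snippet above.
private static Grammar CreateGrammar()
{
    var commands = new Choices("prueba"); // "test"
    var builder = new GrammarBuilder(commands)
    {
        Culture = new CultureInfo("es-ES") // assumption: should match the culture of the selected recognizer
    };
    return new Grammar(builder) { Name = "test" };
}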
private static RecognizerInfo GetRecogniser()
{
    foreach (var recognizer in SpeechRecognitionEngine.InstalledRecognizers())
    {
        var country = recognizer.Culture.Name;
        var value = string.Empty;
        recognizer.AdditionalInfo.TryGetValue("Kinect", out value);

        if (string.IsNullOrEmpty(value))
        {
            continue;
        }
        if (Compare(value, "True") && Compare(country, "en-US"))
        {
            return recognizer;
        }
        // "en-GB" is the culture name for UK English ("en-UK" is not a valid culture name).
        if (Compare(value, "True") && Compare(country, "en-GB"))
        {
            return recognizer;
        }
    }
    return null;
}
/// <summary>
/// State Initializer
/// </summary>
public void InitializeKinectAudio()
{
    KinectSensor sensor = (from sensorToCheck in KinectSensor.KinectSensors
                           where sensorToCheck.Status == KinectStatus.Connected
                           select sensorToCheck).FirstOrDefault();
    if (null == sensor)
    {
        // Sensor not connected, bail
        throw new InvalidOperationException("Cannot find Kinect sensor, make sure Kinect is connected");
    }

    this.kinectAudioSource = sensor.AudioSource;
    this.kinectAudioSource.AutomaticGainControlEnabled = false; // Important to turn this off for speech recognition (Kinect SDK recommendation)

    sensor.Start();
    this.kinectAudioStream = this.kinectAudioSource.Start();

    Func<RecognizerInfo, bool> matchingFunc = r =>
    {
        string value;
        r.AdditionalInfo.TryGetValue("Kinect", out value);
        return "True".Equals(value, StringComparison.InvariantCultureIgnoreCase) &&
               "en-US".Equals(r.Culture.Name, StringComparison.InvariantCultureIgnoreCase);
    };

    sr.RecognizerInfo ri = SpeechRecognitionEngine.InstalledRecognizers().Where(matchingFunc).FirstOrDefault();
    if (null == ri)
    {
        // If the speech recognizer is not detected, we have no choice but to bail
        throw new InvalidOperationException("Cannot find en-US speech recognizer");
    }

    this.state.Recognizer = new sr.SpeechRecognitionEngine(ri.Id);
}
public static void MainVoice()
{
    // List the installed recognizers (the result is not used here).
    SpeechRecognitionEngine.InstalledRecognizers();

    // Create an in-process speech recognizer for the en-US locale.
    using (SpeechRecognitionEngine recognizer = new SpeechRecognitionEngine(new CultureInfo("en-US")))
    {
        // Create and load a dictation grammar.
        recognizer.LoadGrammar(new DictationGrammar());

        // Add a handler for the speech recognized event.
        recognizer.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(recognizer_SpeechRecognized);

        // Configure input to the speech recognizer.
        recognizer.SetInputToDefaultAudioDevice();

        // Start asynchronous, continuous speech recognition.
        recognizer.RecognizeAsync(RecognizeMode.Multiple);

        // Keep the console window open.
        while (true)
        {
            Console.ReadLine();
        }
    }
}
private readonly DictationGrammar m_grammar; // dictation (free-form) grammar

public Recognizer()
{
    var myCIintl = new CultureInfo("en-US");
    var rs = SpeechRecognitionEngine.InstalledRecognizers();
    if (rs.Count > 0)
    {
        foreach (var config in rs) // enumerate all installed speech engines
        {
            if (config.Culture.Equals(myCIintl) && config.Id == "MS-1033-80-DESK")
            {
                m_recognizer = new SpeechRecognitionEngine(config);
                break;
            } // pick the US English recognition engine
        }
        if (m_recognizer == null) // if no suitable engine was found, use the first one
        {
            m_recognizer = new SpeechRecognitionEngine(rs[0]);
        }
    }

    if (m_recognizer != null)
    {
        var kws = Settings.Default.Keywords;
        var fg = new string[kws.Count];
        kws.CopyTo(fg, 0);
        InitializeSpeechRecognitionEngine(fg); // initialize the speech recognition engine
        m_grammar = new DictationGrammar();
    }
    else
    {
        Console.WriteLine("Failed to create the speech recognizer");
    }
}
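`InitializeSpeechRecognitionEngine(fg)` is defined elsewhere in that class. A plausible sketch, assuming it loads the keyword array as a command grammar and starts continuous recognition on the default microphone, is:

// Hypothetical sketch only: the real InitializeSpeechRecognitionEngine is not shown in the snippet above.
private void InitializeSpeechRecognitionEngine(string[] keywords)
{
    var keywordGrammar = new Grammar(new GrammarBuilder(new Choices(keywords))) { Name = "keywords" };
    m_recognizer.LoadGrammar(keywordGrammar);
    m_recognizer.SetInputToDefaultAudioDevice();
    m_recognizer.SpeechRecognized += OnSpeechRecognized; // hypothetical handler name
    m_recognizer.RecognizeAsync(RecognizeMode.Multiple);
}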
private void button1_Click(object sender, EventArgs e)
{
    var recognizers = SpeechRecognitionEngine.InstalledRecognizers();
    SpeechRecognitionEngine recognizer = new SpeechRecognitionEngine();

    Choices colors = new Choices();
    colors.Add("red");
    colors.Add("green");
    colors.Add("blue");

    GrammarBuilder gb = new GrammarBuilder();
    gb.Append(colors);
    Grammar dictationGrammar = new Grammar(gb);
    recognizer.LoadGrammar(dictationGrammar);

    try
    {
        button1.Text = "Speak Now";
        recognizer.SetInputToDefaultAudioDevice();
        RecognitionResult result = recognizer.Recognize();
        button1.Text = result.Text;
    }
    catch (InvalidOperationException exception)
    {
        button1.Text = String.Format(
            "Could not recognize input from default audio device. Is a microphone or sound card available?\r\n{0} - {1}.",
            exception.Source, exception.Message);
    }
    finally
    {
        recognizer.UnloadAllGrammars();
    }
}
public void SpeechRecognitionEngineSetup2()
{
    manualResetEvent = new ManualResetEvent(false);

    var RecognizerInfoLit = SpeechRecognitionEngine.InstalledRecognizers();

    if (!_initialized)
    {
        return;
    }

    CultureInfo ci = new CultureInfo("en-US");
    SpeechRecognitionEngine sre = new SpeechRecognitionEngine(ci);
    sre.LoadGrammar(new Grammar(new GrammarBuilder("exit")));
    sre.LoadGrammar(new DictationGrammar());

    //DictationGrammar dg = new DictationGrammar("grammar:dictation");
    //dg.SpeechRecognized += (sre_SpeechRecognized);
    //dg.Enabled = true;
    //sre.LoadGrammar(dg);
    //grammer(sre);

    sre.SetInputToDefaultAudioDevice();
    Console.WriteLine(sre.InitialSilenceTimeout.ToString());

    sre.SpeechHypothesized += sre_SpeechHypothesized;
    sre.SpeechRecognized += sre_SpeechRecognized;
    sre.SpeechDetected += sre_SpeechDetected;
    sre.SpeechRecognitionRejected += sre_SpeechRecognizedRejected;
    sre.RecognizeCompleted += sre_RecognizeCompleted;

    sre.RecognizeAsync(RecognizeMode.Multiple);
    manualResetEvent.WaitOne();
}
private RecognizerInfo obtenerLP()
{
    // Check every language pack that is installed.
    foreach (RecognizerInfo recognizer in SpeechRecognitionEngine.InstalledRecognizers())
    {
        string value;
        recognizer.AdditionalInfo.TryGetValue("Kinect", out value);

        // This is where the language is chosen: the culture name below is "es-MX";
        // to switch to US English, change that value to "en-US".
        if ("True".Equals(value, StringComparison.OrdinalIgnoreCase) &&
            "es-MX".Equals(recognizer.Culture.Name, StringComparison.OrdinalIgnoreCase))
        {
            // The requested language pack was found, so return this recognizer.
            return recognizer;
        }
    }

    // No matching language pack was found; return null.
    return null;
}
private void StartSpeechRecognition()
{
    // KinectAudioSource source = kinectDevice.AudioSource;
    // // Turn off echo-cancellation mode
    // source.EchoCancellationMode = EchoCancellationMode.None;
    // source.AutomaticGainControlEnabled = false;
    //RecognizerInfo ri =

    _source = CreateAudioSource();

    Func<RecognizerInfo, bool> matchingFunc = r =>
    {
        string value;
        r.AdditionalInfo.TryGetValue("Kinect", out value);
        return "True".Equals(value, StringComparison.InvariantCultureIgnoreCase) &&
               "en-US".Equals(r.Culture.Name, StringComparison.InvariantCultureIgnoreCase);
    };

    RecognizerInfo ri = SpeechRecognitionEngine.InstalledRecognizers().Where(matchingFunc).FirstOrDefault();

    _sre = new SpeechRecognitionEngine(ri.Id);
    CreateGrammars(ri);

    _sre.SpeechRecognized += sre_SpeechRecognized;
    _sre.SpeechHypothesized += sre_SpeechHypothesized;
    _sre.SpeechRecognitionRejected += sre_SpeechRecognitionRejected;

    Stream s = _source.Start();
    _sre.SetInputToAudioStream(s,
        new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
    _sre.RecognizeAsync(RecognizeMode.Multiple);
}
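`CreateGrammars(ri)` is not included in the snippet. A minimal sketch, assuming it builds a single command grammar in the recognizer's culture and loads it into `_sre`, might be:

// Hypothetical sketch only: the real CreateGrammars is not shown in the snippet above.
private void CreateGrammars(RecognizerInfo ri)
{
    var commands = new Choices("start", "stop", "pause"); // assumed command set
    var builder = new GrammarBuilder(commands) { Culture = ri.Culture };
    _sre.LoadGrammar(new Grammar(builder));
}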
void initialyzeSpeechEngine()
{
    RecognizerInfo info = null;
    if (currentOptions.language == null)
    {
        currentOptions.language = System.Globalization.CultureInfo.CurrentUICulture.DisplayName;
    }

    foreach (RecognizerInfo ri in SpeechRecognitionEngine.InstalledRecognizers())
    {
        if (ri.Culture.DisplayName.Equals(currentOptions.language))
        {
            info = ri;
            break;
        }
    }
    if (info == null && SpeechRecognitionEngine.InstalledRecognizers().Count != 0)
    {
        info = SpeechRecognitionEngine.InstalledRecognizers()[0];
    }
    if (info == null)
    {
        return;
    }

    richTextBox_Output.AppendText("Using " + info.Culture.DisplayName + "\n");

    speechEngine = new SpeechRecognitionEngine(info);
    speechEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(sr_speechRecognized);
    speechEngine.AudioLevelUpdated += new EventHandler<AudioLevelUpdatedEventArgs>(sr_audioLevelUpdated);

    try
    {
        speechEngine.SetInputToDefaultAudioDevice();
    }
    catch (InvalidOperationException)
    {
        richTextBox_Output.AppendText("No microphone was found\n");
    }

    speechEngine.MaxAlternates = 3;
}
/// <summary>
/// Gets the metadata for the speech recognizer (acoustic model) most suitable to
/// process audio from Kinect device.
/// </summary>
/// <returns>
/// RecognizerInfo if found, <code>null</code> otherwise.
/// </returns>
private static RecognizerInfo GetKinectRecognizer()
{
    IEnumerable<RecognizerInfo> recognizers = SpeechRecognitionEngine.InstalledRecognizers();
    CultureInfo ci = CultureInfo.CurrentCulture;
    RecognizerInfo ri = null;

    foreach (RecognizerInfo recognizer in recognizers)
    {
        if (ri == null)
        {
            // Use the first one we found if none are a perfect match
            ri = recognizer;
        }
        if (recognizer.Culture.Name.Equals(ci.Name))
        {
            // We have a recognizer matching our current culture
            return recognizer;
        }
    }

    // TODO: throw an exception if null
    return ri;
}
public ComponentControl()
{
    this.AudioSource = new KinectAudioSource();
    this.AudioSource.FeatureMode = true;
    this.AudioSource.AutomaticGainControl = false;
    this.AudioSource.SystemMode = SystemMode.OptibeamArrayOnly;
    this.AudioSource.BeamChanged += new EventHandler<BeamChangedEventArgs>(AudioSource_BeamChanged);

    this.Recognizer = SpeechRecognitionEngine.InstalledRecognizers().Where(r => r.Id == RecognizerId).FirstOrDefault();
    if (this.Recognizer == null)
    {
        throw new Exception("Could not find Kinect speech recognizer");
    }

    this.Engine = new SpeechRecognitionEngine(Recognizer.Id);
    this.Engine.UnloadAllGrammars();
    this.LoadGrammer();

    this.AudioStream = this.AudioSource.Start();
    this.Engine.SetInputToAudioStream(this.AudioStream,
        new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
    this.Engine.SpeechHypothesized += new EventHandler<SpeechHypothesizedEventArgs>(Engine_SpeechHypothesized);
    this.Engine.RecognizeAsync(RecognizeMode.Multiple);

    Console.WriteLine("Speech recognition initialized");
}
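`LoadGrammer()` is declared elsewhere in the class. A short sketch, assuming it loads a fixed command set into the Kinect engine using the recognizer's culture, might look like:

// Hypothetical sketch only: the real LoadGrammer is not shown in the snippet above.
private void LoadGrammer()
{
    var commands = new Choices("forward", "back", "left", "right"); // assumed command set
    var builder = new GrammarBuilder(commands) { Culture = this.Recognizer.Culture };
    this.Engine.LoadGrammar(new Grammar(builder));
}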
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
/*
 * private void setupGrammarAndCommands()
 * {
 *     ui.log("Recognizer.setupGrammarAndCommands()");
 *
 *     Choices colors = new Choices();
 *     colors.Add(new string[] {
 *         "move left",
 *         "move right",
 *         "move up",
 *         "move down",
 *         "select previous word",
 *         "select next word",
 *         "press alt",
 *         "press f",
 *         "press s"
 *     });
 *
 *     gb = new GrammarBuilder(colors);
 *     //gb.Append(colors);
 *
 *     g = new Grammar(gb);
 *     g.Name = "colours";
 *
 *     sre.LoadGrammar(g);
 *
 *     sre.RecognizeAsync(RecognizeMode.Multiple);
 * }
 */
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
private SpeechRecognitionEngine setupSpeechRecognitionEngine()
{
    ui.log("Recognizer.setupSpeechRecognitionEngine()");

    SpeechRecognitionEngine s = null;
    String culture = "en-US";

    foreach (RecognizerInfo config in SpeechRecognitionEngine.InstalledRecognizers())
    {
        if (config.Culture.ToString() == culture)
        {
            ui.log("foreach found something!"); // log before break so this line is reachable
            s = new SpeechRecognitionEngine(config);
            break;
        }
    }

    // if the desired culture is not found, then load the default
    if (s == null)
    {
        MessageBox.Show(
            "The desired culture is not installed on this machine, the speech-engine will continue using " +
            SpeechRecognitionEngine.InstalledRecognizers()[0].Culture.ToString() + " as the default culture.",
            "Culture " + culture + " not found!");
        s = new SpeechRecognitionEngine(SpeechRecognitionEngine.InstalledRecognizers()[0]);
    }

    s.SetInputToDefaultAudioDevice();
    return s;
}
// Checked
public NoteWindow()
{
    InitializeComponent();

    viewModel = new NoteVM();
    container.DataContext = viewModel;
    viewModel.SelectedNoteChanged += ViewModel_SelectedNoteChanged;
    viewModel.SelectedNotebookChanged += ViewModel_SelectedNotebookChanged;

    var currentCulture = (from r in SpeechRecognitionEngine.InstalledRecognizers()
                          where r.Culture.Equals(Thread.CurrentThread.CurrentCulture)
                          select r).FirstOrDefault();
    recognizer = new SpeechRecognitionEngine(currentCulture);

    GrammarBuilder builder = new GrammarBuilder();
    builder.AppendDictation();
    Grammar grammar = new Grammar(builder);
    recognizer.LoadGrammar(grammar);

    // set the input to the machine's default audio device
    recognizer.SetInputToDefaultAudioDevice();
    recognizer.SpeechRecognized += Recognizer_SpeechRecognized;

    // returns a list of font families
    var fontFamilies = Fonts.SystemFontFamilies.OrderBy(f => f.Source);
    fontFamilyComboBox.ItemsSource = fontFamilies;

    // returns a list of font sizes; this combo box also accepts custom values, as defined in the XAML file
    List<double> fontSizes = new List<double>() { 8, 9, 10, 11, 12, 14, 28, 48, 72 };
    fontSizeComboBox.ItemsSource = fontSizes;
}
public VoiceRegTest()
{
    // Select a speech recognizer that supports English.
    RecognizerInfo info = null;
    foreach (RecognizerInfo ri in SpeechRecognitionEngine.InstalledRecognizers())
    {
        if (ri.Culture.TwoLetterISOLanguageName.Equals("en"))
        {
            info = ri;
            break;
        }
    }
    if (info == null)
    {
        Console.WriteLine("Didn't find an English language pack");
        return;
    }
    Console.WriteLine("Found this language pack: " + info.Description);

    masterEngine = new SpeechRecognitionEngine(info);
    commands = new Choices();
}
private RecognizerInfo findReconizerInfo(String recoId, String language, bool findKinect)
{
    RecognizerInfo info = null;
    try
    {
        var recognizers = SpeechRecognitionEngine.InstalledRecognizers();
        foreach (var recInfo in recognizers)
        {
            Log("Id: " + recInfo.Id + " Name: " + recInfo.Name + " Culture: " + recInfo.Culture +
                " Kinect: " + recInfo.AdditionalInfo.ContainsKey("Kinect"));

            // Skip recognizers that do not match the requested language.
            if (!language.Equals(recInfo.Culture.Name, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            // If a specific recognizer id was requested, skip recognizers with a different id.
            if (!String.IsNullOrEmpty(recoId) && !recoId.Equals(recInfo.Id, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            // If a Kinect recognizer was requested, skip recognizers without Kinect support.
            if (findKinect && !recInfo.AdditionalInfo.ContainsKey("Kinect"))
            {
                continue;
            }
            info = recInfo;
        }
    }
    catch (COMException)
    {
    }
    return info;
}
public NotesWindow()
{
    InitializeComponent();

    notesVM = new NotesVM();
    mainContainer.DataContext = notesVM;
    notesVM.SelectedNoteChanged += NotesVM_SelectedNoteChanged;

    /*
     * var currentCulture = from r in SpeechRecognitionEngine.InstalledRecognizers()
     *                      where r.Culture.Equals(Thread.CurrentThread.CurrentCulture)
     *                      select r;
     */
    var currentCulture = SpeechRecognitionEngine.InstalledRecognizers()
        .Where(r => r.Culture.Equals(Thread.CurrentThread.CurrentUICulture))
        .FirstOrDefault();

    speechRecognizer = new SpeechRecognitionEngine(currentCulture);

    GrammarBuilder grammarBuilder = new GrammarBuilder();
    grammarBuilder.AppendDictation();
    speechRecognizer.LoadGrammar(new Grammar(grammarBuilder));
    speechRecognizer.SetInputToDefaultAudioDevice();
    speechRecognizer.SpeechRecognized += SpeechRecognizer_SpeechRecognized;

    cmbFontFamily.ItemsSource = Fonts.SystemFontFamilies.OrderBy(f => f.Source);
    cmbFontSize.ItemsSource = new List<double>() { 8, 9, 10, 11, 12, 14, 16, 28, 48, 72 };
}
/// <summary>
/// Gets the metadata for the speech recognizer (acoustic model) most suitable to
/// process audio from Kinect device.
/// </summary>
/// <returns>
/// RecognizerInfo if found, <code>null</code> otherwise.
/// </returns>
private static RecognizerInfo TryGetKinectRecognizer()
{
    IEnumerable<RecognizerInfo> recognizers;
    try
    {
        recognizers = SpeechRecognitionEngine.InstalledRecognizers();
    }
    catch (COMException)
    {
        return null;
    }

    foreach (RecognizerInfo recognizer in recognizers)
    {
        string value;
        recognizer.AdditionalInfo.TryGetValue("Kinect", out value);
        if ("True".Equals(value, StringComparison.OrdinalIgnoreCase) &&
            "en-US".Equals(recognizer.Culture.Name, StringComparison.OrdinalIgnoreCase))
        {
            return recognizer;
        }
    }

    return null;
}
public NotesWindow()
{
    InitializeComponent();

    VM = new NotesVM();
    ContainerDockPanel.DataContext = VM;
    VM.SelectedNoteChanged += VM_SelectedNoteChanged;

    var currentCulture = (from r in SpeechRecognitionEngine.InstalledRecognizers()
                          where r.Culture.Equals(Thread.CurrentThread.CurrentCulture)
                          select r).FirstOrDefault();
    recognizer = new SpeechRecognitionEngine(currentCulture);
    recognizer.SpeechRecognized += Recognizer_SpeechRecognized;

    GrammarBuilder builder = new GrammarBuilder();
    builder.AppendDictation();
    Grammar grammar = new Grammar(builder);
    recognizer.LoadGrammar(grammar);
    recognizer.SetInputToDefaultAudioDevice();

    var fontFamilySource = Fonts.SystemFontFamilies.OrderBy(f => f.Source).ToList();
    List<double> fontSizeSource = new List<double>() { 8, 10, 12, 14, 16, 18, 20, 24, 30, 35, 40, 50, 60, 70, 80 };
    fontFamilyComboBox.ItemsSource = fontFamilySource;
    fontSizeComboBox.ItemsSource = fontSizeSource;
}
//--------------------------------------------------------------------------------------------
private SpeechRecognitionEngine createSpeechEngine(string preferredCulture = "")
{
    if (preferredCulture.Length > 0)
    {
        foreach (RecognizerInfo config in SpeechRecognitionEngine.InstalledRecognizers())
        {
            if (config.Culture.ToString() == preferredCulture)
            {
                speechRecognitionEngine = new SpeechRecognitionEngine(config);
                break;
            }
        }
    }
    else
    {
        speechRecognitionEngine = new SpeechRecognitionEngine(SpeechRecognitionEngine.InstalledRecognizers()[0].Culture);
    }

    if (speechRecognitionEngine == null)
    {
        System.Windows.MessageBox.Show(
            "The requested language is not installed on this machine; the default language will be used instead: " +
            SpeechRecognitionEngine.InstalledRecognizers()[0].Culture.ToString());
        speechRecognitionEngine = new SpeechRecognitionEngine(SpeechRecognitionEngine.InstalledRecognizers()[0]);
    }

    return speechRecognitionEngine;
}
private void init_speech()
{
    this.label1.Content = "like";

    // Pick the second installed recognizer (assumes at least two recognizers are installed).
    ReadOnlyCollection<RecognizerInfo> a = SpeechRecognitionEngine.InstalledRecognizers();
    _recinfo = a[1];
    _recognizer = new SpeechRecognitionEngine(_recinfo);

    Choices cmd = new Choices();
    string[] d = Directory.GetFiles("wavkaz");
    wavstream = new Stream[d.Length];
    for (int i = 0; i < comms.Length; i++)
    {
        cmd.Add(comms[i]);
    }

    _gb = new GrammarBuilder(cmd);
    _gb.Culture = _recinfo.Culture;
    _recognizer.LoadGrammar(new Grammar(_gb));

    _synth = new SpeechSynthesizer();
    _synth.SetOutputToDefaultAudioDevice();

    _recognizer.SetInputToDefaultAudioDevice();
    _recognizer.RecognizeAsync(RecognizeMode.Multiple);
    _recognizer.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(_recognizer_SpeechRecognized);
}