// Builds a grammar from the words typed into txtInput and starts asynchronous
// recognition against a pre-recorded WAV file. Recognition results arrive via
// RecognizeCompletedHandler.
private void btnRecognize_Click(object sender, RoutedEventArgs e)
{
    text = txtInput.Text.ToLower();
    text = CleanText(text);
    words = text.Split(' ');
    wcount = words.Length;
    //words = words.Distinct().ToArray();
    txtOutput.Text = text;
    Choices choices = new Choices(words);
    // Wrap the word choices so any of them may repeat between 0 and wcount times.
    GrammarBuilder gb = new GrammarBuilder(new GrammarBuilder(choices), 0, wcount);
    //GrammarBuilder gb = new GrammarBuilder(txtInput.Text.Trim());
    gb.Culture = new CultureInfo("es-MX");
    Grammar grammar = new Grammar(gb);
    //recognizer = new SpeechRecognitionEngine("SR_MS_es-MX_TELE_11.0");
    //recognizer = new SpeechRecognitionEngine(new CultureInfo("es-MX"));
    recognizer.LoadGrammar(grammar);
    // NOTE(review): hard-coded absolute input path — consider making this
    // configurable or user-selected.
    recognizer.SetInputToWaveFile(@"E:\Proyectos\Audio Timestamps\chapter01.wav");
    //recognizer.SetInputToDefaultAudioDevice();
    recognizer.RecognizeCompleted += new EventHandler<RecognizeCompletedEventArgs>(RecognizeCompletedHandler);
    recognizer.RecognizeAsync(RecognizeMode.Multiple);
}
// Initialises the speech recognition engine from the Kinect recognizer and
// loads one choice per registered voice command.
// Throws RecognizerNotFoundException when no Kinect recognizer is installed;
// rethrows any engine-construction failure after logging it.
public void InicializeSpeechRecognize()
{
    RecognizerInfo ri = GetKinectRecognizer();
    if (ri == null)
    {
        throw new RecognizerNotFoundException();
    }

    try
    {
        _sre = new SpeechRecognitionEngine(ri.Id);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        // BUGFIX: was "throw e;", which resets the stack trace.
        // "throw;" rethrows the original exception with its trace intact.
        throw;
    }

    // One grammar choice per registered voice command.
    var choices = new Choices();
    foreach (CommandSpeechRecognition cmd in _commands.Values)
    {
        choices.Add(cmd.Choise);
    }

    // The grammar culture must match the recognizer's culture or loading fails.
    var gb = new GrammarBuilder { Culture = ri.Culture };
    gb.Append(choices);
    _sre.LoadGrammar(new Grammar(gb));

    _sre.SpeechRecognized += SreSpeechRecognized;
    _sre.SpeechHypothesized += SreSpeechHypothesized;
    _sre.SpeechRecognitionRejected += SreSpeechRecognitionRejected;
}
// Stores the Kinect handler and wires an (initially empty) grammar whose
// recognition events are routed to _grammar_SpeechRecognized.
public SpeechEngine(KinectHandler handler)
{
    _kinectHandler = handler;
    _commands = new GrammarBuilder();
    _grammar = new Grammar(_commands);
    // Method-group subscription — identical to the explicit EventHandler<> form.
    _grammar.SpeechRecognized += _grammar_SpeechRecognized;
}
// (Re)creates the shared recognition engine for the given language, loading
// the full VoiceCommands vocabulary, and returns it ready to listen on the
// default audio device. Rethrows UnauthorizedAccessException after logging.
public static SpeechRecognitionEngine getEngine(String lang)
{
    // Dispose any previously created engine before building a new one.
    // BUGFIX: the log line was outside the braceless "if", so "Kastat current
    // engine" printed even when nothing was disposed.
    if (init)
    {
        recEngine.Dispose();
        Console.WriteLine("Kastat current engine");
    }

    culture = new System.Globalization.CultureInfo(lang);
    VoiceCommands.Init(lang);

    choices = new Choices();
    choices.Add(VoiceCommands.GetAllCommands());

    grammarBuilder = new GrammarBuilder();
    grammarBuilder.Culture = culture;
    grammarBuilder.Append(choices);
    grammar = new Grammar(grammarBuilder);
    Console.WriteLine("Initialiserat svenskt grammar");

    try
    {
        recEngine = new SpeechRecognitionEngine(culture);
        recEngine.LoadGrammarAsync(grammar);
        Console.WriteLine("Laddat enginen med " + lang);
    }
    catch (UnauthorizedAccessException e)
    {
        Console.WriteLine("Error: UnauthorizedAccessException");
        Console.WriteLine(e.ToString());
        // BUGFIX: the exception used to be swallowed and execution fell through
        // to SetInputToDefaultAudioDevice() on a null/disposed engine, crashing
        // with a less meaningful exception. Rethrow the real failure instead.
        throw;
    }

    init = true;
    recEngine.SetInputToDefaultAudioDevice();
    return recEngine;
}
// Creates the Kinect speech recognizer and loads a grammar mapping each
// spoken accessory phrase to its semantic tag.
private SpeechRecognitionEngine CreateSpeechRecognizer()
{
    RecognizerInfo ri = GetKinectRecognizer();
    var engine = new SpeechRecognitionEngine(ri.Id);

    // Phrase -> semantic tag pairs the program needs to recognise.
    string[,] phrases =
    {
        { "moustache",  "MOUSTACHE"  },
        { "top hat",    "TOP HAT"    },
        { "glasses",    "GLASSES"    },
        { "sunglasses", "SUNGLASSES" },
        { "tie",        "TIE"        },
        { "bow",        "BOW"        },
        { "bear",       "BEAR"       },
    };

    var vocabulary = new Choices();
    for (int i = 0; i < phrases.GetLength(0); i++)
    {
        vocabulary.Add(new SemanticResultValue(phrases[i, 0], phrases[i, 1]));
    }

    // The grammar culture has to match the recognizer's culture.
    var builder = new GrammarBuilder { Culture = ri.Culture };
    builder.Append(vocabulary);
    engine.LoadGrammar(new Grammar(builder));

    // Recognition / rejection callbacks.
    engine.SpeechRecognized += SreSpeechRecognized;
    engine.SpeechRecognitionRejected += SreSpeechRecognitionRejected;
    return engine;
}
// Compiles a custom grammar string into a GrammarBuilder.
// Each '|'-separated sentence becomes one alternative; within a sentence,
// vocabulary keys expand to their word lists, "#Dictation" / "#WildCard"
// insert dictation/wildcard elements, and any other non-empty token is a
// literal word.
public GrammarBuilder GetGrammar_Custom(string grammar)
{
    var alternatives = new Choices();
    foreach (string sentence in grammar.Split('|'))
    {
        var sentenceBuilder = new GrammarBuilder();
        foreach (string token in sentence.Split(' '))
        {
            // Vocabulary lookup takes precedence over the special tokens.
            if (m_vocabulories.ContainsKey(token))
            {
                sentenceBuilder.Append(new Choices(m_vocabulories[token].ToArray()));
            }
            else if (token == "#Dictation")
            {
                sentenceBuilder.AppendDictation();
            }
            else if (token == "#WildCard")
            {
                sentenceBuilder.AppendWildcard();
            }
            else if (token != "")
            {
                sentenceBuilder.Append(token);
            }
        }
        alternatives.Add(sentenceBuilder);
    }

    var result = new GrammarBuilder(alternatives) { Culture = m_culture };
    Console.WriteLine(result.DebugShowPhrases);
    return result;
}
// Creates and configures the recognition engine for instrument phrases.
// Silently leaves the object unusable (valid stays false) when the recognizer
// with RecognizerId is not installed.
public SpeechRecogniser()
{
    RecognizerInfo ri = SpeechRecognitionEngine.InstalledRecognizers().Where(r => r.Id == RecognizerId).FirstOrDefault();
    if (ri == null) return;
    sre = new SpeechRecognitionEngine(ri.Id);
    // Build a simple grammar of shapes, colors, and some simple program control
    var instruments = new Choices();
    foreach (var phrase in InstrumentPhrases)
        instruments.Add(phrase.Key);
    var objectChoices = new Choices();
    objectChoices.Add(instruments);
    var actionGrammar = new GrammarBuilder();
    //actionGrammar.AppendWildcard();
    actionGrammar.Append(objectChoices);
    var gb = new GrammarBuilder();
    gb.Append(actionGrammar);
    var g = new Grammar(gb);
    sre.LoadGrammar(g);
    sre.SpeechRecognized += sre_SpeechRecognized;
    sre.SpeechHypothesized += sre_SpeechHypothesized;
    sre.SpeechRecognitionRejected += new EventHandler<SpeechRecognitionRejectedEventArgs>(sre_SpeechRecognitionRejected);
    // NOTE(review): StartDMO runs on its own thread — presumably it drives the
    // audio capture pipeline; confirm why it must not run on this thread.
    var t = new Thread(StartDMO);
    t.Start();
    valid = true;
}
// Creates an en-US recognition engine with a four-word demo vocabulary and
// starts continuous recognition from the default microphone.
// Failures are reported through label1 rather than thrown.
public void initRS()
{
    try
    {
        // NOTE(review): the engine is held only by this local; once initRS
        // returns nothing roots it, so GC may collect it and silently stop
        // recognition. Consider storing it in a field — confirm intended lifetime.
        SpeechRecognitionEngine sre = new SpeechRecognitionEngine(new CultureInfo("en-US"));
        var words = new Choices();
        words.Add("Hello");
        words.Add("Jump");
        words.Add("Left");
        words.Add("Right");
        var gb = new GrammarBuilder();
        gb.Culture = new System.Globalization.CultureInfo("en-US");
        gb.Append(words);
        Grammar g = new Grammar(gb);
        sre.LoadGrammar(g);
        sre.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);
        sre.SetInputToDefaultAudioDevice();
        sre.RecognizeAsync(RecognizeMode.Multiple);
    }
    catch (Exception e)
    {
        label1.Text = "init RS Error : " + e.ToString();
    }
}
// Builds the "add a class" command grammar:
// <pleasantries> <command> <CLASS> <prep> <Time><year>
// Pleasantries: I'd like to, please, I want to, would you
// Command: Add, Remove; Class: a class, this class, ...; When: to Spring 2012
// All verbs below carry the same semantic value (CommandTypes.Add).
private GrammarBuilder addCommand()
{
    var commandChoices = new Choices();
    foreach (string verb in new[] { "add", "take", "put" })
    {
        commandChoices.Add(new SemanticResultValue(verb, (int)CommandTypes.Add));
    }
    var commandKey = new SemanticResultKey(Slots.Command.ToString(), commandChoices);

    // Assemble the full command; pleasantries/course/semester are optional (0, 1).
    var finalCommand = new GrammarBuilder();
    finalCommand.Append(this.pleasantries, 0, 1);
    finalCommand.Append(commandKey);
    finalCommand.Append(this.course, 0, 1);
    finalCommand.Append(this.semester, 0, 1);
    return finalCommand;
}
// Builds the playback-control grammar: each group below lists the synonymous
// phrasings for one action (next / previous / pause / stop / resume).
public Grammar BuildGrammar()
{
    string[][] phraseGroups =
    {
        new[] { "next song", "play the next song", "skip this song", "play next song" },
        new[] { "last song", "previous song", "play the last song", "play the previous song" },
        new[] { "pause song", "pause this song", "pause song playback" },
        new[] { "stop song", "stop song playback", "stop the music" },
        new[] { "resume playback", "resume song", "resume playing" },
    };

    var root = new Choices();
    foreach (var group in phraseGroups)
    {
        var builder = new GrammarBuilder();
        builder.Append(new Choices(group));
        root.Add(builder);
    }
    return new Grammar(new GrammarBuilder(root));
}
// Rebuilds the recognizer's grammars from every loaded syllabus: one command
// phrase per syllabus line (grammar "Syllabus"), plus a pronunciation
// dictation grammar ("Random") to catch free-form speech.
internal void LoadCurrentSyllabus(SyllabusTracker syllabusTracker)
{
    if (_speechRecognitionEngine == null) return; // not currently running recognition
    _speechRecognitionEngine.RequestRecognizerUpdate();
    _speechRecognitionEngine.UnloadAllGrammars();
    // new choices consolidation for commands - one command per syllabus file line
    var commandLoad = new Choices();
    foreach (var baseSyllabus in syllabusTracker.Syllabi)
    {
        foreach (var command in baseSyllabus.Commands)
        {
            commandLoad.Add(command);
        }
    }
    // add commands - should be per input language, but now English
    VoiceCommands.AddCommands(commandLoad);
    var gBuilder = new GrammarBuilder();
    gBuilder.Append(commandLoad);
    var grammar = new Grammar(gBuilder) { Name = "Syllabus" };
    _speechRecognitionEngine.LoadGrammar(grammar);
    // Dictation grammar catches non-command speech so it can be told apart
    // from syllabus commands by grammar name.
    var dictgrammar = new DictationGrammar("grammar:dictation#pronunciation") { Name = "Random" };
    _speechRecognitionEngine.LoadGrammar(dictgrammar);
}
// Console entry point: announces readiness through the synthesizer, then
// listens for the start/stop command phrases until "done" is set by the
// recognition handler.
static void Main(string[] args)
{
    try
    {
        ss.SetOutputToDefaultAudioDevice();
        Console.WriteLine("\n(Speaking: I am awake)");
        ss.Speak("I am awake");

        CultureInfo ci = new CultureInfo("en-us");
        sre = new SpeechRecognitionEngine(ci);
        sre.SetInputToDefaultAudioDevice();
        sre.SpeechRecognized += sre_SpeechRecognized;

        // Command phrases that start/stop the recorder.
        Choices ch_StartStopCommands = new Choices();
        ch_StartStopCommands.Add("Alexa record");
        ch_StartStopCommands.Add("speech off");
        ch_StartStopCommands.Add("klatu barada nikto");
        GrammarBuilder gb_StartStop = new GrammarBuilder();
        gb_StartStop.Append(ch_StartStopCommands);
        Grammar g_StartStop = new Grammar(gb_StartStop);
        sre.LoadGrammarAsync(g_StartStop);
        sre.RecognizeAsync(RecognizeMode.Multiple); // multiple grammars

        // BUGFIX: the original spin-wait ("while (done == false) { ; }")
        // pegged a CPU core; sleeping briefly between polls is externally
        // identical but lets the machine breathe.
        while (done == false)
        {
            System.Threading.Thread.Sleep(50);
        }

        Console.WriteLine("\nHit <enter> to close shell\n");
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        Console.ReadLine();
    }
}
// Configures synthesis voice and recognition glossary from the given profile
// and starts continuous recognition on the default microphone.
// Shows a message box and aborts when the profile has no trigger phrases.
public void load_listen(VI_Profile profile, VI_Settings settings, ListView statusContainer)
{
    this.profile = profile;
    this.settings = settings;
    this.statusContainer = statusContainer;

    vi_syn = profile.synth;
    vi_syn.SelectVoice(settings.voice_info);
    vi_sre = new SpeechRecognitionEngine(settings.recognizer_info);

    // Collect every trigger phrase from the profile into one glossary.
    var triggerPhrases = new List<string>();
    foreach (VI_Phrase trigger in profile.Profile_Triggers)
    {
        triggerPhrases.Add(trigger.value);
    }
    if (triggerPhrases.Count == 0)
    {
        MessageBox.Show("You need to add at least one Trigger");
        return;
    }

    var phraseGrammar = new GrammarBuilder();
    phraseGrammar.Append(new Choices(triggerPhrases.ToArray()));
    vi_sre.LoadGrammar(new Grammar(phraseGrammar));

    //set event function
    vi_sre.SpeechRecognized += phraseRecognized;
    vi_sre.SpeechRecognitionRejected += _recognizer_SpeechRecognitionRejected;
    vi_sre.SetInputToDefaultAudioDevice();
    vi_sre.RecognizeAsync(RecognizeMode.Multiple);
}
// Builds the menu voice recognizer: each entry below lists the spoken/cased
// variants of one menu command. The engine is only published to the field
// once it is fully configured.
public VoiceSelect()
{
    precision = .5;
    newWordReady = false;

    RecognizerInfo ri = GetKinectRecognizer();
    var engine = new SpeechRecognitionEngine(ri.Id);

    string[][] menuPhrases =
    {
        new[] { "select one", "SELECT ONE", "Select One" },
        new[] { "select two", "SELECT TWO", "Select Two" },
        new[] { "pause", "PAUSE" },
        new[] { "exit", "EXIT" },
        new[] { "single player", "SINGLE PLAYER" },
        new[] { "co op mode", "CO OP MODE" },
        new[] { "settings", "SETTINGS" },
        new[] { "instructions", "INSTRUCTIONS" },
        new[] { "statistics", "STATISTICS" },
        new[] { "Main Menu", "MAIN MENU" },
        new[] { "resume", "RESUME" },
        new[] { "restart level", "RESTART LEVEL" },
        new[] { "replay", "REPLAY" },
        new[] { "next", "NEXT" },
        new[] { "Easy", "EASY" },
        new[] { "Hard", "HARD" },
    };

    var vocabulary = new Choices();
    foreach (var variants in menuPhrases)
    {
        vocabulary.Add(variants);
    }

    // Culture must match the Kinect recognizer's culture.
    var gb = new GrammarBuilder { Culture = ri.Culture };
    gb.Append(vocabulary);

    // Create the actual Grammar instance, and then load it into the speech recognizer.
    engine.LoadGrammar(new Grammar(gb));

    engine.SpeechRecognized += phraseRecognized;
    engine.SpeechHypothesized += phraseHyphothesized;
    engine.SpeechRecognitionRejected += phraseRejected;

    speechRec = engine;
}
// Builds the intro-dialog grammar: a single top-level choice between a major,
// a graduation year, and a yes/no answer, each tagged with its semantic slot.
public IntroGrammar()
{
    // Majors (only Computer Science for now), tagged "CSC".
    var majors = new Choices();
    majors.Add(new SemanticResultValue("Computer Science", "CSC"));
    var majorKey = new SemanticResultKey(Slots.Major.ToString(), majors);

    // Graduation years 2001-2019, each carrying its integer value.
    var years = new Choices();
    for (int i = 2001; i < 2020; i++)
    {
        years.Add(new SemanticResultValue(i.ToString(), i));
    }
    var yearKey = new SemanticResultKey(Slots.GradYear.ToString(), years);

    // Yes/no synonyms normalised to "yes" / "no".
    var yesOrNo = new Choices();
    foreach (string word in new[] { "yes", "yeah", "yep" })
    {
        yesOrNo.Add(new SemanticResultValue(word, "yes"));
    }
    foreach (string word in new[] { "no", "nope" })
    {
        yesOrNo.Add(new SemanticResultValue(word, "no"));
    }
    var yesNoKey = new SemanticResultKey(Slots.YesNo.ToString(), yesOrNo);

    var options = new Choices();
    options.Add(majorKey);
    options.Add(yearKey);
    options.Add(yesNoKey);

    var builder = new GrammarBuilder();
    builder.Append(options);
    grammar = new Grammar(builder);
}
// Window constructor: sets up config persistence; the speech recognizer and
// render-loop wiring below the early return are currently disabled.
public MainWindow()
{
    InitializeComponent();
    var config = new JsonConfigHandler( System.IO.Path.Combine( Environment.GetFolderPath( Environment.SpecialFolder.ApplicationData ), "LeagueTag" ) );
    //config.Populate();
    config.Save();
    //config.Save(
    // NOTE(review): this early return makes everything below unreachable — the
    // speech engine, recognition events and the Rendering hook are never set
    // up. Presumably left in while debugging the config handler; confirm
    // whether it should be removed.
    return;
    var engine = new SpeechRecognitionEngine();
    var builder = new GrammarBuilder();
    builder.Append( "tag" );
    builder.Append( new Choices( "baron", "dragon" ) );
    engine.RequestRecognizerUpdate();
    engine.LoadGrammar( new Grammar( builder ) );
    engine.SpeechRecognized += engine_SpeechRecognized;
    engine.SetInputToDefaultAudioDevice();
    engine.RecognizeAsync( RecognizeMode.Multiple );
    CompositionTarget.Rendering += CompositionTarget_Rendering;
    this.DataContext = this;
}
// Creates the recognition engine for the given recognizer and loads the
// celestial-body vocabulary, then starts audio capture on its own thread.
void BuildSpeechEngine(RecognizerInfo rec)
{
    _speechEngine = new SpeechRecognitionEngine(rec.Id);

    var vocabulary = new Choices();
    foreach (string word in new[] { "venus", "mars", "earth", "jupiter", "sun" })
    {
        vocabulary.Add(word);
    }

    var builder = new GrammarBuilder { Culture = rec.Culture };
    builder.Append(vocabulary);
    _speechEngine.LoadGrammar(new Grammar(builder));

    // Hypothesized: partial match of a longer phrase.
    _speechEngine.SpeechHypothesized += SpeechEngineSpeechHypothesized;
    // Recognized: input matched a loaded, enabled grammar.
    _speechEngine.SpeechRecognized += _speechEngineSpeechRecognized;
    // Rejected: input matched no loaded, enabled grammar.
    _speechEngine.SpeechRecognitionRejected += _speechEngineSpeechRecognitionRejected;

    // C# threads are MTA by default; calling RecognizeAsync on this thread
    // would raise a COM exception, so capture starts on a dedicated thread.
    var t = new Thread(StartAudioStream);
    t.Start();
}
// Builds the music-library grammar: "play song <title>", "shuffle all songs",
// and — when playlists exist — "play playlist <name>" / "shuffle playlist <name>".
// Song/playlist branches are only added when non-empty, because an empty
// Choices makes Grammar construction fail.
public Grammar BuildGrammar()
{
    var topLevel = new Choices();

    // Songs
    if (SongHelper.SongCount() > 0)
    {
        var playSong = new GrammarBuilder("play song");
        playSong.Append(SongHelper.GenerateSongChoices());
        topLevel.Add(playSong);
    }

    topLevel.Add(new GrammarBuilder("shuffle all songs"));

    // Playlists
    if (SongHelper.PlaylistCount() > 0)
    {
        var playPlaylist = new GrammarBuilder("play playlist");
        playPlaylist.Append(SongHelper.GeneratePlaylistChoices());
        topLevel.Add(playPlaylist);

        var shufflePlaylist = new GrammarBuilder("shuffle playlist");
        shufflePlaylist.Append(SongHelper.GeneratePlaylistChoices());
        topLevel.Add(shufflePlaylist);
    }

    return new Grammar(new GrammarBuilder(topLevel));
}
// Creates the Kinect speech recognizer with the record/replay command
// vocabulary and wires recognition lifecycle events.
private SpeechRecognitionEngine CreateSpeechRecognizer()
{
    //set recognizer info
    RecognizerInfo ri = GetKinectRecognizer();
    var engine = new SpeechRecognitionEngine(ri.Id);

    // Command vocabulary the program reacts to.
    var commands = new Choices();
    foreach (string command in new[] { "Record", "Store", "Replay", "Stop", "Learn", "Finish" })
    {
        commands.Add(command);
    }

    // Culture must match the recognizer's language / country-region.
    var builder = new GrammarBuilder { Culture = ri.Culture };
    builder.Append(commands);
    engine.LoadGrammar(new Grammar(builder));

    // Events for recognising, hypothesising and rejecting speech.
    engine.SpeechRecognized += SreSpeechRecognized;
    engine.SpeechHypothesized += SreSpeechHypothesized;
    engine.SpeechRecognitionRejected += SreSpeechRecognitionRejected;
    return engine;
}
// Builds a "spell it out" grammar: alternating, optional blocks of single
// letters (A-Z, repeatable up to 200 times) and free dictation (up to 10
// times), with the whole pattern repeated SpellGrammarLength times
// (read from app configuration). Exceptions are logged and rethrown.
public GrammarBuilder GetSpellGrammar()
{
    try
    {
        var dictationBuilder = new GrammarBuilder // creating a new grammar builder
        {
            Culture = new CultureInfo(Settings.CultureInfo)
        };
        dictationBuilder.AppendDictation(); // append dictation to the created grammar builder
        var dictaphoneGb = new GrammarBuilder
        {
            Culture = new CultureInfo(Settings.CultureInfo)
        };
        // Single spoken letters.
        dictaphoneGb.Append(new Choices("A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"));
        var dictation = new GrammarBuilder
        {
            Culture = new CultureInfo(Settings.CultureInfo)
        };
        var length = Convert.ToInt32(ConfigurationManager.AppSettings.Get("SpellGrammarLength"));
        for (var i = 0; i < length; i++)
        {
            // Minimum repeat 0 makes both the letter block and the dictation
            // block optional in every iteration.
            dictation.Append(dictaphoneGb, 0, 200);
            dictation.Append(dictationBuilder, 0 /* minimum repeat */, 10 /* maximum repeat*/ );
        }
        return dictation;
    }
    catch (Exception ex)
    {
        Log.ErrorLog(ex);
        throw;
    }
}
// Form constructor: wires recognition to the default microphone and loads a
// grammar of base commands plus "set font <name>" and "set size 0..999".
public Main()
{
    InitializeComponent();
    fontList.SelectedIndex = 0;
    squareCenter = squareButton.Checked;

    speechEngine.SpeechRecognized += speechEngine_SpeechRecognized;
    speechEngine.SetInputToDefaultAudioDevice();

    var commands = new Choices("primes", "squares", "dots", "numbers");
    foreach (string fontName in fontList.Items)
    {
        commands.Add("set font " + fontName);
    }
    for (int size = 0; size <= 999; ++size)
    {
        commands.Add("set size " + size);
    }

    speechEngine.LoadGrammar(new Grammar(new GrammarBuilder(commands)));
    speechEngine.RecognizeAsync(RecognizeMode.Multiple);
    init();
}
// Builds a single-phrase grammar containing only the configured
// "stop listening" command and stores it in the Grammar property.
public void CreateGrammar()
{
    var builder = new GrammarBuilder();
    builder.Append(Config.StopListening);
    Grammar = new Grammar(builder);
}
// Builds a grammar that matches one of the known website names (read from the
// fnc_brwsr_websites file) surrounded by optional free dictation on each side.
// Exceptions are logged and rethrown.
// NOTE(review): this overwrites the global Settings.CultureInfo to "en-GB" as
// a side effect — confirm callers expect that.
public GrammarBuilder GetWebsiteNamesGrammar()
{
    try
    {
        Settings.CultureInfo = "en-GB";
        var webSiteNames = new List<string>();
        // FileShare.ReadWrite lets the site list be read even while another
        // process has the file open for writing.
        using (var fs = File.Open(VbwFileManager.FilePath() + "fnc_brwsr_websites" + VbwFileManager.FileExtension(), FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (var bs = new BufferedStream(fs))
        using (var sr = new StreamReader(bs))
        {
            string line;
            while ((line = sr.ReadLine()) != null)
            {
                webSiteNames.Add(line);
            }
        }
        var dictationBuilder = new GrammarBuilder // creating a new grammar builder
        {
            Culture = new CultureInfo(Settings.CultureInfo)
        };
        dictationBuilder.AppendDictation(); // append dictation to the created grammar builder
        var dictaphoneGb = new GrammarBuilder
        {
            Culture = new CultureInfo(Settings.CultureInfo)
        };
        // Optional dictation, then a required website name, then optional dictation.
        dictaphoneGb.Append(dictationBuilder, 0 /* minimum repeat */, 10 /* maximum repeat*/ );
        dictaphoneGb.Append(new Choices(webSiteNames.ToArray()));
        dictaphoneGb.Append(dictationBuilder, 0 /* minimum repeat */, 10 /* maximum repeat*/ );
        return dictaphoneGb;
    }
    catch (Exception ex)
    {
        Log.ErrorLog(ex);
        throw;
    }
}
// Builds the grammar "<Change|Set> Brightness To <level>" where level is any
// integer from -255 to 255, exposed under the semantic key "brightness".
private Grammar BrightnessGrammar()
{
    var levels = new Choices();
    for (int level = -255; level <= 255; level++)
    {
        // Each spoken number carries its integer value as the semantic result.
        levels.Add(new GrammarBuilder(new SemanticResultValue(level.ToString(), level)));
    }

    var phrase = new GrammarBuilder(new Choices("Change", "Set"));
    phrase.Append("Brightness");
    phrase.Append("To");
    phrase.Append(new GrammarBuilder(new SemanticResultKey("brightness", levels)));

    return new Grammar(phrase) { Name = "Set Brightness" };
}
// Form constructor: shows the main menu, prepares the (hidden) application
// data folder on first run, and starts listening for start/finish/close.
public The_Road_To_100()
{
    InitializeComponent();
    PmainManu.BringToFront();
    PmainManu.Dock = DockStyle.Fill;
    organizeMenu();

    // Returning user: the profile folder already exists.
    if (Directory.Exists(@"C:\The Road To 100\user.ID 1"))
    {
        setPersonal_Screen();
        Bcontinue.Enabled = true;
    }
    else
    {
        // First run: create the application folder and hide it.
        DirectoryInfo di = Directory.CreateDirectory(@"C:\The Road To 100");
        di.Create();
        di.Attributes = FileAttributes.Directory | FileAttributes.Hidden;
    }

    var commands = new Choices();
    commands.Add(new string[] { "start", "finish", "close" });
    var builder = new GrammarBuilder();
    builder.Append(commands);
    sre.LoadGrammarAsync(new Grammar(builder));
    sre.SetInputToDefaultAudioDevice();
    sre.SpeechRecognized += sre_src;
}
// Builds a single-phrase grammar containing only the computer's configured
// wake name and stores it in the Grammar property.
public void CreateGrammar()
{
    var builder = new GrammarBuilder();
    builder.Append(Config.ComputerName);
    Grammar = new Grammar(builder);
}
// Initialises speech recognition with an installed recognizer matching the
// configured language; does nothing when no such recognizer exists.
// (Method name typo is preserved — callers depend on it.)
private void CreateSpeechRecongnition()
{
    var recognizerInfo = SpeechRecognitionEngine.InstalledRecognizers()
        .Where(a => a.Culture.Name == this.language)
        .FirstOrDefault();
    if (recognizerInfo == null)
    {
        return;
    }

    this.speechEngine = new SpeechRecognitionEngine(recognizerInfo.Id);

    Choices recognizerString = new Choices();
    recognizerString.Add(this.words);

    // Specify the culture to match the recognizer in case we are running in a
    // different culture.
    GrammarBuilder grammarBuilder = new GrammarBuilder { Culture = recognizerInfo.Culture };
    grammarBuilder.Append(recognizerString);

    // Create the actual Grammar instance and load the recognition strings
    // into the speech recognizer.
    this.speechEngine.LoadGrammarAsync(new Grammar(grammarBuilder));
    this.speechEngine.SpeechRecognized += SreSpeechRecognized;
    this.speechEngine.SetInputToDefaultAudioDevice();
    this.speechEngine.RecognizeAsync(RecognizeMode.Multiple);
}
// Prompts the user to pick a car model and loads a grammar of the supported
// models into the shared recognizer.
private void Window1_Load()
{
    obj.SpeakAsync("hello, Please Choose the Model of car....");

    // Simple grammar listing the supported car models.
    var models = new Choices(new string[] { "toyota", "suzuki", "honda", "kia", "bmw" });
    var builder = new GrammarBuilder();
    builder.Append(models);

    // Load the grammar, enable the recognizer and hook recognition.
    recognizer.LoadGrammar(new Grammar(builder));
    recognizer.Enabled = true;
    recognizer.SpeechRecognized += sre_SpeechRecognized;
}
// Wraps the grammar fragment from getGrammar() in an en-GB builder and
// returns the compiled Grammar.
public Grammar BuildGrammar()
{
    var builder = new GrammarBuilder
    {
        Culture = new System.Globalization.CultureInfo("en-GB")
    };
    builder.Append(getGrammar());
    return new Grammar(builder);
}
// Builds one Grammar per sound group: each string[] below lists the common
// English spellings ("graphemes") of a single phoneme, so each grammar
// recognises any spelling of that sound. Grammars are returned in the order
// the groups are declared.
public Grammar[] CreateGrammar_Alphabetic()
{
    List <Grammar> grammars = new List <Grammar>();
    List <string[]> alphabet = new List <string[]>();
    //consonants
    alphabet.Add(new string[] { "b", "bb" });
    alphabet.Add(new string[] { "d", "dd", "ed" });
    alphabet.Add(new string[] { "f", "ff", "ph", "gh", "lf", "ft" });
    alphabet.Add(new string[] { "g", "gg", "gh", "gu", "gue" });
    alphabet.Add(new string[] { "h", "wh" });
    alphabet.Add(new string[] { "j", "ge", "g", "dge", "di", "gg" });
    alphabet.Add(new string[] { "k", "c", "ch", "cc", "lk", "qu", "ck", "X" });
    alphabet.Add(new string[] { "l", "ll" });
    alphabet.Add(new string[] { "m", "mm", "mb", "mn", "lm" });
    alphabet.Add(new string[] { "n", "nn", "kn", "gn", "pn", "ng", "ngue" });
    alphabet.Add(new string[] { "p", "pp" });
    alphabet.Add(new string[] { "r", "rr", "wr", "rh" });
    alphabet.Add(new string[] { "s", "ss", "c", "sc", "ps", "st", "ce", "se" });
    alphabet.Add(new string[] { "t", "tt", "th", "ed" });
    alphabet.Add(new string[] { "v", "f", "ph", "ve" });
    alphabet.Add(new string[] { "w", "wh", "u", "o" });
    alphabet.Add(new string[] { "y", "i", "j" });
    alphabet.Add(new string[] { "z", "zz", "s", "ss", "x", "ze", "se" });
    //digraphs
    alphabet.Add(new string[] { "zh", "s", "si", "z" });
    alphabet.Add(new string[] { "ch", "tch", "tu", "ti", "te" });
    alphabet.Add(new string[] { "sh", "ce", "s", "ci", "si", "ch", "sci", "ti" });
    alphabet.Add(new string[] { "th" });
    //short vowels
    alphabet.Add(new string[] { "a", "ai" });
    alphabet.Add(new string[] { "e", "ea", "u", "ie", "ai", "a", "eo", "ei", "ai", "a", "eo", "ei", "ae", "ay" });
    alphabet.Add(new string[] { "i", "e", "o", "u", "ui", "y", "ie" });
    alphabet.Add(new string[] { "o", "a", "ho" });
    alphabet.Add(new string[] { "u", "o", "oo", "ou" });
    alphabet.Add(new string[] { "oo", "u", "ou", "o" });
    //long vowels
    alphabet.Add(new string[] { "ai", "a", "eigh", "aigh", "ay", "et", "ei", "et", "ei", "au", "a-e", "ea", "ey" });
    alphabet.Add(new string[] { "ee", "e", "ea", "y", "ey", "oe", "ie", "i", "ei", "eo", "ay" });
    alphabet.Add(new string[] { "i", "y", "igh", "ie", "uy", "ye", "ai", "is", "eigh", "i-e" });
    alphabet.Add(new string[] { "oa", "o-e", "o", "oe", "ow", "ough", "eau", "oo", "ew" });
    alphabet.Add(new string[] { "oo", "ew", "ue", "u-e", "oe", "ough", "ui", "o", "oeu", "ou" });
    alphabet.Add(new string[] { "u", "you", "ew", "iew", "yu", "eue", "eau", "ieu", "eu" });
    alphabet.Add(new string[] { "oi", "oy", "uoy" });
    alphabet.Add(new string[] { "ow", "ou", "ough" });
    alphabet.Add(new string[] { "er", "ar", "our", "or", "i", "e", "u", "ur", "re", "eur" });
    //R' controlled vowels
    alphabet.Add(new string[] { "air", "are", "ear", "ere", "eir", "ayer" });
    alphabet.Add(new string[] { "ar", "a", "au", "er", "ear" });
    alphabet.Add(new string[] { "ir", "er", "ur", "ear", "or", "our", "ur" });
    alphabet.Add(new string[] { "aw", "a", "or", "oor", "ore", "oar", "our", "augh", "ar", "ough", "au" });
    alphabet.Add(new string[] { "ear", "eer", "ere", "ier" });
    alphabet.Add(new string[] { "ure", "our" });
    foreach (var l in alphabet)
    {
        // NOTE(review): l is already a string[], so ToArray() is a redundant copy.
        string[] words = l.ToArray();
        Choices choice = new Choices(words);
        // Repeat count (0, 1) makes the whole sound optional within its grammar.
        GrammarBuilder builder = new GrammarBuilder(choice, 0, 1);
        Grammar grammar = new Grammar(builder);
        grammars.Add(grammar);
    }
    return(grammars.ToArray());
}
/// <summary>
/// Execute startup tasks: initialise the drawing surface, start the first
/// connected Kinect's skeleton stream, and — when an en-US recognizer is
/// installed — start color-word speech recognition from the Kinect microphone.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    brush = Brushes.White;
    // Create the drawing group we'll use for drawing
    this.drawingGroup = new DrawingGroup();
    // Create an image source that we can use in our image control
    this.imageSource = new DrawingImage(this.drawingGroup);
    // Display the drawing using our image control
    Image.Source = this.imageSource;
    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug,
    // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit (See components in Toolkit Browser).
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected)
        {
            this.sensor = potentialSensor;
            break;
        }
    }
    if (null != this.sensor)
    {
        // Turn on the skeleton stream to receive skeleton frames
        this.sensor.SkeletonStream.Enable();
        // Add an event handler to be called whenever there is new color frame data
        this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;
        // Start the sensor!
        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            // Another process may own the sensor; treat it as unavailable.
            this.sensor = null;
        }
    }
    if (null == this.sensor)
    {
        this.statusBarText.Text = "Connect Kinect and try again";
        return;
    }
    //For audio recogninzer
    RecognizerInfo ri = (from recognizer in SpeechRecognitionEngine.InstalledRecognizers() where "en-US".Equals(recognizer.Culture.Name, StringComparison.OrdinalIgnoreCase) select recognizer).FirstOrDefault();
    if (ri != null)
    {
        this.speechEngine = new SpeechRecognitionEngine(ri.Id);
        //var fontsizes = new Choices();
        //fontsizes.Add(new SemanticResultValue("SMALL", "Small"));
        //fontsizes.Add(new SemanticResultValue("MEDIUM", "Medium"));
        //fontsizes.Add(new SemanticResultValue("LARGE", "Large"));
        //gb.Append(fontsizes);
        // Color vocabulary recognised by the app.
        var colors = new Choices();
        colors.Add("yellow");
        colors.Add("red");
        colors.Add("pink");
        colors.Add("green");
        //colors.Add("quit");
        //var quit = new Choices();
        //quit.Add("quit");
        var gb = new GrammarBuilder();
        gb.Culture = ri.Culture;
        gb.Append(colors);
        //gb.Append(quit);
        var g = new Grammar(gb);
        speechEngine.LoadGrammar(g);
        this.speechEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(speechEngine_SpeechRecognized);
        // 16 kHz, 16-bit mono PCM taken directly from the Kinect audio source.
        speechEngine.SetInputToAudioStream(sensor.AudioSource.Start(), new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        speechEngine.RecognizeAsync(RecognizeMode.Multiple);
        // "quit" is loaded as its own, separate grammar.
        var q = new GrammarBuilder();
        q.Append("quit");
        var quit = new Grammar(q);
        speechEngine.LoadGrammar(quit);
    }
}
//
//  public bool load_listen()
//
//  load_listen() establishes the speech recognition engine based on the command glossary stored within the
//  currently loaded Profile.  load_listen() may fail, returning Boolean FALSE, if a Profile's glossary does
//  not meet the engine's grammar requirements; load_listen() will also fail, returning Boolean FALSE, should
//  an exception occur that cannot be resolved within the method.  load_listen() will return Boolean TRUE upon
//  success.
//
//  Optimizations : 04.28.15
//
public bool load_listen()
{
    // Don't allocate anything if we have no phrases to hook.
    if (GAVPI.Profile.Profile_Triggers != null && GAVPI.Profile.Profile_Triggers.Count == 0)
    {
        MessageBox.Show("You need to add at least one Trigger");
        return (false);
    }
    synth = GAVPI.Profile.synth;
    synth.SelectVoice(GAVPI.Settings.voice_info);
    // NOTE(review): recognizer_info is passed both to the engine constructor
    // and assigned to Culture below — confirm its declared type supports both.
    speech_re = new SpeechRecognitionEngine(GAVPI.Settings.recognizer_info);
    GrammarBuilder phrases_grammar = new GrammarBuilder();
    // Grammer must match speech recognition language localization
    phrases_grammar.Culture = GAVPI.Settings.recognizer_info;
    List<string> glossory = new List<string>();
    // Add trigger phrases to glossory of voice recognition engine.
    foreach (Phrase trigger in GAVPI.Profile.Profile_Triggers)
    {
        glossory.Add(trigger.value);
    }
    phrases_grammar.Append(new Choices(glossory.ToArray()));
    speech_re.LoadGrammar(new Grammar(phrases_grammar));
    // event function hook
    speech_re.SpeechRecognized += phraseRecognized;
    speech_re.SpeechRecognitionRejected += _recognizer_SpeechRecognitionRejected;
    try
    {
        speech_re.SetInputToDefaultAudioDevice();
    }
    catch (InvalidOperationException exception)
    {
        // For the time being, we're only catching failures to address an input device (typically a
        // microphone).
        MessageBox.Show("Have you connected a microphone to this computer?\n\n" +
                        "Please ensure that you have successfull connected and configured\n" +
                        "your microphone before trying again.",
                        "I cannot hear you! (" + exception.Message + ")",
                        MessageBoxButtons.OK,
                        MessageBoxIcon.Exclamation,
                        MessageBoxDefaultButton.Button1);
        return (false);
    }
    speech_re.RecognizeAsync(RecognizeMode.Multiple);
    // Install Push to talk key hooks.
    KeyboardHook.KeyDown += pushtotalk_keyDownHook;
    KeyboardHook.KeyUp += pushtotalk_keyUpHook;
    KeyboardHook.InstallHook();
    // Any mode other than Hold/Toggle/Single means the microphone is always live.
    if (GAVPI.Settings.pushtotalk_mode != "Hold" &&
        GAVPI.Settings.pushtotalk_mode != "Toggle" &&
        GAVPI.Settings.pushtotalk_mode != "Single")
    {
        pushtotalk_active = true;
    }
    // We have successfully establish an instance of a SAPI engine with a well-formed grammar.
    IsListening = true;
    return (true);
}
/// <summary>
/// Starts the voice recognizer: resolves the configured recognizer engine by ID,
/// loads a grammar built from the server's voice-command word list, hooks the
/// recognition events, and attaches either the selected Kinect's audio stream or
/// the default audio device before starting continuous recognition.
/// </summary>
/// <exception cref="Exception">Thrown when the configured recognizer ID is not installed.</exception>
public void launchVoiceRecognizer()
{
    // Resolve the recognizer the user configured by its engine ID.
    RecognizerInfo recognizer = null;
    foreach (RecognizerInfo candidate in SpeechRecognitionEngine.InstalledRecognizers())
    {
        if (candidate.Id == server.serverMasterOptions.audioOptions.recognizerEngineID)
        {
            recognizer = candidate;
            break;
        }
    }

    if (recognizer == null)
    {
        throw new Exception("Couldn't find voice recognizer core.");
    }

    // FIX: construct the engine from the RecognizerInfo we just resolved instead
    // of re-looking the same ID up a second time (as the original did).
    engine = new SpeechRecognitionEngine(recognizer);

    // One-of grammar over every configured voice command word.
    Choices vocab = new Choices();
    for (int i = 0; i < server.serverMasterOptions.voiceCommands.Count; i++)
    {
        vocab.Add(server.serverMasterOptions.voiceCommands[i].recognizedWord);
    }

    GrammarBuilder gb = new GrammarBuilder { Culture = recognizer.Culture };
    gb.Append(vocab);
    engine.LoadGrammar(new Grammar(gb));

    // Setup events.
    engine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(engine_SpeechRecognized);
    engine.SpeechHypothesized += new EventHandler<SpeechHypothesizedEventArgs>(engine_SpeechHypothesized);
    engine.SpeechRecognitionRejected += new EventHandler<SpeechRecognitionRejectedEventArgs>(engine_SpeechRecognitionRejected);

    // Turn off adaptation of the acoustic model, which can degrade recognizer
    // accuracy over time (per the Kinect speech recognition sample).
    engine.UpdateRecognizerSetting("AdaptationOn", 0);

    if (server.serverMasterOptions.audioOptions.sourceID >= 0 &&
        server.serverMasterOptions.audioOptions.sourceID < server.kinects.Count)
    {
        // 16 kHz, 16-bit mono PCM from the selected Kinect's microphone array.
        KinectAudioSource source =
            server.kinects[server.serverMasterOptions.audioOptions.sourceID].kinect.AudioSource;
        audioStream = source.Start();
        engine.SetInputToAudioStream(audioStream,
            new Microsoft.Speech.AudioFormat.SpeechAudioFormatInfo(
                Microsoft.Speech.AudioFormat.EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
    }
    else
    {
        engine.SetInputToDefaultAudioDevice();
    }

    engine.RecognizeAsync(RecognizeMode.Multiple);
}
public static void SetAssistantName(string name) { optionalComponent = new GrammarBuilder(new GrammarBuilder(name), 0, 1); }
/// <summary>
/// Kinect enabled apps should customize which Kinect services to initialize here:
/// colour, depth and skeleton streams, gesture recognition, and — when a Kinect
/// speech recognizer pack is installed — a voice-command grammar wired to the
/// sensor's audio stream.
/// </summary>
/// <param name="kinectSensorManager">Manager wrapping the sensor configuration.</param>
/// <param name="sensor">The physical Kinect sensor.</param>
private void InitializeKinectServices(KinectSensorManager kinectSensorManager, KinectSensor sensor)
{
    // Application should enable all streams first.

    // Colour stream.
    kinectSensorManager.ColorFormat = ColorImageFormat.RgbResolution640x480Fps30;
    kinectSensorManager.ColorStreamEnabled = true;

    // Depth stream, with skeleton smoothing parameters.
    kinectSensorManager.DepthStreamEnabled = true;
    kinectSensorManager.TransformSmoothParameters = new TransformSmoothParameters
    {
        Smoothing = 0.5f,
        Correction = 0.5f,
        Prediction = 0.5f,
        JitterRadius = 0.05f,
        MaxDeviationRadius = 0.04f
    };

    // Skeleton stream.
    sensor.SkeletonFrameReady += OnSkeletonFrameReady;
    kinectSensorManager.SkeletonStreamEnabled = true;

    // Gesture recognizer.
    gestureController = new GestureController();
    gestureController.GestureRecognized += OnGestureRecognized;

    kinectSensorManager.KinectSensorEnabled = true;

    RecognizerInfo recognizerInfo = GetKinectRecognizer();
    if (recognizerInfo != null)
    {
        this.speechEngine = new SpeechRecognitionEngine(recognizerInfo.Id);

        // Spoken commands mapped to semantic tags consumed by SpeechRecognized.
        var commands = new Choices();
        commands.Add(new SemanticResultValue("lights on", "LIGHTS_ON"));
        commands.Add(new SemanticResultValue("lights off", "LIGHTS_OFF"));
        commands.Add(new SemanticResultValue("screen up", "SCREEN_UP"));
        commands.Add(new SemanticResultValue("screen down", "SCREEN_DOWN"));
        commands.Add(new SemanticResultValue("projector on", "PROJECTOR_ON"));
        commands.Add(new SemanticResultValue("projector off", "PROJECTOR_OFF"));
        commands.Add(new SemanticResultValue("presentation mode", "PRESENTATION_MODE"));
        commands.Add(new SemanticResultValue("blackboard mode", "BLACKBOARD_MODE"));

        var builder = new GrammarBuilder { Culture = recognizerInfo.Culture };
        builder.Append(commands);
        speechEngine.LoadGrammar(new Grammar(builder));

        speechEngine.SpeechRecognized += SpeechRecognized;
        speechEngine.SpeechRecognitionRejected += SpeechRejected;

        // 16 kHz, 16-bit mono PCM from the Kinect microphone array.
        speechEngine.SetInputToAudioStream(
            sensor.AudioSource.Start(),
            new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        speechEngine.RecognizeAsync(RecognizeMode.Multiple);
    }
    else
    {
        // No Kinect speech recognizer installed; voice commands are disabled.
        //this.statusBarText.Text = Properties.Resources.NoSpeechRecognizer;
    }

    if (!kinectSensorManager.KinectSensorAppConflict)
    {
        // addition configuration, as needed
    }
}
/// <summary>
/// Initialises the speech recognition engine: selects a voice recognition pack
/// (the user-specified location if set, otherwise the OS's default English
/// locale), attaches the default microphone, loads the static command grammar,
/// and hooks the recognised-speech handler.  Sets 'initialised' to true only
/// when every step succeeds; on any failure the error is written to the console
/// and the method returns with 'initialised' left false.
/// </summary>
public void initialiseSpeechEngine()
{
    initialised = false;

    if (location != null && location.Length > 0)
    {
        // A specific locale (e.g. "en-GB") was configured by the user.
        try
        {
            Console.WriteLine("Attempting to initialise speech recognition for user specified location " + location);
            initWithLocale(location);
            Console.WriteLine("Success");
        }
        catch (Exception e)
        {
            Console.WriteLine("Unable to initialise speech engine with voice recognition pack for location " + location +
                ". Check that SpeechPlatformRuntime.msi and MSSpeech_SR_" + location + "_TELE.msi are installed.");
            Console.WriteLine("Exception message: " + e.Message);
            return;
        }
    }
    else
    {
        // No explicit location: fall back to the OS's default English locale.
        try
        {
            Console.WriteLine("Attempting to initialise speech recognition for any English locale");
            initWithLocale(defaultLocale);
            Console.WriteLine("Success");
        }
        catch (Exception e)
        {
            Console.WriteLine("Unable to initialise speech engine with the OS's default English voice recognition pack (location name " + defaultLocale + "). " +
                "Check that SpeechPlatformRuntime.msi and at least one of MSSpeech_SR_en-GB_TELE.msi, MSSpeech_SR_en-US_TELE.msi, " +
                "MSSpeech_SR_en-AU_TELE.msi, MSSpeech_SR_en-CA_TELE.msi or MSSpeech_SR_en-IN_TELE.msi are installed.");
            Console.WriteLine("Exception message: " + e.Message);
            return;
        }
    }

    // Attach the microphone before loading any grammars.
    try
    {
        sre.SetInputToDefaultAudioDevice();
    }
    catch (Exception e)
    {
        Console.WriteLine("Unable to set default audio device");
        Console.WriteLine("Exception message: " + e.Message);
        return;
    }

    // Build the static (always-available) command grammar.
    // NOTE(review): validateAndAdd presumably filters/validates each phrase
    // before adding it to the Choices — confirm against its definition.
    try
    {
        Choices staticSpeechChoices = new Choices();
        validateAndAdd(HOWS_MY_TYRE_WEAR, staticSpeechChoices);
        validateAndAdd(HOWS_MY_TRANSMISSION, staticSpeechChoices);
        validateAndAdd(HOWS_MY_AERO, staticSpeechChoices);
        validateAndAdd(HOWS_MY_ENGINE, staticSpeechChoices);
        validateAndAdd(HOWS_MY_SUSPENSION, staticSpeechChoices);
        validateAndAdd(HOWS_MY_BRAKES, staticSpeechChoices);
        validateAndAdd(HOWS_MY_FUEL, staticSpeechChoices);
        validateAndAdd(HOWS_MY_PACE, staticSpeechChoices);
        validateAndAdd(HOW_ARE_MY_TYRE_TEMPS, staticSpeechChoices);
        validateAndAdd(WHAT_ARE_MY_TYRE_TEMPS, staticSpeechChoices);
        validateAndAdd(HOW_ARE_MY_BRAKE_TEMPS, staticSpeechChoices);
        validateAndAdd(WHAT_ARE_MY_BRAKE_TEMPS, staticSpeechChoices);
        validateAndAdd(HOW_ARE_MY_ENGINE_TEMPS, staticSpeechChoices);
        validateAndAdd(WHATS_MY_GAP_IN_FRONT, staticSpeechChoices);
        validateAndAdd(WHATS_MY_GAP_BEHIND, staticSpeechChoices);
        validateAndAdd(WHAT_WAS_MY_LAST_LAP_TIME, staticSpeechChoices);
        validateAndAdd(WHATS_MY_BEST_LAP_TIME, staticSpeechChoices);
        validateAndAdd(WHATS_MY_POSITION, staticSpeechChoices);
        validateAndAdd(WHATS_MY_FUEL_LEVEL, staticSpeechChoices);
        validateAndAdd(KEEP_QUIET, staticSpeechChoices);
        validateAndAdd(KEEP_ME_INFORMED, staticSpeechChoices);
        validateAndAdd(TELL_ME_THE_GAPS, staticSpeechChoices);
        validateAndAdd(DONT_TELL_ME_THE_GAPS, staticSpeechChoices);
        validateAndAdd(WHATS_THE_FASTEST_LAP_TIME, staticSpeechChoices);
        validateAndAdd(HOW_LONGS_LEFT, staticSpeechChoices);
        validateAndAdd(WHATS_THE_TIME, staticSpeechChoices);
        validateAndAdd(SPOT, staticSpeechChoices);
        validateAndAdd(DONT_SPOT, staticSpeechChoices);
        validateAndAdd(REPEAT_LAST_MESSAGE, staticSpeechChoices);
        validateAndAdd(HAVE_I_SERVED_MY_PENALTY, staticSpeechChoices);
        validateAndAdd(DO_I_HAVE_A_PENALTY, staticSpeechChoices);
        validateAndAdd(DO_I_STILL_HAVE_A_PENALTY, staticSpeechChoices);
        validateAndAdd(DO_I_HAVE_A_MANDATORY_PIT_STOP, staticSpeechChoices);
        validateAndAdd(WHAT_ARE_MY_SECTOR_TIMES, staticSpeechChoices);
        validateAndAdd(WHATS_MY_LAST_SECTOR_TIME, staticSpeechChoices);
        validateAndAdd(WHATS_THE_AIR_TEMP, staticSpeechChoices);
        validateAndAdd(WHATS_THE_TRACK_TEMP, staticSpeechChoices);
        validateAndAdd(RADIO_CHECK, staticSpeechChoices);

        // Grammar culture must match the selected recognition pack.
        GrammarBuilder staticGrammarBuilder = new GrammarBuilder();
        staticGrammarBuilder.Culture = cultureInfo;
        staticGrammarBuilder.Append(staticSpeechChoices);
        Grammar staticGrammar = new Grammar(staticGrammarBuilder);
        sre.LoadGrammar(staticGrammar);
    }
    catch (Exception e)
    {
        Console.WriteLine("Unable to configure speech engine grammar");
        Console.WriteLine("Exception message: " + e.Message);
        return;
    }

    // Start recognising immediately — do not wait out an initial-silence period.
    sre.InitialSilenceTimeout = TimeSpan.Zero;

    try
    {
        sre.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);
    }
    catch (Exception e)
    {
        Console.WriteLine("Unable to add event handler to speech engine");
        Console.WriteLine("Exception message: " + e.Message);
        return;
    }

    initialised = true;
}
/// <summary>
/// Builds a SpeechRecognitionEngine for the configured language: logs every
/// installed recognizer, picks the first whose culture name contains 'language',
/// loads a grammar built from all known command inputs, attaches the default
/// audio device and wires the recognised-speech handler.
/// </summary>
/// <returns>The fully configured recognition engine.</returns>
/// <exception cref="Exception">Thrown when no installed recognizer matches the language.</exception>
private SpeechRecognitionEngine InitSpeechRecognitionEngine()
{
    OMD.Main.Logger.Trace("InitSpeechRecognitionEngine()");

    SpeechRecognitionEngine _reco = null;
    CultureInfo cultureInfo = null;

    foreach (RecognizerInfo recoInfo in SpeechRecognitionEngine.InstalledRecognizers())
    {
        OMD.Main.Logger.Debug("Recoginzer:");
        OMD.Main.Logger.Debug(" Name: " + recoInfo.Name);
        OMD.Main.Logger.Debug(" Culture: " + recoInfo.Culture);
        OMD.Main.Logger.Debug(" Description: " + recoInfo.Description);
        OMD.Main.Logger.Debug(" ID: " + recoInfo.Id);
        OMD.Main.Logger.Debug("-------");
        Game.LogTrivial(" Name: " + recoInfo.Name);
        Game.LogTrivial(" Culture: " + recoInfo.Culture);
        Game.LogTrivial(" Description: " + recoInfo.Description);
        Game.LogTrivial(" ID: " + recoInfo.Id);
        Game.LogTrivial("");

        if (recoInfo.Culture.Name.Contains(language))
        {
            OMD.Main.Logger.Debug("Found recognizer matching the language.");
            _reco = new SpeechRecognitionEngine(recoInfo);
            cultureInfo = recoInfo.Culture;
            OMD.Main.Logger.Debug("Recognizer set.");
            break;
        }
    }
    OMD.Main.Logger.Debug("Recognizers browse...");

    // FIX: the original carried a redundant 'selectedReco' Boolean alongside
    // this null check; the null check alone is sufficient.
    if (_reco == null)
    {
        OMD.Main.Logger.Error("No recognizer selected for language " + language);
        throw new Exception("No recognizer selected for language " + language);
    }

    OMD.Main.Logger.Debug("Do choices...");
    Choices choices = new Choices();
    choices.Add(commandsPerInput.GetAllInputs());
    OMD.Main.Logger.Debug("Do choices... Done.");

    // Grammar culture must match the selected recognizer's culture.
    OMD.Main.Logger.Debug("Do GrammarBuilder...");
    GrammarBuilder gBuilder = new GrammarBuilder();
    gBuilder.Culture = cultureInfo;
    gBuilder.Append(choices);
    OMD.Main.Logger.Debug("Do GrammarBuilder... Done.");

    Grammar grammar = new Grammar(gBuilder);
    OMD.Main.Logger.Debug("Do Grammar... Done.");

    OMD.Main.Logger.Debug("Load Grammar...");
    _reco.LoadGrammar(grammar);
    OMD.Main.Logger.Debug("Load Grammar... Done.");

    OMD.Main.Logger.Debug("Set up input...");
    _reco.SetInputToDefaultAudioDevice();
    OMD.Main.Logger.Debug("Set up input... Done.");

    OMD.Main.Logger.Debug("Set up handler...");
    _reco.SpeechRecognized += SpeechRecognitionEngine_SpeechRecognized;
    OMD.Main.Logger.Debug("Set up handler... Done.");

    return (_reco);
}
public void setup(string[] args /*, StreamWriter writer*/) { kinectSensor = KinectSensor.KinectSensors[0]; //output = writer; source = kinectSensor.AudioSource; //source.FeatureMode = true; source.AutomaticGainControlEnabled = false; //Important to turn this off for speech recognition source.EchoCancellationMode = EchoCancellationMode.None; //SystemMode.OptibeamArrayOnly; //No AEC for this sample //RecognizerInfo ri = SpeechRecognitionEngine.InstalledRecognizers().Where(r => r.Id == RecognizerId).FirstOrDefault(); RecognizerInfo ri = getKinectRecognizer(); if (ri == null) { //Console.WriteLine("Could not find speech recognizer: {0}. Please refer to the sample requirements.", RecognizerId); Console.WriteLine("Could not find speech recognizer. Please refer to the sample requirements."); return; } Console.WriteLine("Using: {0}", ri.Name); //using (var sre = new SpeechRecognitionEngine(ri.Id)) sre = new SpeechRecognitionEngine(ri.Id); var words = new Choices(); foreach (string w in args) { words.Add(w); } gb = new GrammarBuilder(); //Specify the culture to match the recognizer in case we are running in a different culture. gb.Culture = ri.Culture; gb.Append(words); // Create the actual Grammar instance, and then load it into the speech recognizer. g = new Grammar(gb); sre.LoadGrammar(g); sre.SpeechRecognized += SreSpeechRecognized; sre.SpeechHypothesized += SreSpeechHypothesized; sre.SpeechRecognitionRejected += SreSpeechRecognitionRejected; s = source.Start(); sre.SetInputToAudioStream(s, new SpeechAudioFormatInfo( EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null)); Console.Write("Recognizing: "); foreach (string w in args) { Console.Write(w + " "); } Console.Write("\n"); startRecog(); }
private void LoadSpeech() { try { engine = new SpeechRecognitionEngine(); engine.SetInputToDefaultAudioDevice(); //microfone Choices cNumbers = new Choices(); for (int i = 0; i <= 100; i++) { cNumbers.Add(i.ToString()); } Choices c_commandsOfSystem = new Choices(); c_commandsOfSystem.Add(GrammarRules.WhatTimeIs.ToArray()); c_commandsOfSystem.Add(GrammarRules.WhatDateIs.ToArray()); c_commandsOfSystem.Add(GrammarRules.JarvisStartListening.ToArray()); c_commandsOfSystem.Add(GrammarRules.JarvisStopListening.ToArray()); c_commandsOfSystem.Add(GrammarRules.MinimizeWindow.ToArray()); c_commandsOfSystem.Add(GrammarRules.NormalWindow.ToArray()); c_commandsOfSystem.Add(GrammarRules.ChangeVoice.ToArray()); c_commandsOfSystem.Add(GrammarRules.OpenProgram.ToArray()); c_commandsOfSystem.Add(GrammarRules.MediaPlayerCommands.ToArray()); GrammarBuilder gb_commandsOfSystem = new GrammarBuilder(); gb_commandsOfSystem.Append(c_commandsOfSystem); Grammar g_commandsOfSystem = new Grammar(gb_commandsOfSystem); g_commandsOfSystem.Name = "sys"; GrammarBuilder gbNumber = new GrammarBuilder(); gbNumber.Append(cNumbers); gbNumber.Append(new Choices("vezes", "mais", "menos", "por")); gbNumber.Append(cNumbers); Grammar gNumbers = new Grammar(gbNumber); gNumbers.Name = "calc"; engine.LoadGrammar(g_commandsOfSystem); engine.LoadGrammar(gNumbers); //string[] words = { "olá", "bom dia" //engine.LoadGrammar(new Grammar(new GrammarBuilder(new Choices(words)))); engine.SpeechRecognized += new EventHandler <SpeechRecognizedEventArgs>(rec); engine.AudioLevelUpdated += new EventHandler <AudioLevelUpdatedEventArgs>(audioLevel); engine.SpeechRecognitionRejected += new EventHandler <SpeechRecognitionRejectedEventArgs>(rej); //inicia o reconhecimento engine.RecognizeAsync(RecognizeMode.Multiple); Speaker.Speak("estou carregando os arquivos."); } catch (Exception e) { MessageBox.Show("Ocorreu erro no LoadSpeech(): " + e.Message); } }
public pnlDoc_Main(String input) { InitializeComponent(); this.ID = input; lblID.Text = "ID: " + ID; Program.sqlConnection.Open(); string docQuery = "SELECT Patient_ID FROM Patient where Assigned_Doc_ID = " + ID + ";"; SqlCommand docComnd = new SqlCommand(docQuery, Program.sqlConnection); SqlDataReader docReader = docComnd.ExecuteReader(); while (docReader.Read()) { rtbPTable.Text += Convert.ToString(docReader.GetInt32(0)) + "\n"; } docReader.Close(); Program.sqlConnection.Close(); sre = new SpeechRecognitionEngine(ci); sre.SetInputToDefaultAudioDevice(); sre.SpeechRecognized += sre_SpeechRecognized; Choices ch_StartStopCommands = new Choices(); ch_StartStopCommands.Add("Hello!"); ch_StartStopCommands.Add("Ok Done"); ch_StartStopCommands.Add("Exit"); ch_StartStopCommands.Add("Write"); GrammarBuilder gb_StartStop = new GrammarBuilder(); gb_StartStop.Append(ch_StartStopCommands); Grammar g_StartStop = new Grammar(gb_StartStop); GrammarBuilder gb_food = new GrammarBuilder(); gb_food.Append("After Food"); gb_food.Append("Before Food"); Grammar g_food = new Grammar(gb_food); Choices ch_Medicine = new Choices(); ch_Medicine.Add("Peracetamol"); ch_Medicine.Add("Omez"); ch_Medicine.Add("Polycrol"); ch_Medicine.Add("Acid"); Choices ch_Numbers = new Choices(); ch_Numbers.Add("Twice Daily"); ch_Numbers.Add("Once Daily"); GrammarBuilder gb_WhatIsXplusY = new GrammarBuilder(); gb_WhatIsXplusY.Append("Write"); gb_WhatIsXplusY.Append(ch_Medicine); gb_WhatIsXplusY.Append(ch_Numbers); gb_WhatIsXplusY.Append("And"); Grammar g_WhatIsXplusY = new Grammar(gb_WhatIsXplusY); sre.LoadGrammarAsync(g_food); sre.LoadGrammarAsync(g_StartStop); sre.LoadGrammarAsync(g_WhatIsXplusY); // sre.RecognizeAsync() }
/// <summary>
/// Starts the weather timers, prepares the looping ambience sound players
/// (loaded but stopped until the weather handlers start them), and loads the
/// capital-city speech grammar.
/// </summary>
public void Run()
{
    // Weather polling; GetWeather must fire before the setters that consume it.
    timer.Tick += new EventHandler(GetWeather);
    timer.Tick += new EventHandler(SetWeather);
    timer.Tick += new EventHandler(SetWeatherDescriptionImage);
    timer.Interval = 1000; // Should be set higher
    timer.Start();

    timer2.Tick += new EventHandler(UpdateAudioLevel);
    timer2.Interval = 1;
    timer2.Start();

    // FIX: the executing-assembly directory was recomputed for every player;
    // hoist the loop-invariant path once.
    string baseDir = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);

    // Each ambience player loops its file and starts stopped; most default to
    // volume 70, wind fades in from 0 and birds sit at 50.
    lightRain.URL = Path.Combine(baseDir, "Sounds\\lightRain.wav");
    lightRain.settings.setMode("loop", true);
    lightRain.settings.volume = 70;
    lightRain.controls.stop();

    heavyRain.URL = Path.Combine(baseDir, "Sounds\\heavyRain.wav");
    heavyRain.settings.setMode("loop", true);
    heavyRain.settings.volume = 70;
    heavyRain.controls.stop();

    wind.URL = Path.Combine(baseDir, "Sounds\\wind.mp3");
    wind.settings.setMode("loop", true);
    wind.settings.volume = 0;
    wind.controls.stop();

    winter.URL = Path.Combine(baseDir, "Sounds\\winter.wav");
    winter.settings.setMode("loop", true);
    winter.settings.volume = 70;
    winter.controls.stop();

    thunder.URL = Path.Combine(baseDir, "Sounds\\thunder.wav");
    thunder.settings.setMode("loop", true);
    thunder.settings.volume = 70;
    thunder.controls.stop();

    birds.URL = Path.Combine(baseDir, "Sounds\\birds.mp3");
    birds.settings.setMode("loop", true);
    birds.settings.volume = 50;
    birds.controls.stop();

    jungle.URL = Path.Combine(baseDir, "Sounds\\jungle.wav");
    jungle.settings.setMode("loop", true);
    jungle.settings.volume = 70;
    jungle.controls.stop();

    beachCola.URL = Path.Combine(baseDir, "Sounds\\beachCola.wav");
    beachCola.settings.setMode("loop", true);
    beachCola.settings.volume = 70;
    beachCola.controls.stop();

    summerBee.URL = Path.Combine(baseDir, "Sounds\\summerBee.wav");
    summerBee.settings.setMode("loop", true);
    summerBee.settings.volume = 70;
    summerBee.controls.stop();

    /* Speech recognition: a simple grammar of capital-city names. */
    Choices colors = new Choices();
    colors.Add(new string[] { "stockholm", "copenhagen", "warsaw", "kiev", "london", "madrid",
        "moscow", "washington", "oslo", "reykjavik", "santiago", "Seoul", "bern", "damascus",
        "budapest", "tokyo", "rome", "dublin", "taipei", "helsinki", "athens", "paris",
        "beijing", "lima", "lisbon", "ottawa", "sofia", "minsk", "brussels", "vienna",
        "pyongyang", "belgrade", "cairo", "berlin", "jakarta", "tehran", "jerusalem" });

    // Create a GrammarBuilder object and append the Choices object.
    GrammarBuilder gb = new GrammarBuilder();
    gb.Culture = new System.Globalization.CultureInfo("en-US");
    gb.Append(colors);

    // Create the Grammar instance and load it into the speech recognition engine.
    Grammar g = new Grammar(gb);
    sre.LoadGrammar(g);

    // Register a handler for the SpeechRecognized event.
    sre.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);
    sre.SetInputToDefaultAudioDevice();

    // NOTE(review): this STOPS async recognition rather than starting it —
    // RecognizeAsync(RecognizeMode.Multiple) would begin listening.  Preserved
    // as-is; confirm intent before changing.
    sre.RecognizeAsyncStop();
}
/// <summary>
/// Rebuilds the opponent-related speech grammar: unloads any previous opponent
/// grammar, then registers "where is / last lap / best lap / what tyres" queries
/// for each driver name (when useNames is set), for every race position, and for
/// relative subjects (leader, car ahead/behind, etc.).
/// </summary>
/// <param name="names">Opponent driver names to listen for.</param>
/// <param name="useNames">Whether to register per-name phrases in addition to positional ones.</param>
public void addOpponentSpeechRecognition(List<String> names, Boolean useNames)
{
    driverNamesInUse.Clear();
    foreach (Grammar opponentGrammar in opponentGrammarList)
    {
        sre.UnloadGrammar(opponentGrammar);
    }
    opponentGrammarList.Clear();

    Choices opponentChoices = new Choices();

    // FIX: the original repeated these two-phrase groups dozens of times;
    // factor them into local helpers (Choices alternatives are unordered, so
    // the recognised phrase set is unchanged).
    Action<String> addLapTimeQueries = subject =>
    {
        opponentChoices.Add(WHATS + " " + subject + POSSESSIVE + " " + LAST_LAP);
        opponentChoices.Add(WHATS + " " + subject + POSSESSIVE + " " + BEST_LAP);
    };
    Action<String> addTyreQueries = subject =>
    {
        opponentChoices.Add(WHAT_TYRE_IS + " " + subject + " " + ON);
        opponentChoices.Add(WHAT_TYRES_IS + " " + subject + " " + ON);
    };

    if (useNames)
    {
        Console.WriteLine("adding opponent names to speech recogniser: " + Environment.NewLine + String.Join(Environment.NewLine, names));
        foreach (String name in names)
        {
            opponentChoices.Add(WHERE_IS + " " + name);
            addLapTimeQueries(name);
            addTyreQueries(name);
        }
    }

    // Positional queries, in both long and short position wording.
    foreach (KeyValuePair<String, int> entry in numberToNumber)
    {
        addLapTimeQueries(POSITION_LONG + " " + entry.Key);
        addTyreQueries(POSITION_LONG + " " + entry.Key);
        addLapTimeQueries(POSITION_SHORT + " " + entry.Key);
        addTyreQueries(POSITION_SHORT + " " + entry.Key);
        opponentChoices.Add(WHOS_IN + " " + POSITION_SHORT + " " + entry.Key);
        opponentChoices.Add(WHOS_IN + " " + POSITION_LONG + " " + entry.Key);
        opponentChoices.Add(WHERE_IS + " " + POSITION_SHORT + " " + entry.Key);
        opponentChoices.Add(WHERE_IS + " " + POSITION_LONG + " " + entry.Key);
    }

    // Relative subjects: lap times for all, tyre queries for the "guy" variants.
    addLapTimeQueries(THE_LEADER);
    addLapTimeQueries(THE_GUY_IN_FRONT);
    addLapTimeQueries(THE_CAR_IN_FRONT);
    addLapTimeQueries(THE_GUY_AHEAD);
    addLapTimeQueries(THE_CAR_AHEAD);
    addLapTimeQueries(THE_CAR_BEHIND);
    addLapTimeQueries(THE_GUY_BEHIND);
    addTyreQueries(THE_GUY_IN_FRONT);
    addTyreQueries(THE_GUY_AHEAD);
    addTyreQueries(THE_GUY_BEHIND);

    validateAndAdd(WHOS_IN_FRONT_IN_THE_RACE, opponentChoices);
    validateAndAdd(WHOS_BEHIND_IN_THE_RACE, opponentChoices);
    validateAndAdd(WHOS_IN_FRONT_ON_TRACK, opponentChoices);
    validateAndAdd(WHOS_BEHIND_ON_TRACK, opponentChoices);
    validateAndAdd(WHOS_LEADING, opponentChoices);

    // Grammar culture must match the selected recognition pack.
    GrammarBuilder opponentGrammarBuilder = new GrammarBuilder();
    opponentGrammarBuilder.Culture = cultureInfo;
    opponentGrammarBuilder.Append(opponentChoices);
    Grammar newOpponentGrammar = new Grammar(opponentGrammarBuilder);
    sre.LoadGrammar(newOpponentGrammar);
    opponentGrammarList.Add(newOpponentGrammar);

    driverNamesInUse.AddRange(names);
}
public Grammar GetGrammar() { var gBuilder = new GrammarBuilder("Mute"); return(new Grammar(gBuilder)); }
///
/// VOZ
///
/// <summary>
/// Window start-up: picks the first connected Kinect, enables its skeleton and
/// colour streams, starts it, and — when a Kinect speech recognizer pack is
/// installed — wires a Spanish voice-command grammar to the sensor's audio stream.
/// </summary>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Take the first connected sensor; a Kinect must be plugged in at app
    // startup.  (Use KinectSensorChooser from Microsoft.Kinect.Toolkit for
    // plug/unplug robustness.)
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            this.sensor = candidate;
            break;
        }
    }

    if (this.sensor != null)
    {
        // Skeleton frames drive the pointer handling.
        this.sensor.SkeletonStream.Enable();
        this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

        // NOTE(review): colour frames come from KinectSensors[0], not
        // necessarily the sensor selected above — confirm the single-sensor
        // assumption.
        myKinect = KinectSensor.KinectSensors[0];
        myKinect.ColorStream.Enable();
        myKinect.ColorFrameReady += myKinect_ColorFrameReady;

        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            // Another process owns the sensor.
            this.sensor = null;
        }
    }

    //---------------------VOZ
    // Grammar for the voice commands.
    // NOTE(review): if Start() threw above, 'sensor' is null and
    // sensor.AudioSource below would throw — behaviour preserved from the
    // original; confirm intended.
    RecognizerInfo ri = GetKinectRecognizer();
    if (ri != null)
    {
        this.speechEngine = new SpeechRecognitionEngine(ri.Id);

        // Spanish command words mapped to semantic tags.
        var commands = new Choices();
        commands.Add(new SemanticResultValue("izquierdo", "CLICKIZQ"));
        commands.Add(new SemanticResultValue("derecho", "CLICKDER"));
        commands.Add(new SemanticResultValue("foto", "CAPTURA"));
        commands.Add(new SemanticResultValue("arriba", "ARRIBA"));
        commands.Add(new SemanticResultValue("abajo", "ABAJO"));
        commands.Add(new SemanticResultValue("cambio", "CAMBIO"));
        commands.Add(new SemanticResultValue("sube", "SUBE"));
        commands.Add(new SemanticResultValue("baja", "BAJA"));
        commands.Add(new SemanticResultValue("salir", "SALIR"));

        var builder = new GrammarBuilder { Culture = ri.Culture };
        builder.Append(commands);
        speechEngine.LoadGrammar(new Grammar(builder));

        speechEngine.SpeechRecognized += SpeechRecognized;
        //speechEngine.SpeechRecognitionRejected += SpeechRejected;

        // For long sessions (hours), turning off acoustic-model adaptation can
        // prevent recognition accuracy from degrading over time.
        ////speechEngine.UpdateRecognizerSetting("AdaptationOn", 0);

        // 16 kHz, 16-bit mono PCM from the Kinect microphone array.
        speechEngine.SetInputToAudioStream(
            sensor.AudioSource.Start(),
            new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        speechEngine.RecognizeAsync(RecognizeMode.Multiple);
    }
    //VOZ
}
/// <summary>
/// Builds a ClixSense ad processor: stores the screen/bitmap configuration,
/// loads (or creates and persists) the XML config, and initialises
/// text-to-speech plus the speech recognizer that listens for "one".."five",
/// "Refresh" and "Close".
/// </summary>
/// <exception cref="Exception">Rethrown when speech initialisation fails (after logging).</exception>
public ClixSenseAdProcessor(
    ScreenShotsManager screenShotsManager,
    List<Bitmap> toFindBitmaps,
    Dictionary<string, Bitmap> catBitmaps,
    List<Bitmap> closeBitmaps,
    int displayIndex,
    int zoomFactorX,
    int zoomFactorY,
    bool fakeMoves,
    Dispatcher cd)
{
    this.screenShotsManager = screenShotsManager;
    this.toFindBitmaps = toFindBitmaps;
    this.closeBitmaps = closeBitmaps;
    this.catBitmaps = catBitmaps;
    this.fakeMoves = fakeMoves;
    this.zoomFactorX = zoomFactorX;
    this.zoomFactorY = zoomFactorY;
    this.CaptureCatImage = false;
    this.StopAllProcessing = false;
    this.DisplayScreen = DisplayIndex.GetDisplayRectangle(displayIndex);

    // Load the persisted configuration; when absent, persist defaults so the
    // file exists on the next run.
    this.clixConfig = new ClixSenseAdProcessorConfig();
    if ((this.clixConfig = this.clixConfig.Load($".\\ClixSenseAdProcessorConfig" + ".xml")) == null)
    {
        this.clixConfig = new ClixSenseAdProcessorConfig();
        this.clixConfig.Save($".\\ClixSenseAdProcessorConfig" + ".xml");
        Logger.Info($"Saved {nameof(this.clixConfig)} correctly : {this.clixConfig}");
    }
    else
    {
        Logger.Info($"Loaded {nameof(this.clixConfig)} correctly : {this.clixConfig}");
    }

    this.currentDispatcher = cd;

    Logger.Info($"ClickSense initialize - Trying to initialize speech ");
    try
    {
        this.installedVoices = new List<InstalledVoice>(this.synthesizer.GetInstalledVoices());
        this.synthesizer.Volume = 100; // 0...100
        this.synthesizer.Rate = -2;    // -10...10
        this.synthesizer.SelectVoice(this.installedVoices.First().VoiceInfo.Name);

        var numbers = new Choices();
        numbers.Add("one", "two", "three", "four", "five", "Refresh", "Close");

        // One to five repetitions of any of the above words per utterance.
        var gb = new GrammarBuilder();
        gb.Append(new GrammarBuilder(numbers), 1, 5);
        var grammer = new Grammar(gb) { Priority = 127 };

        this.speechRecognitionEngine.RequestRecognizerUpdate();
        this.speechRecognitionEngine.LoadGrammar(grammer);
        this.speechRecognitionEngine.SetInputToDefaultAudioDevice();
        this.speechRecognitionEngine.SpeechRecognized += this.SpeechRecognitionEngine_SpeechRecognized;
        this.speechRecognitionEngine.SpeechDetected += this.SpeechRecognitionEngine_SpeechDetected;
        this.speechRecognitionEngine.SpeechRecognitionRejected += this.SpeechRecognitionEngine_SpeechRecognitionRejected;
        this.speechRecognitionEngine.RecognizeCompleted += this.SpeechRecognitionEngine_RecognizeCompleted;
    }
    catch (Exception exception)
    {
        Logger.Error(exception.Message + exception.InnerException?.Message);
        // FIX: 'throw exception;' reset the stack trace — a bare rethrow
        // preserves it for the caller.
        throw;
    }

    Logger.Info($" Finished audio Initializer with no exceptions - found {this.installedVoices.Count} voices");
}
/// <summary>
/// Grammar creation.  Programmatically builds the "Questions" grammar for the
/// quiz: close-application commands, topic selection (one or two topics),
/// restart/help phrases, answer phrases for animals and colours, next-question
/// and difficulty commands.  Semantic keys consumed by the recognised-speech
/// handler: "topic1", "topic2", "animals", "colors".
/// </summary>
/// <param name="p">Unused by this method.</param>
/// <returns>The composed Grammar, named "Questions".</returns>
private Grammar CreateGrammarBuilderSemantics(object p)
{
    //1. Close application: ("Cierra" | "Salir de") + "la aplicacion".
    //   NOTE(review): 'close1' ("Cerrar") is built but never added to closeCh,
    //   so "Cerrar" is not recognised — confirm whether that is intended.
    GrammarBuilder close = "Cierra";
    GrammarBuilder close1 = "Cerrar";
    GrammarBuilder exit = "Salir de";
    Choices closeCh = new Choices(close, exit);
    GrammarBuilder application = "la aplicacion";
    GrammarBuilder closeApplication = new GrammarBuilder(closeCh);
    closeApplication.Append(application);

    //2. Select topics: ("Quiero" | "Dame") + ("Cuestiones de" | "Preguntas de") + <topic1>.
    GrammarBuilder want = "Quiero";
    GrammarBuilder give = "Dame";
    Choices wantCh = new Choices(want, give);
    GrammarBuilder issues = "Cuestiones de";
    GrammarBuilder questions = "Preguntas de";
    Choices questionsCh = new Choices(issues, questions);

    // Each topic alternative carries its own text as the semantic value.
    SemanticResultValue semanticResultValue = new SemanticResultValue("animales", "animales");
    GrammarBuilder resultValueBuilder = new GrammarBuilder(semanticResultValue);
    Choices topicsCh = new Choices();
    topicsCh.Add(resultValueBuilder);
    semanticResultValue = new SemanticResultValue("colores", "colores");
    resultValueBuilder = new GrammarBuilder(semanticResultValue);
    topicsCh.Add(resultValueBuilder);
    SemanticResultKey semanticResultKey = new SemanticResultKey("topic1", topicsCh);
    GrammarBuilder topics = new GrammarBuilder(semanticResultKey);

    // The assignments below use the implicit Choices->GrammarBuilder
    // conversion, so each variant gets its own builder (no shared mutation
    // of wantCh itself).
    GrammarBuilder wantQuestions = wantCh;
    wantQuestions.Append(questionsCh);
    wantQuestions.Append(topics);

    //3. Select topic extended: same as (2) plus "y" + <topic2>.
    //   'topics' is re-pointed at a new builder keyed "topic2" before the
    //   final Append — order of these statements matters.
    GrammarBuilder wantQuestionsExtended = wantCh;
    wantQuestionsExtended.Append(questionsCh);
    wantQuestionsExtended.Append(topics);
    wantQuestionsExtended.Append("y");
    semanticResultKey = new SemanticResultKey("topic2", topicsCh);
    topics = new GrammarBuilder(semanticResultKey);
    wantQuestionsExtended.Append(topics);
    Choices select = new Choices(wantQuestions, wantQuestionsExtended);

    //4. Begin again
    GrammarBuilder beginAgain = "Empezar de nuevo";

    //5. Need help
    GrammarBuilder needHelp = "Necesito ayuda";

    //Answers
    //6. Multiple answers: "Este animal es un" <animals> "de color" <colors>.
    GrammarBuilder animalsColorsAnswers = new GrammarBuilder("Este animal es un");
    Choices animalsCh = new Choices();
    for (int i = 0; i < animalsList.Length; i++)
    {
        // Each animal name doubles as its own semantic value.
        semanticResultValue = new SemanticResultValue(animalsList[i], animalsList[i]);
        resultValueBuilder = new GrammarBuilder(semanticResultValue);
        animalsCh.Add(resultValueBuilder);
    }
    semanticResultKey = new SemanticResultKey("animals", animalsCh);
    GrammarBuilder animals = new GrammarBuilder(semanticResultKey);
    animalsColorsAnswers.Append(animals);
    animalsColorsAnswers.Append(new GrammarBuilder("de color"));
    Choices colorsCh = new Choices();
    for (int i = 0; i < colorsList.Length; i++)
    {
        // Each colour name doubles as its own semantic value.
        semanticResultValue = new SemanticResultValue(colorsList[i], colorsList[i]);
        resultValueBuilder = new GrammarBuilder(semanticResultValue);
        colorsCh.Add(resultValueBuilder);
    }
    semanticResultKey = new SemanticResultKey("colors", colorsCh);
    GrammarBuilder colors = new GrammarBuilder(semanticResultKey);
    animalsColorsAnswers.Append(colors);

    //7. Animals answer: "Este animal es un" <animals>.
    GrammarBuilder animalsAnswers = "Este animal es un";
    animalsAnswers.Append(animals);

    //8. Colors answer: "Este color es el" <colors>.
    GrammarBuilder colorsAnswers = "Este color es el";
    colorsAnswers.Append(colors);

    //9. Next Question
    Choices nextQuestion = new Choices("pregunta siguiente", "siguiente pregunta");

    //10. Increase and decrease difficulty
    GrammarBuilder increaseDifficulty = "aumentar dificultad";
    GrammarBuilder decreaseDifficulty = "disminuir dificultad";
    Choices difficulty = new Choices(increaseDifficulty, decreaseDifficulty);

    // Top level: any one of the phrase groups above.
    Choices choices = new Choices(beginAgain, needHelp, closeApplication, select,
        nextQuestion, animalsAnswers, colorsAnswers, animalsColorsAnswers, difficulty);
    Grammar grammar = new Grammar(choices);

    //Grammar name
    grammar.Name = "Questions";
    return(grammar);
}
/// <summary>
/// Sets up the speech engine: finds the required installed recognizer, builds a
/// grammar of single phrases plus wildcard-prefixed gameplay/shape/color
/// commands, wires the recognition events, and starts audio capture on a
/// background thread. Leaves <c>valid</c> false if the recognizer is missing.
/// </summary>
public Recognizer()
{
    // Locate the specific installed recognizer we depend on; bail out quietly if absent.
    RecognizerInfo info = SpeechRecognitionEngine.InstalledRecognizers()
        .FirstOrDefault(r => r.Id == RecognizerId);
    if (info == null)
    {
        return;
    }

    sre = new SpeechRecognitionEngine(info.Id);

    // Build a simple grammar of shapes, colors, and some simple program control.
    var standalone = new Choices();
    foreach (var entry in SinglePhrases)
    {
        standalone.Add(entry.Key);
    }

    var gameplayWords = new Choices();
    foreach (var entry in GameplayPhrases)
    {
        gameplayWords.Add(entry.Key);
    }

    var shapeWords = new Choices();
    foreach (var entry in ShapePhrases)
    {
        shapeWords.Add(entry.Key);
    }

    var colorWords = new Choices();
    foreach (var entry in ColorPhrases)
    {
        colorWords.Add(entry.Key);
    }

    // "<color> <shape>" compound phrases.
    var colorThenShape = new GrammarBuilder();
    colorThenShape.Append(colorWords);
    colorThenShape.Append(shapeWords);

    // An utterance object is a gameplay word, a bare shape, a bare color, or color+shape.
    var objectAlternatives = new Choices();
    objectAlternatives.Add(gameplayWords);
    objectAlternatives.Add(shapeWords);
    objectAlternatives.Add(colorWords);
    objectAlternatives.Add(colorThenShape);

    // Wildcard prefix allows arbitrary leading speech before the object phrase.
    var commandPhrase = new GrammarBuilder();
    commandPhrase.AppendWildcard();
    commandPhrase.Append(objectAlternatives);

    var topLevel = new Choices();
    topLevel.Add(commandPhrase);
    topLevel.Add(standalone);

    var builder = new GrammarBuilder();
    builder.Append(topLevel);
    sre.LoadGrammar(new Grammar(builder));

    sre.SpeechRecognized += sre_SpeechRecognized;
    sre.SpeechHypothesized += sre_SpeechHypothesized;
    sre.SpeechRecognitionRejected += new EventHandler<SpeechRecognitionRejectedEventArgs>(sre_SpeechRecognitionRejected);

    // Audio capture (DMO) runs on its own thread.
    var dmoThread = new Thread(StartDMO);
    dmoThread.Start();
    valid = true;
}
/// <summary>
/// (Re)builds the "Actions" grammar from the keyword factory and starts
/// continuous recognition on the default audio device. Each non-default parent
/// contributes "<root keyword> <child keyword> [quantifier]"; the default
/// parent contributes its children directly (no root word).
/// Fixes vs. original: removed the dead local 'choices' (assigned, never read)
/// and hoisted the per-iteration CultureInfo / quantifier-Choices construction
/// out of the loop — both were loop-invariant.
/// </summary>
private static void LoadGrammar()
{
    // Tear down any previous recognition session before rebuilding.
    if (sre != null && sre.Grammars.Count > 0)
    {
        sre.RecognizeAsyncStop();
        sre.UnloadAllGrammars();
    }

    string setDefaultWord = KeywordFactory.DefaultParent;
    string[] defaultSubChoices = null;
    string[] rootChoices = new string[KeywordFactory.Parents.Count];
    GrammarBuilder[] gbAll = new GrammarBuilder[KeywordFactory.Parents.Count];

    // Loop-invariant work, hoisted: one culture object and one optional
    // quantifier suffix shared by every per-parent builder.
    var culture = new System.Globalization.CultureInfo("en-GB");
    Choices quantifierChoices = new Choices(KeywordFactory.GetQuantifiers());

    for (int p = 0; p < KeywordFactory.Parents.Count; p++)
    {
        rootChoices[p] = KeywordFactory.Parents[p].Keyword;
        if (rootChoices[p] == setDefaultWord)
        {
            // The "default" parent is spoken without its root keyword:
            // the grammar accepts its child names directly.
            defaultSubChoices = KeywordFactory.GetChildrenNames(
                KeywordFactory.Parents
                    .Where(parent => parent.Keyword == setDefaultWord)
                    .Select(parent => parent)
                    .Single())
                .ToArray();
            gbAll[p] = new GrammarBuilder(new Choices(defaultSubChoices))
            {
                Culture = culture
            };
        }
        else
        {
            // Ordinary parent: "<root keyword> <child keyword>".
            string[] subChoices = new string[KeywordFactory.Parents[p].Children.Count];
            for (int c = 0; c < KeywordFactory.Parents[p].Children.Count; c++)
            {
                subChoices[c] = KeywordFactory.Parents[p].Children[c].Keyword;
            }
            gbAll[p] = new GrammarBuilder(rootChoices[p])
            {
                Culture = culture
            };
            gbAll[p].Append(new Choices(subChoices));
        }

        // Optional quantity word (0 or 1 occurrences) after every phrase.
        gbAll[p].Append(quantifierChoices, 0, 1);
    }

    Choices allChoices = new Choices();
    allChoices.Add(gbAll);
    Grammar gram = new Grammar(allChoices)
    {
        Name = "Actions",
        Enabled = true
    };

    try
    {
        sre = new SpeechRecognitionEngine();
        sre.SetInputToDefaultAudioDevice();
        sre.LoadGrammar(gram);
        gram.SpeechRecognized += OnSpeechRecognized;
        sre.RecognizeAsync(RecognizeMode.Multiple);
        Console.WriteLine("*** Recognition Engine Ready! ***\n");
    }
    catch (Exception e)
    {
        Console.WriteLine($"Uh oh:\n\n {e.Message}");
    }
}
/// <summary>
/// BackgroundWorker body: builds a grammar from the word list passed in
/// <paramref name="e"/>.Argument, runs continuous recognition over the Kinect
/// microphone array, and polls for cancellation until the worker is cancelled.
/// Progress reports carry status text in a ProgessResponse.
/// Fixes vs. original: the status messages concatenated values onto
/// composite-format strings ("...{0}..." + value), so the literal "{0}"
/// appeared in the output; they now use string.Format properly.
/// </summary>
private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
    List<string> words = (List<string>)e.Argument;
    ProgessResponse response = new ProgessResponse() { };
    using (var source = new KinectAudioSource())
    {
        source.FeatureMode = true;
        source.AutomaticGainControl = false; //Important to turn this off for speech recognition
        source.SystemMode = SystemMode.OptibeamArrayOnly; //No AEC for this sample

        RecognizerInfo ri = SpeechRecognitionEngine.InstalledRecognizers().Where(r => r.Id == RecognizerId).FirstOrDefault();
        if (ri == null)
        {
            // BUG FIX: format the id into the message instead of appending it
            // after a stray "{0}" placeholder.
            response.Message = string.Format("Could not find speech recognizer: {0}. Please refer to the sample requirements.", RecognizerId);
            backgroundWorker1.ReportProgress(0, response);
            return;
        }

        // BUG FIX: same composite-format misuse as above.
        response.Message = string.Format("Using: {0}", ri.Name);
        backgroundWorker1.ReportProgress(0, response);

        using (var sre = new SpeechRecognitionEngine(ri.Id))
        {
            var wordChoices = new Choices();
            foreach (string word in words)
            {
                wordChoices.Add(word);
            }

            response.Message = "speech listener started";
            backgroundWorker1.ReportProgress(0, response);

            var gb = new GrammarBuilder();
            //Specify the culture to match the recognizer in case we are running in a different culture.
            gb.Culture = ri.Culture;
            gb.Append(wordChoices);

            // Create the actual Grammar instance, and then load it into the speech recognizer.
            var g = new Grammar(gb);
            sre.LoadGrammar(g);
            sre.SpeechRecognized += SreSpeechRecognized;
            sre.SpeechHypothesized += SreSpeechHypothesized;
            sre.SpeechRecognitionRejected += SreSpeechRecognitionRejected;

            using (Stream s = source.Start())
            {
                sre.SetInputToAudioStream(s, new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
                sre.RecognizeAsync(RecognizeMode.Multiple);

                // Poll for cancellation; recognition runs asynchronously meanwhile.
                bool cancel = false;
                while (cancel == false)
                {
                    if (backgroundWorker1.CancellationPending)
                    {
                        // Set the e.Cancel flag so that the WorkerCompleted event
                        // knows that the process was cancelled.
                        e.Cancel = true;
                        cancel = true;
                        break;
                    }
                    Thread.Sleep(10);
                }
                sre.RecognizeAsyncStop();
            }
        }
    }
}
/// <summary>
/// Creates a teleport voice command whose trigger phrases come from the given
/// <see cref="GrammarBuilder"/>; all behavior is inherited from the base command class.
/// </summary>
/// <param name="builder">Grammar fragment describing the phrases that trigger this command.</param>
public TeleportCommand(GrammarBuilder builder) : base(builder)
{
}
/// <summary>
/// Builds and loads the test grammar for the robot/game: every utterance is
/// "<attention word> <command> <digit...>", where the command is a reward,
/// a dictated "say" phrase, an attribute query, a declaration, an action,
/// a fixed query, or a spoken number.
/// </summary>
private void CreateGrammar()
{
    //// Create and load a dictation grammar. This handles for many words and doesn't work very well
    //recognizer.LoadGrammar(new DictationGrammar());

    // create a small custom grammar for testing
    Choices reward = new Choices("good", "no");
    Choices color = new Choices("black", "red", "blue", "green", "orange", "white");
    Choices size = new Choices("big", "medium", "little", "long", "short");
    Choices distance = new Choices("near", "nearer", "nearest", "far", "farther", "farest");
    // Any attribute: a color, a distance, or a size word.
    Choices attrib = new Choices(new GrammarBuilder[] { color, distance, size });
    Choices shape = new Choices("dot", "line");
    Choices attribs = new Choices("color", "size", "distance", "shape");
    Choices action = new Choices("go", "stop", "turn", "move");
    Choices direction = new Choices("right", "left", "forward", "backwards", "around");
    Choices command = new Choices("this is", "this is a", "this is the", "this is an");
    Choices query = new Choices("what can you see", "what is behind you", "what is this");

    // "what <attribute-name> is this"
    GrammarBuilder attribQuery = new GrammarBuilder();
    attribQuery.Append("what");
    attribQuery.Append(attribs);
    attribQuery.Append("is this");

    // "say <free dictation>"
    GrammarBuilder say = new GrammarBuilder();
    say.Append("say");
    say.AppendDictation();

    // "this is [a/the/an] <attrib x0-4> [shape]"
    GrammarBuilder declaration = new GrammarBuilder();
    declaration.Append(command);
    declaration.Append(attrib, 0, 4);
    declaration.Append(shape, 0, 1);

    // "<action> [direction]"
    GrammarBuilder actionCommand = new GrammarBuilder();
    actionCommand.Append(action);
    actionCommand.Append(direction, 0, 1);

    // Spoken numbers: 1-5 digits/"point" in a row.
    Choices digit = new Choices("1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "point");
    GrammarBuilder number = new GrammarBuilder();
    number.Append(digit, 1, 5);

    Choices commands = new Choices(reward, say, attribQuery, declaration, actionCommand, query, number);

    // Every utterance must start with an attention word.
    Choices attentionWord = new Choices("Sallie", "Computer");
    GrammarBuilder a = new GrammarBuilder();
    a.Append(attentionWord, 1, 1);
    a.Append(commands);
    // NOTE(review): this appends a MANDATORY trailing digit after every command
    // (Append without min/max counts) — confirm a digit suffix is intended.
    a.Append(digit);

    //some words we might need some day
    //Choices article = new Choices("a", "an", "the", "some", "containing", "with", "which are");
    //Choices emotion = new Choices("ecstatic", "happy", "so-so", "OK", "sad", "unhappy");
    //Choices timeOfDay = new Choices("morning", "afternoon", "evening", "night");

    //someday we'll need numbers
    //Choices number = new Choices();
    //for (int i = 1; i < 200; i++)
    //    number.Add(i.ToString());

    //how to add singular/plural to choices
    //PluralizationService ps = PluralizationService.CreateService(new CultureInfo("en-us"));
    //string[] attribList = new string[] { "attributes", "sequences", "colors", "sizes", "shapes", "digits", "things" };
    //string[] attribList1 = new string[attribList.Length];
    //for (int i = 0; i < attribList.Length; i++)
    //    attribList1[i] = ps.Singularize(attribList[i]);

    //how to specify a custom pronunciation with SRGS--these don't integrate with the rest of the grammar
    //SrgsItem cItem = new SrgsItem();
    //SrgsToken cWord = new SrgsToken("computer");
    //cWord.Pronunciation = "kəmpjutər";
    //cItem.Add(cWord);
    //SrgsRule srgsRule = new SrgsRule("custom", cItem);
    //SrgsDocument tokenPron = new SrgsDocument(srgsRule);
    //tokenPron.PhoneticAlphabet = SrgsPhoneticAlphabet.Ipa;
    //Grammar g_Custom = new Grammar(tokenPron);
    //recognizer.LoadGrammar(g_Custom);

    //get the words from the grammar and label neurons
    // Strip the curly quote characters (U+2018/U+2019) that DebugShowPhrases
    // wraps phrases in, then split into the distinct word list.
    string c = a.DebugShowPhrases;
    c = c.Replace((char)0x2018, ' ');
    c = c.Replace((char)0x2019, ' ');
    string[] c1 = c.Split(new string[] { "[", ",", "]", " " }, StringSplitOptions.RemoveEmptyEntries);
    c1 = c1.Distinct().ToArray();
    //int i1 = 1;
    //na.BeginEnum();
    //for (Neuron n = na.GetNextNeuron(); n != null && i1 < c1.Length; i1++, n = na.GetNextNeuron())
    //    n.Label = c1[i1].ToLower();

    Grammar gr = new Grammar(a);
    recognizer.LoadGrammar(gr);
    // gr = new Grammar(reward);
    // recognizer.LoadGrammar(gr);
}
/// <summary>
/// Round-trips a word through the synthesizer and recognizer to discover the
/// phonemes the synthesis engine uses: the word is spoken to an in-memory WAV
/// stream, then recognized from that stream with a one-word grammar, and the
/// recognized pronunciation string is returned (possibly patched).
/// Fixes vs. original: the null/empty check now runs BEFORE Trim() (the
/// original threw NullReferenceException for null input), the recognition
/// engine is disposed, and the dead "new returned null" checks were removed.
/// </summary>
/// <param name="MyWord">Word to analyze; may be null or empty (returns "").</param>
/// <param name="Pron">Optional explicit pronunciation to synthesize instead of the word.</param>
/// <returns>The recognized phoneme string, or "" when nothing could be determined.</returns>
public static string GetPronunciationFromText(string MyWord, string Pron = null)
{
    //this is a trick to figure out phonemes used by synthesis engine
    // BUG FIX: validate before calling Trim(); the original dereferenced null.
    if (string.IsNullOrEmpty(MyWord))
    {
        return ("");
    }
    MyWord = MyWord.Trim();
    if (MyWord == "")
    {
        return ("");
    }
    // Hard-coded words the synthesize/recognize round-trip gets wrong.
    if (MyWord.ToLower() == "a")
    {
        return ("ə");
    }
    if (MyWord.ToLower() == "no")
    {
        return ("no");
    }

    //txt to wav
    using (MemoryStream audioStream = new MemoryStream())
    {
        using (SpeechSynthesizer synth = new SpeechSynthesizer())
        {
            synth.SetOutputToWaveStream(audioStream);
            PromptBuilder pb = new PromptBuilder();
            if (Pron == null)
            {
                synth.Speak(MyWord);
            }
            else
            {
                pb.AppendTextWithPronunciation("Not Used", Pron);
                synth.Speak(pb);
            }
            synth.SetOutputToNull();
            audioStream.Position = 0;

            //now wav to txt (for reco phonemes)
            recoPhonemes = String.Empty;
            GrammarBuilder gb = new GrammarBuilder(MyWord);
            Grammar g = new Grammar(gb);
            //TODO the hard letters to recognize are 'g' and 'e'

            // BUG FIX: dispose the engine (the original leaked it).
            using (SpeechRecognitionEngine reco = new SpeechRecognitionEngine())
            {
                reco.SpeechHypothesized += new EventHandler<SpeechHypothesizedEventArgs>(reco_SpeechHypothesized);
                reco.SpeechRecognitionRejected += new EventHandler<SpeechRecognitionRejectedEventArgs>(reco_SpeechRecognitionRejected);
                reco.UnloadAllGrammars(); //only use the one word grammar
                reco.LoadGrammar(g);
                reco.SetInputToWaveStream(audioStream);
                RecognitionResult rr = reco.Recognize();
                reco.SetInputToNull();
                if (rr != null)
                {
                    recoPhonemes = StringFromWordArray(rr.Words, WordType.Pronunciation);
                }
            }

            //custom pronunciations don't seem to work so here are patches
            recoPhonemes = recoPhonemes.Replace("e͡iɛm", "æm");
            return (recoPhonemes);
        }
    }
}
/// <summary>
/// Initializes Kinect speech recognition: locates the Kinect recognizer,
/// builds a grammar of spell phrases ("seir surita fimm groenn vindr",
/// "oss naeo fyor regin tinada varindo yotsun", "nox eterna"), wires the
/// recognition events, and starts continuous recognition over the Kinect
/// audio stream.
/// Fix vs. original: when engine construction throws, the method now returns
/// after showing the error — the original fell through with a null
/// speechRecognizer and crashed at LoadGrammar.
/// </summary>
private void InitializeSpeechRecognition()
{
    RecognizerInfo ri = GetKinectRecognizer();
    if (ri == null)
    {
        MessageBox.Show(
            @"There was a problem initializing Speech Recognition. Ensure you have the Microsoft Speech SDK installed.",
            "Failed to load Speech SDK",
            MessageBoxButton.OK,
            MessageBoxImage.Error);
        return;
    }

    try
    {
        speechRecognizer = new SpeechRecognitionEngine(ri.Id);
    }
    catch
    {
        MessageBox.Show(
            @"There was a problem initializing Speech Recognition. Ensure you have the Microsoft Speech SDK installed and configured.",
            "Failed to load Speech SDK",
            MessageBoxButton.OK,
            MessageBoxImage.Error);
        // BUG FIX: bail out; continuing would NullReferenceException below.
        return;
    }

    // NOTE(review): 'firstword' is built from firstWord.Keys but never added to
    // the grammar below — confirm whether it should be part of 'first'.
    var firstword = new Choices();
    foreach (string value in firstWord.Keys)
    {
        firstword.Add(value);
    }

    // Spell phrase 1: "seir surita fimm groenn vindr".
    GrammarBuilder firstword1 = new Choices(new string[] { "seir" });
    firstword1.Append("surita");
    firstword1.Append("fimm");
    firstword1.Append("groenn");
    firstword1.Append("vindr");

    // Spell phrase 2: "oss naeo fyor regin tinada varindo yotsun".
    GrammarBuilder firstword2 = new Choices(new string[] { "oss" });
    firstword2.Append("naeo");
    firstword2.Append("fyor");
    firstword2.Append("regin");
    firstword2.Append("tinada");
    firstword2.Append("varindo");
    firstword2.Append("yotsun");

    // Exit phrase: "nox eterna".
    GrammarBuilder exit = new Choices(new string[] { "nox" });
    exit.Append("eterna");

    Choices first = new Choices();
    first.Add(new Choices(new GrammarBuilder[] { firstword1, firstword2, exit }));

    var gb = new GrammarBuilder();
    gb.Culture = ri.Culture;
    gb.Append(first);
    var g = new Grammar(gb);
    speechRecognizer.LoadGrammar(g);

    speechRecognizer.SpeechRecognized += speechRecognizer_SpeechRecognized;
    speechRecognizer.SpeechHypothesized += speechRecognizer_SpeechHypothesized;
    speechRecognizer.SpeechRecognitionRejected += speechRecognizer_SpeechRecognitionRejected;

    if (Kinect == null || speechRecognizer == null)
    {
        return;
    }

    // Feed the Kinect microphone array into the recognizer.
    var audioSource = this.Kinect.AudioSource;
    audioSource.BeamAngleMode = BeamAngleMode.Adaptive;
    var kinectStream = audioSource.Start();
    speechRecognizer.SetInputToAudioStream(
        kinectStream, new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
    speechRecognizer.RecognizeAsync(RecognizeMode.Multiple);
}
/// <summary>
/// Initializes speech recognition over the Kinect v2 audio beam.
/// (Original doc comment: 初始化語音辨識 — "initialize speech recognition".)
/// Fix vs. original: the catch block wrote
/// Console.WriteLine("輸入設備問題" + e, ToString()) — the stray comma passed
/// this.ToString() as a discarded composite-format argument; the exception
/// text is now appended as intended.
/// </summary>
private void InitalizeSpeechRecognition()
{
    // grab the audio stream
    IReadOnlyList<AudioBeam> audioBeamList = this._sensor.AudioSource.AudioBeams;
    System.IO.Stream audioStream = audioBeamList[0].OpenInputStream();

    // create the convert stream
    this.convertStream = new KinectAudioStream(audioStream);

    //RecognizerInfo
    RecognizerInfo recognizerInfo = RecognizerSelection.TryGetKinectRecognizer();
    if (null != recognizerInfo)
    {
        //Using KinectRecognizer();
        this.speechEngine = new SpeechRecognitionEngine(recognizerInfo.Id);
        var grammarBuilder = new GrammarBuilder { Culture = recognizerInfo.Culture };
        // Append the speech vocabulary dictionary. (was: 把語音字典放進去)
        grammarBuilder.Append(vocabulary.Speech_Dictionary);
        //Grammar
        var grammar = new Grammar(grammarBuilder);
        // Load the grammar into the engine. (was: 載入文法)
        this.speechEngine.LoadGrammar(grammar);

        // let the convertStream know speech is going active
        this.convertStream.SpeechActive = true;

        // For long recognition sessions (a few hours or more), it may be beneficial to turn off adaptation of the acoustic model.
        // This will prevent recognition accuracy from degrading over time.
        speechEngine.UpdateRecognizerSetting("AdaptationOn", 0);

        try
        {
            if (MicrophoneSetting.Microphone_Status)
            {
                this.speechEngine.SetInputToDefaultAudioDevice();
            }
            else
            {
                this.speechEngine.SetInputToAudioStream(this.convertStream,
                    new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
            }
        }
        catch (Exception e)
        {
            // BUG FIX: append the exception text ("輸入設備問題" = "input device problem").
            Console.WriteLine("輸入設備問題" + e.ToString());
        }

        this.speechEngine.RecognizeAsync(RecognizeMode.Multiple);

        // Hook recognition accepted / rejected events. (was: 語音辨識事件,語音辨識拒絕事件)
        this.speechEngine.SpeechRecognized += SpeechRecognized;
        this.speechEngine.SpeechRecognitionRejected += SpeechRejected;
    }
}
/// <summary>
/// Voice calculator entry point: greets the user in Polish, builds two grammars
/// (system commands "Stop"/"Pomoc" and "Oblicz &lt;digit&gt; plus &lt;digit&gt;"),
/// and runs continuous recognition until the Stop handler clears speechOn.
/// Fix vs. original: the wait loop was a tight `while (speechOn) { ; }` spin
/// that pinned a CPU core; it now sleeps between checks.
/// </summary>
static void Main(string[] args)
{
    try
    {
        pTTS.SetOutputToDefaultAudioDevice();
        pTTS.Speak("Witam w kalkulatorze");

        // Recognition language. (was: Ustawienie języka rozpoznawania)
        CultureInfo ci = new CultureInfo("pl-PL");
        // Create the recognition engine. (was: Utworzenie "silnika" rozpoznawania)
        pSRE = new SpeechRecognitionEngine(ci);
        // Use the default input device. (was: Ustawienie domyślnego urządzenia wejściowego)
        pSRE.SetInputToDefaultAudioDevice();
        // Handler fired when an utterance matches a loaded grammar.
        pSRE.SpeechRecognized += PSRE_SpeechRecognized;

        // -------------------------------------------------------------------------
        // Grammar 1 — SYSTEM COMMANDS ("Stop", "Pomoc")
        Choices stopChoice = new Choices();
        stopChoice.Add("Stop");
        stopChoice.Add("Pomoc");
        GrammarBuilder buildGrammarSystem = new GrammarBuilder();
        buildGrammarSystem.Append(stopChoice);
        Grammar grammarSystem = new Grammar(buildGrammarSystem);

        // -------------------------------------------------------------------------
        // Grammar 2 — PROGRAM COMMANDS: "Oblicz <digit> plus <digit>"
        Choices chNumbers = new Choices();
        string[] numbers = new string[] { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" };
        chNumbers.Add(numbers);
        GrammarBuilder grammarProgram = new GrammarBuilder();
        grammarProgram.Append("Oblicz");
        grammarProgram.Append(chNumbers);
        grammarProgram.Append("plus");
        grammarProgram.Append(chNumbers);
        Grammar g_WhatIsXplusY = new Grammar(grammarProgram);

        // -------------------------------------------------------------------------
        // Load both grammars and recognize against all of them continuously.
        pSRE.LoadGrammarAsync(g_WhatIsXplusY);
        pSRE.LoadGrammarAsync(grammarSystem);
        pSRE.RecognizeAsync(RecognizeMode.Multiple);
        // -------------------------------------------------------------------------

        Console.WriteLine("\nAby zakonczyć działanie programu powiedz 'STOP'\n");
        // Keep the process alive until the Stop command clears speechOn.
        // BUG FIX: sleep instead of busy-spinning.
        while (speechOn == true)
        {
            System.Threading.Thread.Sleep(100);
        }
        Console.WriteLine("\tWCIŚNIJ <ENTER> aby wyjść z programu\n");
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        Console.ReadLine();
    }
}
/// <summary>
/// Builds the additive-selection page: one toggle button per grid cell labelled
/// with an additive (pre-checked when already on the current order), and a
/// Polish speech grammar containing every additive name plus "Dalej".
/// </summary>
public ChangeAdditives()
{
    InitializeComponent();

    AdditiveList = new List<string>()
    {
        "wołowina", "kiełbasa pepperoni", "zielona papryka", "pieczarki",
        "cebula", "szynka", "ananas", "kurczak", "pomidor"
    };
    Sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pl-PL"));

    // Fill the grid row by row with one toggle button per additive.
    int nextAdditive = 0;
    for (int row = 0; row < GridAdditives.RowDefinitions.Count; row++)
    {
        for (int col = 0; col < GridAdditives.ColumnDefinitions.Count; col++)
        {
            var button = new ToggleButton
            {
                Content = AdditiveList.ElementAt(nextAdditive),
                Margin = new Thickness(10, 10, 10, 10),
            };
            button.Checked += Btn_Check;
            button.Unchecked += Btn_Uncheck;

            // Pre-check buttons for additives already on the current order.
            foreach (var additive in HomePage.OrderList.ElementAt(HomePage.OnTheList).AdditivesList)
            {
                if (additive == button.Content.ToString())
                {
                    button.IsChecked = true;
                }
            }

            AdditiveButtonsList.Add(button);
            Grid.SetColumn(button, col);
            Grid.SetRow(button, row);
            GridAdditives.Children.Add(button);
            nextAdditive++;
        }
    }

    // Vocabulary: every additive name plus the "Dalej" (next) command.
    var vocabulary = new string[AdditiveList.Count + 1];
    for (var i = 0; i < AdditiveList.Count; i++)
    {
        vocabulary[i] = AdditiveList.ElementAt(i);
    }
    vocabulary[AdditiveList.Count] = "Dalej";

    // MICROSOFT SPEECH PLATFORM
    try
    {
        Sre.SetInputToDefaultAudioDevice();
        Sre.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(Sre_SpeechRecognized);
        var builder = new GrammarBuilder();
        builder.Append(new Choices(vocabulary));
        Sre.LoadGrammar(new Grammar(builder));
        Sre.RecognizeAsync(RecognizeMode.Multiple);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        Console.ReadLine();
    }
    // MICROSOFT SPEECH PLATFORM
}
/* This function is responsible for setting up the hueConnect on startup as well as voice commands.
 * It fetches the Hue lights/groups, lists them (and the color choices) in the UI,
 * then builds one grammar per voice command pattern and starts continuous recognition.
 * Fix vs. original: the catch block was completely empty, silently swallowing every
 * setup failure (no microphone, missing recognizer, Hue errors); it now reports them. */
private void InitHueSystem()
{
    /* Get the light list for use later */
    controller.GetLights();
    LightList = controller.GetLightList();
    /* Get the group list for use later */
    controller.GetGroups();
    GroupList = controller.GetGroupList();
    /* Put the color choices in the textbox */
    for (int i = 0; i < AllColors.ColorChoices.Length; i++)
    {
        textBoxColorChoices.Text += AllColors.ColorChoices[i] + Environment.NewLine;
    }
    /* Print the name of the lights to the textbox */
    for (int i = 0; i < LightList.Count; i++)
    {
        textBoxLightNames.Text += LightList[i].Name + Environment.NewLine;
    }
    /* Print the name of the groups to the textbox */
    for (int i = 0; i < GroupList.Count; i++)
    {
        textBoxGroupNames.Text += GroupList[i].Name + Environment.NewLine;
    }
    /* Start the speech recognition setup */
    try
    {
        /* Set the text to speech to the default output */
        ss.SetOutputToDefaultAudioDevice();
        /* Set up the recognition engine */
        CultureInfo info = new CultureInfo("en-us");
        sre = new SpeechRecognitionEngine(info);
        sre.SetInputToDefaultAudioDevice();
        sre.SpeechRecognized += Sre_SpeechRecognized;
        /* Get the names of the lights and put them in the choices variable */
        lightNames = new Choices();
        for (int i = 0; i < LightList.Count; i++)
        {
            lightNames.Add(LightList[i].Name);
        }
        /* Get the names of the groups and put them in the choices variable */
        groupNames = new Choices();
        for (int i = 0; i < GroupList.Count; i++)
        {
            groupNames.Add(GroupList[i].Name);
        }
        /* Get the names of the colors and put them in the choices variable */
        lightColors = new Choices();
        for (int i = 0; i < AllColors.ColorChoices.Count(); i++)
        {
            lightColors.Add(AllColors.ColorChoices[i]);
        }
        /* Range of numbers for brightness (1-100) */
        brightnessRange = new Choices();
        for (int i = 1; i < 101; i++)
        {
            brightnessRange.Add(i.ToString());
        }
        /* Range of numbers for saturation (0-100) */
        saturationRange = new Choices();
        for (int i = 0; i < 101; i++)
        {
            saturationRange.Add(i.ToString());
        }
        /* For turning lights on/off and setting colorloop on/off */
        OnOff = new Choices();
        OnOff.Add("on");
        OnOff.Add("off");

        /* Start building the grammar variables */
        /* For turning lights on or off * EX: Turn light 1 on/off */
        gb_TurnLightOnOff = new GrammarBuilder();
        gb_TurnLightOnOff.Append("Turn");
        gb_TurnLightOnOff.Append(lightNames);
        gb_TurnLightOnOff.Append(OnOff);
        g_TurnLightsOnOff = new Grammar(gb_TurnLightOnOff);
        /* For turning all lights on or off * EX: Turn all on/off */
        gb_TurnAllOnOff = new GrammarBuilder();
        gb_TurnAllOnOff.Append("Turn all");
        gb_TurnAllOnOff.Append(OnOff);
        g_TurnAllOnOff = new Grammar(gb_TurnAllOnOff);
        /* For changing a light color * EX: Change light 1 color blue */
        gb_changeColor = new GrammarBuilder();
        gb_changeColor.Append("Change");
        gb_changeColor.Append(lightNames);
        gb_changeColor.Append("color");
        gb_changeColor.Append(lightColors);
        g_changeColor = new Grammar(gb_changeColor);
        /* For changing all lights color * EX: Change all lights color blue */
        gb_changeAllColors = new GrammarBuilder();
        gb_changeAllColors.Append("Change all lights color");
        gb_changeAllColors.Append(lightColors);
        g_changeAllColors = new Grammar(gb_changeAllColors);
        /* For changing the brightness of a light * EX: Change light 1 brightness 100 percent */
        gb_changeBri = new GrammarBuilder();
        gb_changeBri.Append("Change");
        gb_changeBri.Append(lightNames);
        gb_changeBri.Append("brightness");
        gb_changeBri.Append(brightnessRange);
        gb_changeBri.Append("percent");
        g_changeBri = new Grammar(gb_changeBri);
        /* For changing the saturation of a light * EX: Change light 1 saturation 100 percent */
        gb_changeSat = new GrammarBuilder();
        gb_changeSat.Append("Change");
        gb_changeSat.Append(lightNames);
        gb_changeSat.Append("saturation");
        gb_changeSat.Append(saturationRange);
        gb_changeSat.Append("percent");
        g_changeSat = new Grammar(gb_changeSat);
        /* For turning on/off colorloop * EX: Color cycle on/off */
        gb_colorloop = new GrammarBuilder();
        gb_colorloop.Append("Color cycle");
        gb_colorloop.Append(OnOff);
        g_colorLoop = new Grammar(gb_colorloop);
        /* For turning a single group on/off * EX: Turn group groupname on/off */
        gb_turnGroupOnOff = new GrammarBuilder();
        gb_turnGroupOnOff.Append("Turn group");
        gb_turnGroupOnOff.Append(groupNames);
        gb_turnGroupOnOff.Append(OnOff);
        g_turnGroupOnOff = new Grammar(gb_turnGroupOnOff);
        /* For changing the color of a group * EX: Change group groupname color blue */
        gb_changeGroupColor = new GrammarBuilder();
        gb_changeGroupColor.Append("Change group");
        gb_changeGroupColor.Append(groupNames);
        gb_changeGroupColor.Append("color");
        gb_changeGroupColor.Append(lightColors);
        g_changeGroupColor = new Grammar(gb_changeGroupColor);
        /* For changing the brightness of a group * EX: Change group groupname brightness 100 percent */
        gb_changeGroupBrightness = new GrammarBuilder();
        gb_changeGroupBrightness.Append("Change group");
        gb_changeGroupBrightness.Append(groupNames);
        gb_changeGroupBrightness.Append("brightness");
        gb_changeGroupBrightness.Append(brightnessRange);
        gb_changeGroupBrightness.Append("percent");
        g_changeGroupBrightness = new Grammar(gb_changeGroupBrightness);

        /* Load all grammars into the engine */
        sre.LoadGrammar(g_TurnLightsOnOff);
        sre.LoadGrammar(g_changeColor);
        sre.LoadGrammar(g_changeBri);
        sre.LoadGrammar(g_changeSat);
        sre.LoadGrammar(g_colorLoop);
        sre.LoadGrammar(g_TurnAllOnOff);
        sre.LoadGrammar(g_changeAllColors);
        sre.LoadGrammar(g_changeGroupColor);
        sre.LoadGrammar(g_turnGroupOnOff);
        sre.LoadGrammar(g_changeGroupBrightness);
        /* Start the recognition */
        sre.RecognizeAsync(RecognizeMode.Multiple);
    }
    catch (Exception e)
    {
        // BUG FIX: the original swallowed every exception here silently,
        // making speech/Hue setup failures undiagnosable.
        Console.WriteLine("Speech recognition setup failed: " + e.Message);
    }
}
/// <summary>
/// Creates and starts the Kinect speech recognizer with a fixed command
/// vocabulary. Shows an error dialog and closes the window when the Kinect
/// recognizer is missing or fails to initialize, returning null in that case.
/// </summary>
/// <returns>The running recognizer, or null when setup failed.</returns>
private SpeechRecognitionEngine CreateSpeechRecognizer()
{
    RecognizerInfo ri = GetKinectRecognizer();
    if (ri == null)
    {
        System.Windows.MessageBox.Show(
            @"There was a problem initializing Speech Recognition. Ensure you have the Microsoft Speech SDK installed.",
            "Failed to load Speech SDK",
            MessageBoxButton.OK,
            MessageBoxImage.Error);
        this.Close();
        return null;
    }

    try
    {
        speechRecognizer = new SpeechRecognitionEngine(ri.Id);
    }
    catch
    {
        System.Windows.MessageBox.Show(
            @"There was a problem initializing Speech Recognition. Ensure you have the Microsoft Speech SDK installed and configured.",
            "Failed to load Speech SDK",
            MessageBoxButton.OK,
            MessageBoxImage.Error);
        this.Close();
        return null;
    }

    // Fixed command vocabulary.
    var commandWords = new Choices(new string[]
    {
        "start", "pick", "select", "a", "navigate",
        "b", "move", "stop", "hold", "synchronize"
    });

    var builder = new GrammarBuilder();
    builder.Append(commandWords);

    // Create the actual Grammar instance, and then load it into the speech recognizer.
    speechRecognizer.LoadGrammar(new Grammar(builder));

    speechRecognizer.SpeechRecognized += this.SreSpeechRecognized;
    speechRecognizer.SpeechHypothesized += SreSpeechHypothesized;
    speechRecognizer.SpeechRecognitionRejected += SreSpeechRecognitionRejected;

    // Kinect microphone array: adaptive beam, no echo cancellation, and
    // automatic gain control off (important for speech recognition).
    kinectSource = kinectSensorT.AudioSource;
    kinectSource.BeamAngleMode = BeamAngleMode.Adaptive;
    kinectSource.EchoCancellationMode = EchoCancellationMode.None;
    kinectSource.AutomaticGainControlEnabled = false;
    st = kinectSource.Start();

    speechRecognizer.SetInputToAudioStream(st,
        new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
    speechRecognizer.RecognizeAsync(RecognizeMode.Multiple);
    return speechRecognizer;
}
/// <summary>
/// Allows the game to perform any initialization it needs to before starting to run.
/// This is where it can query for any required services and load any non-graphic
/// related content. Calling base.Initialize will enumerate through any components
/// and initialize them as well.
/// Sets up the Kinect skeleton pipeline, builds a speech grammar for the model
/// names ("lander", "flowers", "car"), starts recognition over the Kinect audio
/// stream, and initializes camera/hand-tracking state.
/// Fix vs. original: the console prompt claimed the valid words were
/// 'red'/'green'/'blue', contradicting the grammar actually loaded.
/// </summary>
protected override void Initialize()
{
    //initialize Kinect SDK
    nui = Runtime.Kinects[0];
    nui.Initialize(RuntimeOptions.UseDepthAndPlayerIndex | RuntimeOptions.UseSkeletalTracking);
    nui.SkeletonEngine.TransformSmooth = true;
    TransformSmoothParameters param = new TransformSmoothParameters();
    param.Smoothing = 0.2f;
    param.Correction = 0.0f;
    param.Prediction = 0.0f;
    param.JitterRadius = 0.2f;
    param.MaxDeviationRadius = 0.3f;
    nui.SkeletonEngine.SmoothParameters = param;
    nui.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(nui_SkeletonFrameReady);

    //setup audio
    RecognizerInfo ri = GetKinectRecognizer();
    if (ri == null)
    {
        Console.WriteLine("Could not find Kinect speech recognizer. Please refer to the sample requirements.");
        return;
    }
    Console.WriteLine("Using: {0}", ri.Name);
    // NOTE(review): the default-constructed engine ignores ri.Id found above —
    // confirm whether new SpeechRecognitionEngine(ri.Id) was intended.
    sre = new SpeechRecognitionEngine();

    // Grammar: the three selectable model names.
    var models = new Choices();
    models.Add("lander");
    models.Add("flowers");
    models.Add("car");
    var gb = new GrammarBuilder();
    //Specify the culture to match the recognizer in case we are running in a different culture.
    gb.Culture = ri.Culture;
    gb.Append(models);

    // Create the actual Grammar instance, and then load it into the speech recognizer.
    var g = new Grammar(gb);
    sre.LoadGrammar(g);
    sre.SpeechRecognized += SreSpeechRecognized;
    sre.SpeechHypothesized += SreSpeechHypothesized;
    sre.SpeechRecognitionRejected += SreSpeechRecognitionRejected;

    source = new KinectAudioSource();
    source.FeatureMode = true;
    source.AutomaticGainControl = false;
    source.SystemMode = SystemMode.OptibeamArrayOnly;
    Stream s = source.Start();
    sre.SetInputToAudioStream(s, new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
    // BUG FIX: the prompt now names the words the grammar actually accepts.
    Console.WriteLine("Recognizing. Say: 'lander', 'flowers' or 'car'. Press ENTER to stop");
    sre.RecognizeAsync(RecognizeMode.Multiple);

    handPos[0] = new Vector3(0.0f, 0.0f, 0.0f);
    handPos[1] = new Vector3(0.0f, 0.0f, 0.0f);

    //set up angles
    shiftAngleRemote1 = shiftAngleLocal + 115.3733f;
    shiftAngleRemote2 = shiftAngleLocal - 93.9733f;

    //initialize hand angles
    prevHandYAngle = 0.0f;
    prevHandXAngle = 0.0f;

    //for sphere
    distortion = new Distortion(Vector3.Zero, Vector3.Backward, Vector3.Up, 1, 1);
    //for cylinder
    //distortion = new Distortion(Vector3.Zero, Vector3.Backward, Vector3.Up, 1, 1, distortionShift, 4);

    View = Matrix.CreateLookAt(new Vector3(0, 0, 2), Vector3.Zero, Vector3.Up);
    Projection = Matrix.CreatePerspectiveFieldOfView(
        MathHelper.PiOver4, 16.0f / 9.0f, 1, 500);

    //setupOSC();
    base.Initialize();
}
/// <summary>
/// "Call God" button handler: locks the button, builds a grammar of the god
/// phrases, and starts continuous recognition on the default microphone.
/// Fix vs. original: on setup failure the engine is now disposed instead of
/// being leaked by a bare `catch { return; }`.
/// </summary>
public void button1_Click(object sender, EventArgs e)
{
    button1.Enabled = false;
    button1.Text = "God Called";
    label2.Text = "The god is listening...";
    label2.ForeColor = Color.Red;

    SpeechRecognitionEngine GodListener = new SpeechRecognitionEngine();
    Choices GodList = new Choices();
    GodList.Add(new string[]
    {
        "Make toast", "Make me toast", "Make me some toast", "Make me immortal",
        "Make rain", "call rain", "call the rain", "make it rain",
        "wink out of existence", "begone", "go now", "wink yourself out of existence"
    });
    GrammarBuilder gb = new GrammarBuilder();
    gb.Append(GodList);
    Grammar GodGrammar = new Grammar(gb);
    GodListener.MaxAlternates = 2;

    try
    {
        GodListener.RequestRecognizerUpdate();
        GodListener.LoadGrammar(GodGrammar);
        GodListener.SetInputToDefaultAudioDevice();
        GodListener.SpeechRecognized += GodListener_SpeechRecognized;
        GodListener.AudioStateChanged += GodListener_AudioStateChanged;
        GodListener.AudioLevelUpdated += GodListener_AudioLevelUpdated;
        GodListener.RecognizeAsync(RecognizeMode.Multiple);
    }
    catch
    {
        // BUG FIX: release the engine when setup fails (the original silently
        // returned and leaked it).
        GodListener.Dispose();
        return;
    }
}