public SpeechRecognizerServer(string moduleName)
        {
            System.Collections.ObjectModel.ReadOnlyCollection<RecognizerInfo> installedRecognizers = SpeechRecognitionEngine.InstalledRecognizers();

            //Synchronous Recognition
            m_reco = new System.Speech.Recognition.SpeechRecognitionEngine(myLanguage);

            Network.init();
            m_moduleName = moduleName;

            //TTS
            m_tts = new System.Speech.Synthesis.SpeechSynthesizer();
            m_portISpeak = new Port();
            m_portISpeak.open("/" + moduleName + "/tts/iSpeak:o");
            Network.connect("/" + moduleName + "/tts/iSpeak:o", "/iSpeak");

            //Grammars
            GrammarBuilder dictation = new GrammarBuilder();
            dictation.Culture = myLanguage;
            dictation.AppendDictation();
            m_grammar_dictation = new Grammar(dictation);
            GrammarBuilder spelling = new GrammarBuilder();
            spelling.Culture = myLanguage;
            spelling.AppendDictation("spelling");
            m_grammar_dictation_spelling = new Grammar(spelling);
            m_grammar_continuous = new GrammarBuilder("For sure this non empty grammar will never be recognized.");

            m_reco.SetInputToDefaultAudioDevice();
            m_reco.LoadGrammar(m_grammar_dictation);

            //Continuous Recognition
            m_reco_continuous = new SpeechRecognitionEngine();
            m_reco_continuous.SetInputToDefaultAudioDevice();
            m_portContinuousRecognition = new Port();
            m_portContinuousRecognition.open("/" + moduleName + "/recog/continuous:o");
            m_reco_continuous.LoadGrammar(new Grammar(m_grammar_continuous));
            m_reco_continuous.RecognizeCompleted += onContinuousRecognitionResult;
            m_reco_continuous.RecognizeAsync();

            m_grammarManager = new RobotGrammarManager();
            m_grammarManager.InitialiseVocabulories();
            SetLanguage("EN-us");
            //SetLanguage("fr-fr");

            Console.WriteLine("#########################");
            Console.WriteLine("#    Speech Recognizer  #");
            Console.WriteLine("#########################");

            Network.init();
            m_rpcPort = new Port();
            m_rpcPort.open("/" + m_moduleName + "/rpc");
            m_rpcThread = new System.Threading.Thread(HandleRPC);
            m_rpcThread.Start();
        }
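
        // A minimal sketch (assumed, not part of the original listing) of the
        // RecognizeCompleted handler wired up above. It assumes the single-shot
        // RecognizeAsync() call used in the constructor, so recognition is restarted
        // after every completed pass.
        private void onContinuousRecognitionResult(object sender, RecognizeCompletedEventArgs e)
        {
            // Do not restart if recognition was cancelled (e.g. during SetLanguage) or failed.
            if (e.Cancelled || e.Error != null)
                return;

            // e.Result is null when nothing matched the loaded grammar.
            if (e.Result != null)
                Console.WriteLine("Continuous recognition: " + e.Result.Text);

            // Keep listening by starting another asynchronous recognition pass.
            m_reco_continuous.RecognizeAsync();
        }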
Example #2
        public Form1()
        {
            Choices colors = new Choices();
            colors.Add(new string[] { "how are you", "fine", "blue" });

            GrammarBuilder gb = new GrammarBuilder();
            gb.Append(colors);

            // Create the Grammar instance.
            System.Speech.Recognition.Grammar g = new System.Speech.Recognition.Grammar(gb);
            sr.LoadGrammar(g);
            sr.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(sr_SpeechRecognized);

            InitializeComponent();
        }
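
        // A minimal sketch (assumed, not shown in the original listing) of the
        // sr_SpeechRecognized handler registered above; sr is presumed to be a
        // SpeechRecognitionEngine field of the form.
        void sr_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            // e.Result.Text contains the phrase that matched the loaded grammar.
            MessageBox.Show("Recognized: " + e.Result.Text);
        }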
Example #4
        private static void wreck_a_nice_beach()
        {
            var sre = new SSR.SpeechRecognitionEngine();
            sre.SetInputToDefaultAudioDevice();
            sre.UnloadAllGrammars();

            var gb1 = new SSR.GrammarBuilder();
            gb1.Append(new SSR.Choices("cut", "copy", "paste", "delete", "quit"));


            var g1 = new SSR.Grammar(gb1);
            sre.LoadGrammar(g1);

            sre.SpeechRecognized += SreOnSpeechRecognized;
            sre.SpeechDetected += SreOnSpeechDetected;
            sre.SpeechHypothesized += SreOnSpeechHypothesized;
            sre.SpeechRecognitionRejected += SreOnSpeechRecognitionRejected;
            sre.AudioSignalProblemOccurred += SreOnAudioSignalProblemOccurred;

            sre.RecognizeAsync(SSR.RecognizeMode.Multiple);
        }
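
        // The SSR prefix used above is presumably a namespace alias such as
        //     using SSR = System.Speech.Recognition;
        // A minimal sketch (assumed) of one of the handlers registered above:
        private static void SreOnSpeechRecognized(object sender, SSR.SpeechRecognizedEventArgs e)
        {
            // Confidence is a float in [0, 1]; low values usually indicate a poor match.
            Console.WriteLine("Recognized \"" + e.Result.Text + "\" (confidence " + e.Result.Confidence + ")");
        }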
Example #5
        private void loadGrammar()
        {
            sre.UnloadAllGrammars();
            htWords.Clear();

            // Dispose the reader when done; cap the file at 10000 lines.
            using (StreamReader sr = File.OpenText(strGrammarFile))
            {
                int icnt = 0;
                while (!sr.EndOfStream && icnt < 10000)
                {
                    string strLine = sr.ReadLine();
                    if (!string.IsNullOrEmpty(strLine))
                    {
                        // One phrase per line: each line becomes its own grammar.
                        SSR.GrammarBuilder gb = new System.Speech.Recognition.GrammarBuilder();
                        gb.Append(strLine);
                        SSR.Grammar gram = new System.Speech.Recognition.Grammar(gb);
                        sre.LoadGrammar(gram);

                        htWords.Add(htWords.Count, strLine.ToLower());
                    }
                    icnt++;
                }
            }
        }
Example #7
        private void SpeechToText()
        {
            // Configure the input to the recognizer.
            sre.SetInputToDefaultAudioDevice();

            // Create a simple grammar from the phrases held in the dictionary list.
            recog.Choices queries = new recog.Choices();
            queries.Add(dictionary);

            // Create a GrammarBuilder object and append the Choices object.
            recog.GrammarBuilder gb = new recog.GrammarBuilder();
            gb.Append(queries);

            // Create the Grammar instance and load it into the speech recognition engine.
            recog.Grammar g = new recog.Grammar(gb);
            sre.LoadGrammar(g);

            // Register a handler for the SpeechRecognized event.
            sre.SpeechRecognized += new EventHandler<recog.SpeechRecognizedEventArgs>(sre_SpeechRecognized);

            // Start recognition.
            sre.Recognize();
        }
 private sp.Grammar GeneratePauseGrammar()
 {
     sp.Choices choices = new sp.Choices();
     foreach (string pausePhrase in this.currentProfile.PauseRecognitionPhrases)
     {
         sp.SemanticResultValue temp = new sp.SemanticResultValue(pausePhrase, "PauseVoiceRecognitionCommand");
         choices.Add(temp);
     }
     foreach (string unpausePhrase in this.currentProfile.UnpauseRecognitionPhrases)
     {
         sp.SemanticResultValue temp = new sp.SemanticResultValue(unpausePhrase, "UnpauseVoiceRecognitionCommand");
         choices.Add(temp);
     }
     sp.GrammarBuilder builder = new sp.GrammarBuilder();
     builder.Append(new sp.SemanticResultKey("command", choices));
     sp.Grammar grammar = new sp.Grammar(builder);
     grammar.Name = "PauseCommands";
     return grammar;
 }
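
 // A minimal sketch (assumed, not from the original source) of reading the semantic
 // key attached above inside a SpeechRecognized handler: every pause/unpause phrase
 // carries its command name as the value of the "command" key.
 private void OnPauseCommandRecognized(object sender, sp.SpeechRecognizedEventArgs e)
 {
     if (e.Result.Grammar == null || e.Result.Grammar.Name != "PauseCommands")
         return;

     string command = (string)e.Result.Semantics["command"].Value;
     if (command == "PauseVoiceRecognitionCommand")
         Console.WriteLine("Pausing voice recognition.");
     else if (command == "UnpauseVoiceRecognitionCommand")
         Console.WriteLine("Resuming voice recognition.");
 }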
        public void LoadProfile(Profile profile)
        {
            this.currentProfile = profile;
            if (this.actions != null)
                this.actions.Clear();

            this.currentProfile.UpdateGrammar();

            List<UpdateOperation> ops = new List<UpdateOperation>();
            foreach (sp.Grammar gram in this.engine.Grammars)
            {
                ops.Add(new UpdateOperation()
                {
                    UpdateType = UpdateOperationType.RemoveGrammar,
                    Grammar = gram
                });
            }
            ops.Add(new UpdateOperation()
            {
                UpdateType = UpdateOperationType.AddGrammar,
                Grammar = this.currentProfile.Grammar,
                AssociatedActions = this.currentProfile.Actions
            });

            this.pauseGrammar = null;
            if (this.currentProfile.EnableVoicePausing)
            {
                this.pauseGrammar = GeneratePauseGrammar();
                ops.Add(new UpdateOperation()
                {
                    UpdateType = UpdateOperationType.AddGrammar,
                    Grammar = pauseGrammar
                });
            }

            this.ExecuteGrammarChanges(ops);
        }
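
        // UpdateOperation and UpdateOperationType are project-specific helpers that are
        // not shown in this listing; a shape consistent with the usage above would be
        // roughly the following (an assumption, not the actual definitions):
        //
        //     enum UpdateOperationType { AddGrammar, RemoveGrammar }
        //
        //     class UpdateOperation
        //     {
        //         public UpdateOperationType UpdateType { get; set; }
        //         public sp.Grammar Grammar { get; set; }
        //         public object AssociatedActions { get; set; }   // type of currentProfile.Actions, not shown here
        //     }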
        public bool SetLanguage(string cultureName)
        {
            //System.Globalization.CultureInfo[] cultures = System.Globalization.CultureInfo.GetCultures(System.Globalization.CultureTypes.AllCultures);
            System.Globalization.CultureInfo culture;
            try
            {
                culture = System.Globalization.CultureInfo.GetCultureInfoByIetfLanguageTag(cultureName);
            }
            catch
            {
                Console.WriteLine("Culture info is not found.");
                return false;
            }
            myLanguage = culture;

            System.Collections.ObjectModel.ReadOnlyCollection<InstalledVoice> voices = m_tts.GetInstalledVoices(culture);
            if (voices.Count > 0)
                m_tts.SelectVoice(voices.First().VoiceInfo.Name);

            m_reco = new System.Speech.Recognition.SpeechRecognitionEngine(culture);

            m_reco.SetInputToDefaultAudioDevice();
            GrammarBuilder dictation = new GrammarBuilder();
            dictation.Culture = myLanguage;
            dictation.AppendDictation();
            m_grammar_dictation = new Grammar(dictation);

            m_reco.LoadGrammar(m_grammar_dictation);

            m_reco_continuous.RecognizeCompleted -= onContinuousRecognitionResult;
            m_reco_continuous.RecognizeAsyncCancel();
            //m_reco_continuous.RecognizeAsyncStop();
            m_reco_continuous = new SpeechRecognitionEngine(culture);
            m_reco_continuous.SetInputToDefaultAudioDevice();
            m_grammar_continuous.Culture = culture;
            m_reco_continuous.LoadGrammar(new Grammar(m_grammar_continuous));
            m_reco_continuous.RecognizeCompleted += onContinuousRecognitionResult;
            m_reco_continuous.RecognizeAsync();

            m_grammarManager.SetLanguage(cultureName);

            Console.WriteLine("The culture has been set to " + cultureName);
            return true;
        }
        public RecognitionResult Recog_Grammar_XML(string grammarDef, int timeout)
        {
            Console.WriteLine("Recognition XML loaded.");
            string xmlFormattedGrammar = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?> <grammar xmlns=\"http://www.w3.org/2001/06/grammar\" xml:lang=\""+m_reco.RecognizerInfo.Culture.Name+"\" version=\"1.0\">";
            //grammarDef += "<rule id=\"request\" scope=\"public\">Je veux acheter de la <one-of> <item>vodka</item><item>tequila</item></one-of></rule>";
            xmlFormattedGrammar += grammarDef;
            xmlFormattedGrammar += "</grammar>";

            // Match the encoding declared in the XML header (ISO-8859-1) so that
            // accented characters in non-English grammars are preserved.
            byte[] byteArray = Encoding.GetEncoding("ISO-8859-1").GetBytes(xmlFormattedGrammar);
            MemoryStream stream = new MemoryStream( byteArray );
            m_grammar_simple = new Grammar(stream, "request");
            m_reco.UnloadAllGrammars();
            m_reco.LoadGrammar(m_grammar_simple);

            RecognitionResult result = m_reco.Recognize(TimeSpan.FromMilliseconds(timeout));
            if (result != null)
                Console.WriteLine("Recognized : " + result.Text);
            else
                Console.WriteLine("Recognize Failure.");
            return result;
        }
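
        // Example usage (hypothetical rule text, mirroring the commented-out sample inside
        // the method). The root rule id must be "request", because that is the rule name
        // passed to the Grammar(Stream, string) constructor above.
        //
        //     string rule = "<rule id=\"request\" scope=\"public\"> <one-of> <item>yes</item> <item>no</item> </one-of> </rule>";
        //     RecognitionResult r = Recog_Grammar_XML(rule, 10000);   // 10 s timeout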
        public RecognitionResult Recog_Choices(List<string> choices, int timeout)
        {
            Console.Write("Recognition Choices Grammar loaded : ");

            foreach (string s in choices)
                Console.Write(s);
            Console.WriteLine();
            Choices c = new Choices(choices.ToArray());
            GrammarBuilder cBuild = c.ToGrammarBuilder();
            cBuild.Culture = m_reco.RecognizerInfo.Culture;
            m_reco.UnloadAllGrammars();
            m_grammar_simple = new Grammar(cBuild);

            m_reco.LoadGrammar(m_grammar_simple);
            RecognitionResult result = m_reco.Recognize(TimeSpan.FromMilliseconds(timeout));
            if (result != null)
                Console.WriteLine("Recognized : " + result.Text);
            else
                Console.WriteLine("Recognize Failure.");
            return result;
        }
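
        // Example usage (hypothetical): listen for one of three fixed answers for up to 5 s.
        //
        //     RecognitionResult r = Recog_Choices(new List<string> { "yes", "no", "maybe" }, 5000);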
 public void UnloadGrammar(Grammar grammar)
 {
     RecoBase.UnloadGrammar(grammar);
 }
 public void LoadGrammarAsync(Grammar grammar)
 {
     RecoBase.LoadGrammarAsync(grammar);
 }
 public void LoadGrammar(Grammar grammar)
 {
     RecoBase.LoadGrammar(grammar);
 }