Example #1
        private void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            // Acknowledge the wake word with the Korean server TTS voice.
            Microsoft.Speech.Synthesis.SpeechSynthesizer ms = new Microsoft.Speech.Synthesis.SpeechSynthesizer();
            ms.SetOutputToDefaultAudioDevice();
            ms.SelectVoice("Microsoft Server Speech Text to Speech Voice (ko-KR, Heami)");
            switch (e.Result.Text)
            {
                case "마마고":   // wake word
                {
                    MaMaState(true);
                    ms.Speak("네 말씀하세요");   // "Yes, go ahead"

                    // Second engine for the follow-up commands: dictate, cancel,
                    // and the "translate to <language>" phrases.
                    sre2 = new SpeechRecognitionEngine(new CultureInfo("ko-KR"));

                    Choices c = new Choices();
                    c.Add(new string[] { "입력해줘", "아니야", "영어로번역해줘", "한국어로번역해줘", "일본어로번역해줘",
                                         "독일어로번역해줘", "스페인어로번역해줘", "프랑스어로번역해줘",
                                         "러시아어로번역해줘", "이탈리아어로번역해줘", "중국어번체로번역해줘", "중국어간체로번역해줘" });
                    Microsoft.Speech.Recognition.Grammar g = new Microsoft.Speech.Recognition.Grammar(new GrammarBuilder(c));
                    sre2.LoadGrammar(g);

                    sre2.RequestRecognizerUpdate();
                    sre2.SpeechRecognized += doWhat;
                    sre2.SetInputToDefaultAudioDevice();
                    sre2.RecognizeAsync(RecognizeMode.Multiple);
                    break;
                }
            }
            ms.Dispose();
        }
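Example #1 subscribes sre2.SpeechRecognized to a doWhat handler that is not part of the snippet. A minimal sketch of such a handler, reusing MaMaState from above and a hypothetical Translate helper for the actual translation call, might look like this:

        // Sketch only: the original doWhat handler is not shown. MaMaState comes from the
        // snippet above; Translate(...) is a hypothetical helper standing in for the real call.
        private void doWhat(object sender, SpeechRecognizedEventArgs e)
        {
            switch (e.Result.Text)
            {
                case "아니야":              // "no / never mind": leave command mode
                    MaMaState(false);
                    sre2.RecognizeAsyncCancel();
                    break;
                case "영어로번역해줘":      // "translate it to English"
                    Translate("en");
                    break;
                // ...the remaining "translate to <language>" phrases dispatch the same way.
            }
        }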
Example #2
        static void Main(string[] args)
        {
            // Default keyphrase file; an alternative path may be passed as the first argument.
            var keyphrasesFile = "keyphrases.txt";

            if (args.Length != 0)
            {
                keyphrasesFile = args[0];
            }

            // Use the first installed recognizer whose culture is English.
            RecognizerInfo info = null;

            foreach (var ri in SpeechRecognitionEngine.InstalledRecognizers())
            {
                if (ri.Culture.TwoLetterISOLanguageName.Equals("en"))
                {
                    info = ri;
                    break;
                }
            }
            if (info == null)
            {
                return;   // no English recognizer is installed
            }

            using (_recognizer = new SpeechRecognitionEngine(info))
            {
                var keyphrases = new Choices(getChoices(keyphrasesFile));

                var gb = new GrammarBuilder(keyphrases)
                {
                    Culture = info.Culture
                };

                // Create the Grammar instance.
                var g = new Grammar(gb)
                {
                    Name = "Keyphrases"
                };

                _recognizer.RequestRecognizerUpdate();
                _recognizer.LoadGrammar(g);
                _recognizer.SpeechRecognized          += recognizer_SpeechRecognized;
                _recognizer.SpeechRecognitionRejected += recognizer_SpeechNotRecognized;
                _recognizer.SetInputToDefaultAudioDevice();
                _recognizer.RecognizeAsync(RecognizeMode.Multiple);
                // Keep the console process alive while recognition runs asynchronously.
                while (true)
                {
                    Console.ReadLine();
                }
            }
        }
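Main relies on three members the snippet leaves out: getChoices, recognizer_SpeechRecognized and recognizer_SpeechNotRecognized. The implementations below are plausible stand-ins, assuming keyphrases.txt holds one phrase per line and that using System.IO and using System.Linq are in scope:

        // Stand-ins only: the original helpers are not shown, and the file format is assumed.
        static string[] getChoices(string keyphrasesFile)
        {
            // Skip blank lines so Choices never receives an empty string.
            return File.ReadAllLines(keyphrasesFile)
                       .Where(line => !string.IsNullOrWhiteSpace(line))
                       .ToArray();
        }

        static void recognizer_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            Console.WriteLine("Recognized: {0} (confidence {1:F2})", e.Result.Text, e.Result.Confidence);
        }

        static void recognizer_SpeechNotRecognized(object sender, SpeechRecognitionRejectedEventArgs e)
        {
            Console.WriteLine("Speech not recognized.");
        }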
Example #3
        private void Gramatica()
        {
            try
            {
                // Create the recognizer for the culture stored in "ci".
                sr = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(ci);
            }
            catch (Exception ex)
            {
                MessageBox.Show("Error: " + ex.Message);
                return;   // without an engine there is nothing left to set up
            }

            // Build a grammar from the word list in "words".
            var gramatica = new Microsoft.Speech.Recognition.Choices();
            gramatica.Add(words);

            var gb = new Microsoft.Speech.Recognition.GrammarBuilder();
            gb.Append(gramatica);

            try
            {
                var g = new Microsoft.Speech.Recognition.Grammar(gb);

                sr.RequestRecognizerUpdate();
                sr.LoadGrammarAsync(g);
                sr.SpeechRecognized += Sr_SpeechRecognized;
                sr.SetInputToDefaultAudioDevice();
                ss.SetOutputToDefaultAudioDevice();   // synthesizer used to answer back
                sr.RecognizeAsync(Microsoft.Speech.Recognition.RecognizeMode.Multiple);
            }
            catch (Exception ex)
            {
                MessageBox.Show("Error: " + ex.Message);
            }
        }
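Gramatica uses fields declared elsewhere in the class (ci, words, sr, ss) and the Sr_SpeechRecognized handler. The declarations below are only a sketch with assumed values (a pt-BR culture and a short word list), not the original code:

        // Assumed declarations; the real culture, word list and handler body are not shown.
        private System.Globalization.CultureInfo ci = new System.Globalization.CultureInfo("pt-BR");
        private string[] words = { "abrir", "fechar", "sair" };
        private Microsoft.Speech.Recognition.SpeechRecognitionEngine sr;
        private Microsoft.Speech.Synthesis.SpeechSynthesizer ss = new Microsoft.Speech.Synthesis.SpeechSynthesizer();

        private void Sr_SpeechRecognized(object sender, Microsoft.Speech.Recognition.SpeechRecognizedEventArgs e)
        {
            // Echo the recognized command so the user gets audible feedback.
            ss.SpeakAsync("You said: " + e.Result.Text);
        }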
Example #4
        public void initRS()
        {
            try
            {
                // Wake-word engine: listens only for "마마고" in Korean.
                sre = new SpeechRecognitionEngine(new CultureInfo("ko-KR"));

                Choices c = new Choices();
                c.Add(new string[] { "마마고" });
                Microsoft.Speech.Recognition.Grammar g = new Microsoft.Speech.Recognition.Grammar(new GrammarBuilder(c));
                sre.LoadGrammar(g);

                // sre_SpeechRecognized (Example #1) reacts to the wake word and builds the command grammar.
                sre.RequestRecognizerUpdate();
                sre.SpeechRecognized += sre_SpeechRecognized;
                sre.SetInputToDefaultAudioDevice();
                sre.RecognizeAsync(RecognizeMode.Multiple);
            }
            catch (Exception e)
            {
                MessageBox.Show("init RS Error : " + e.ToString());
            }
        }
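initRS only starts the wake-word engine; none of the snippets show a teardown. A counterpart that stops and releases both engines, assuming the sre and sre2 fields used above, could be:

        // Hypothetical teardown counterpart to initRS; the original code does not include one.
        public void stopRS()
        {
            if (sre2 != null)
            {
                sre2.RecognizeAsyncCancel();   // stop the follow-up command engine
                sre2.Dispose();
                sre2 = null;
            }
            if (sre != null)
            {
                sre.RecognizeAsyncCancel();    // stop the wake-word engine
                sre.Dispose();
                sre = null;
            }
        }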
Example #5
        public MainForm()
        {
            InitializeComponent();

            // BR recognition engine.
            SREngine.OnRecognition += handleResult;

            // US recognition engine: grammar built from the getDATABASE() entries.
            microsftEngine.SetInputToDefaultAudioDevice();
            Choices programs = new Choices();
            programs.Add(getDATABASE());
            GrammarBuilder gb = new GrammarBuilder();
            gb.Append(programs);
            Microsoft.Speech.Recognition.Grammar g = new Microsoft.Speech.Recognition.Grammar(gb);
            microsftEngine.LoadGrammar(g);
            microsftEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);

            // BR speaker (text-to-speech output).
            SpeechEngine.SpeakStarted += new EventHandler<SpeakStartedEventArgs>(handleSpeechStarted);
            SpeechEngine.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(handleSpeechDone);
            SpeechEngine.SetOutputToDefaultAudioDevice();

            open = false;
            close = false;
            lapsamOn = true;
        }
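The constructor feeds getDATABASE() into the US grammar, but that helper is not shown. A fixed-array stand-in, purely for illustration, would be:

        // Stand-in for the getDATABASE() helper referenced above; the real source of the
        // phrase list is not shown, so a fixed array is used here only as an example.
        private string[] getDATABASE()
        {
            return new string[] { "open notepad", "open calculator", "close window" };
        }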
Example #6
        /// <summary>
        /// Event method fired when window is loaded
        /// </summary>
        void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            if (KinectSensor.KinectSensors.Count > 0)
            {
                _sensor = KinectSensor.KinectSensors[0];
                if (_sensor.Status == KinectStatus.Connected)
                {
                    // Enable the color, depth and skeleton streams before starting the sensor.
                    _sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                    _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                    _sensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(_sensor_AllFramesReady);
                    _sensor.SkeletonStream.Enable();

                    _sensor.Start();
                }

                RecognizerInfo ri = GetKinectRecognizer();

                if (null != ri)
                {
                    this.speechEngine = new SpeechRecognitionEngine(ri.Id);

                    // Create a grammar from the grammar definition XML file at _grammarPath.
                    var g = new Microsoft.Speech.Recognition.Grammar(_grammarPath);
                    speechEngine.LoadGrammar(g);

                    speechEngine.SpeechRecognized += SpeechRecognized;
                    speechEngine.SpeechRecognitionRejected += SpeechRejected;

                    // Feed the Kinect microphone array to the recognizer as 16 kHz, 16-bit mono PCM.
                    speechEngine.SetInputToAudioStream(
                        _sensor.AudioSource.Start(), new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
                    speechEngine.RecognizeAsync(RecognizeMode.Multiple);
                }
                else
                {
                    Console.WriteLine("No speech recognition engine installed!");
                }
            }
        }
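GetKinectRecognizer() is not included in the snippet. The Kinect SDK samples implement it roughly as follows; restricting the match to en-US is an assumption carried over from those samples:

        /// <summary>
        /// Finds the installed recognizer that uses the Kinect acoustic model,
        /// or returns null when none is available.
        /// </summary>
        private static RecognizerInfo GetKinectRecognizer()
        {
            foreach (RecognizerInfo recognizer in SpeechRecognitionEngine.InstalledRecognizers())
            {
                string value;
                recognizer.AdditionalInfo.TryGetValue("Kinect", out value);
                if ("True".Equals(value, StringComparison.OrdinalIgnoreCase)
                    && "en-US".Equals(recognizer.Culture.Name, StringComparison.OrdinalIgnoreCase))
                {
                    return recognizer;
                }
            }
            return null;
        }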