Example #1
 private void button1_Click(object sender, EventArgs e)
 {
     this.button1.BackColor = System.Drawing.Color.LightGreen;
     this.button2.BackColor = System.Drawing.Color.White;
     button1.Enabled        = false;
     try
     {
         sre.Dispose();
         sre = new SpeechRecognitionEngine(new CultureInfo("ko-KR"));
         sre.LoadGrammar(g);
          sre.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);
         sre.SetInputToDefaultAudioDevice();
         sre.RecognizeAsync(RecognizeMode.Single);
     }
     catch (Exception w)
     {
         label1.Text = "init RS Error : " + w.ToString();
         sre.Dispose();
         sre = new SpeechRecognitionEngine(new CultureInfo("ko-KR"));
         sre.LoadGrammar(g);
          sre.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);
         sre.SetInputToDefaultAudioDevice();
         sre.RecognizeAsync(RecognizeMode.Single);
     }
 }
Example #2
        public bool Open(System.Globalization.CultureInfo ctp)
        {
            ct = ctp;

            if (engine != null)
            {
                engine.Dispose();
            }

            try
            {
                engine = new SpeechRecognitionEngine(ct); // may throw if no recognizer for ct is installed on this machine
                engine.SetInputToDefaultAudioDevice();    // throws if there is no default audio device
            }
            catch
            {
                engine = null;
                return(false);
            }

            engine.SpeechRecognized          += Engine_SpeechRecognized;
            engine.SpeechHypothesized        += Engine_SpeechHypothesized;
            engine.SpeechRecognitionRejected += Engine_SpeechRecognitionRejected;

            //System.Diagnostics.Debug.WriteLine("Engine {0}", engine.RecognizerInfo.Description);
            //foreach (var x in engine.RecognizerInfo.AdditionalInfo)
            //    System.Diagnostics.Debug.WriteLine(".. " + x.Key + "=" + x.Value);

            return(true);
        }
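The two inline comments above flag the real failure modes: the constructor throws when no recognizer for the requested culture is installed, and SetInputToDefaultAudioDevice throws when there is no default audio device. A minimal pre-check sketch using only the standard System.Speech API is shown below; the helper name is a hypothetical addition, not part of the original project:

        using System.Globalization;
        using System.Linq;
        using System.Speech.Recognition;

        // Hypothetical guard: true when a recognizer for the requested culture is installed,
        // so new SpeechRecognitionEngine(culture) should not throw for that reason.
        static bool RecognizerInstalledFor(CultureInfo culture)
        {
            return SpeechRecognitionEngine.InstalledRecognizers()
                                          .Any(info => info.Culture.Equals(culture));
        }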
Example #3
 private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
 {
     // unhook events
     speechRecognitionEngine.RecognizeAsyncStop();
     // clean references
     speechRecognitionEngine.Dispose();
 }
Example #4
 private void Emploi_FormClosing(object sender, FormClosingEventArgs e)
 {
     if (sre != null)
     {
         sre.Dispose();
     }
 }
Example #5
 void IDisposable.Dispose()
 {
     if (UnitySpeechRecognitionEngine != null)
     {
         try
         {
             UnitySpeechRecognitionEngine.RecognizeAsyncStop();
             UnitySpeechRecognitionEngine.Dispose();
             UnitySpeechRecognitionEngine = null;
         }
         catch (Exception ex)
         {
              UnityModule.DebugPrint("Error while disposing the speech recognition engine: {0}", ex.Message);
         }
     }
     if (UnitySpeechSynthesizer != null)
     {
         try
         {
             UnitySpeechSynthesizer.SpeakAsyncCancelAll();
              UnitySpeechSynthesizer.Dispose();
              UnitySpeechSynthesizer = null;
         }
         catch (Exception ex)
         {
              UnityModule.DebugPrint("Error while disposing the speech synthesizer: {0}", ex.Message);
         }
     }
     GC.SuppressFinalize(this);
 }
Example #6
 private void Form1_Leave(object sender, EventArgs e)
 {
     if (myPort.IsOpen)
     {
         myPort.Close();
         engine.Dispose();
         timerFan.Stop();
         timerSensor.Stop();
         zara.Stop();
     }
 }
Example #7
        // Method on button which starts recording
        private void record_Click(object sender, EventArgs e)
        {
            if (global != null)
            {
                global.Dispose();
            }

            recording_timer.Start();
            start = DateTime.Now;
            initRecord(false);

            sourceStream.StartRecording();
        }
Example #8
        static void Main(string[] args)
        {
            Console.WriteLine("Enter your name :");
            Name = Console.ReadLine();
            while (true)
            {
                manualResetEvent = new ManualResetEvent(false);
                Console.WriteLine("********************************************************************");
                Console.WriteLine("This is the questions that you can ask to computer:\n" +
                                  "what is the date today?\n" +
                                  "which day today?\n" +
                                  "what is the time?\n" +
                                  "where are you from?\n" +
                                  "how are you?\n" +
                                  "what is your name?\n" +
                                  "hello computer?\n" +
                                  "show\n" +
                                  "hide\n" +
                                  "exit\n");
                Console.WriteLine("********************************************************************");
                Console.WriteLine("speech to make sure the computer speaks to you");

                RecognizeSpeechAndMakeSureTheComputerSpeaksToYou();

                manualResetEvent.WaitOne();
                if (_recognizer != null)
                {
                    _recognizer.Dispose();
                }
                Console.Clear();
            }
        }
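Main above blocks on manualResetEvent.WaitOne() until the recognition side signals it. A minimal sketch of a SpeechRecognized handler that releases that wait, reusing the static fields from Main; the handler name and body are illustrative assumptions, since the original handler is not shown here:

        // Hypothetical handler: report the recognized question, then let Main loop again.
        static void Recognizer_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            Console.WriteLine("Recognized: " + e.Result.Text);
            // ... speak or print the answer for e.Result.Text here ...
            manualResetEvent.Set();   // unblocks WaitOne() so Main disposes _recognizer and restarts
        }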
Example #9
 internal void close()
 {
     // unhook event handlers before disposing the recognizer
     speechRecognizer.SpeechRecognized -= SpeechRecognised;
     EventSingleton.Instance().fixationEvent -= sharedData_fixationEvent;
     EventSingleton.Instance().systemHook.KeyDown -= sharedData_keyboardEvent;
     speechRecognizer.Dispose();
 }
Example #10
 private void CloseRecognitionEngine()
 {
     if (recognitionEngine != null)
     {
         recognitionEngine.Dispose();
     }
 }
Example #11
        static void Main(string[] args)
        {
            var synth = new SpeechSynthesizer();

            synth.SetOutputToDefaultAudioDevice();

            var cultureTag = ConfigurationManager.AppSettings["culture"];
            var culture    = CultureInfo.GetCultureInfoByIetfLanguageTag(cultureTag);

            var prompt = new PromptBuilder(culture);

            prompt.StartSentence(culture);
            prompt.AppendTextWithHint(SpeakResources.Welcome, SayAs.Text);
            prompt.EndSentence();

            synth.SpeakAsync(prompt);

            var recog = new SpeechRecognitionEngine(culture);

            recog.LoadGrammar(new DictationGrammar());
            recog.SetInputToDefaultAudioDevice();
            recog.SpeechRecognized += Recog_SpeechRecognized;

            recog.RecognizeAsync(RecognizeMode.Multiple);

            Console.WriteLine("Hello!");
            Console.ReadLine();

            synth.Dispose();
            recog.Dispose();
        }
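Because SpeechSynthesizer and SpeechRecognitionEngine both implement IDisposable, the manual Dispose calls at the end of Main can also be written as using blocks, which dispose both objects even if an exception is thrown along the way. A sketch of the same flow under that assumption (not the original author's code; culture, prompt, and Recog_SpeechRecognized are taken as defined above):

            using (var synth = new SpeechSynthesizer())
            using (var recog = new SpeechRecognitionEngine(culture))
            {
                synth.SetOutputToDefaultAudioDevice();
                synth.SpeakAsync(prompt);                       // prompt built as above

                recog.LoadGrammar(new DictationGrammar());
                recog.SetInputToDefaultAudioDevice();
                recog.SpeechRecognized += Recog_SpeechRecognized;
                recog.RecognizeAsync(RecognizeMode.Multiple);

                Console.ReadLine();                             // both objects are disposed when the block exits
            }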
Example #12
 private void DisposeSpeechSynthesizer()
 {
     if (speechRecognitionEngine != null)
     {
         speechRecognitionEngine.Dispose();
     }
 }
Example #13
 private void DisposeSpeechInstruments()
 {
     if (speechRecognitionEngine != null)
     {
         speechRecognitionEngine.Dispose();
     }
 }
Example #14
        public Form1()
        {
            //Init Grammar
            grammarList.Add(grammarFile);
            Grammar grammar = new Grammar(new GrammarBuilder(grammarList));

            try
            {
                speechRecognitionEngine.RequestRecognizerUpdate();
                speechRecognitionEngine.LoadGrammarAsync(grammar);
                speechRecognitionEngine.SetInputToDefaultAudioDevice();
                speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);
                speechRecognitionEngine.SpeechRecognized          += SpeechRecognizedEvent;    //Subscriber speech recognized
                speechRecognitionEngine.SpeechRecognitionRejected += SpeechNotRecognizedEvent; //Subscriber speech not recognized
            }
            catch                                                                              //*********Exceptions***********
            {
                return;
            }

            //Custom Speech Synthesis Settings
            speechSynthesizer.SelectVoiceByHints(VoiceGender.Female);

            InitializeComponent();

            //Disposing resources
            speechSynthesizer.Dispose();
            rec.Dispose();
            speechRecognitionEngine.Dispose();
        }
Example #15
        public void InitializeService()
        {
            SpeechRecognitionEngine _recognizer       = new SpeechRecognitionEngine();
            List <string>           brightnessChoices = new List <string>();

            for (int i = 0; i < 105; i += 5)
            {
                brightnessChoices.Add(String.Format("brightness {0} percent", i));
            }
            var brightnessGrammar = new Grammar(new Choices(brightnessChoices.ToArray()));


            _recognizer.LoadGrammar(new Grammar(new Choices("color red", "color blue", "color green", "color yellow", "color orange", "color purple", "computer exit"))
            {
                Name = "colors"
            });
            _recognizer.LoadGrammar(brightnessGrammar);
            _recognizer.SpeechRecognized += _recognizer_SpeechRecognized;
            _recognizer.SetInputToDefaultAudioDevice();         // set the input of the speech recognizer to the default audio device
            _recognizer.RecognizeAsync(RecognizeMode.Multiple); // recognize speech asynchronous

            _completed.WaitOne();                               // wait until speech recognition is completed

            _recognizer.Dispose();
        }
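The _recognizer_SpeechRecognized handler wired above is not shown. A brief sketch of what it might look like for these two grammars, relying on the "colors" grammar name set above and on the fixed "brightness {0} percent" phrase shape; the handler body is an illustrative assumption, not the original code:

        // Hypothetical handler for the grammars loaded in InitializeService.
        static void _recognizer_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            if (e.Result.Grammar.Name == "colors")
            {
                Console.WriteLine("Command: " + e.Result.Text);    // e.g. "color red" or "computer exit"
                return;
            }
            // Brightness grammar: every phrase has the form "brightness <number> percent".
            string[] parts = e.Result.Text.Split(' ');
            int percent = int.Parse(parts[1]);
            Console.WriteLine("Brightness request: {0}%", percent);
        }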
Example #16
 /// <summary>
 /// Handles the Closing event of the Window control.
 /// </summary>
 /// <param name="sender">The source of the event.</param>
 /// <param name="e">The <see cref="System.ComponentModel.CancelEventArgs"/> instance containing the event data.</param>
 public void Stop()
 {
     // unhook events
     speechRecognitionEngine.RecognizeAsyncStop();
     // clean references
     speechRecognitionEngine.Dispose();
 }
Example #17
        private void engine_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            string speech = (e.Result.Text);

            switch (speech)
            {
            case "hello decca":
                Decca.SpeakAsync("hello Trendon");
                break;

            case "close weather":
                this.Dispose();
                speechRecognition.RecognizeAsyncCancel();
                speechRecognition.Dispose();
                this.Close();
                break;

            case "what is todays temperature":
            case "what is the temperature":
            case "whats the temperature":
                Decca.SpeakAsync("It is " + lbl_Temperature.Text);
                break;

            case "What is today like":
            case "what is todays condition":
            case "what is the condition today":
            case "what is the weather condition":
                Decca.SpeakAsync("Temperature is " + lbl_Temperature.Text);
                Decca.SpeakAsync("Humidity is " + lbl_HumidityMain.Text);
                Decca.SpeakAsync("Condition " + lbl_DescriptionMain.Text);
                Decca.SpeakAsync("Pressure is " + lbl_PressureMain.Text);
                Decca.SpeakAsync("WInd Speed is " + lbl_WindSpeedMain.Text);
                break;
            }
        }
Example #18
        static void Main(string[] args)
        {
            Console.WriteLine("Recognizing. \nPress ENTER to stop");

            Choices cmds = new Choices();

            cmds.Add(new String[] { "say hello", "print my name" });
            GrammarBuilder gBuilder = new GrammarBuilder();

            gBuilder.Append(cmds);
            Grammar grammar = new Grammar(gBuilder);

            recEngine.LoadGrammarAsync(grammar);
            recEngine.SetInputToDefaultAudioDevice();

            recEngine.RecognizeAsync(RecognizeMode.Multiple);

            //recognizer.Enabled = true;
            recEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(recognizer_SpeechRecognized);

            //String name = "piper";//Riley

            Console.ReadLine();
            Console.WriteLine("Stopping recognizer ...");
            recEngine.Dispose();
        }
Example #19
 public MainWindow()
 {
     InitializeComponent();
     this.DataContext = this;
     this.Unloaded   += delegate
     {
         _kinectSensor.SkeletonStream.Disable();
         _sre.RecognizeAsyncCancel();
         _sre.RecognizeAsyncStop();
         _sre.Dispose();
     };
     this.Loaded += delegate
     {
         _kinectSensor = KinectSensor.KinectSensors[0];
         _kinectSensor.SkeletonStream.Enable(new
                                             TransformSmoothParameters()
         {
             Correction         = 0.5f,
             JitterRadius       = 0.05f,
             MaxDeviationRadius = 0.04f,
             Smoothing          = 0.5f
         });
         _kinectSensor.SkeletonFrameReady += nui_SkeletonFrameReady;
         _kinectSensor.Start();
         StartSpeechRecognition();
     };
 }
Example #20
        public void Dispose()
        {
            Engine.RecognizeAsyncCancel();

            Engine.Dispose();
            Engine = null;
        }
Example #21
 internal void stopVoiceRecognizer()
 {
     engine.RecognizeAsyncStop();
     audioStream.Close();
     audioStream.Dispose();
     engine.Dispose();  //This was commented out.  Was it causing problems?
 }
Example #22
 public void Dispose()
 {
     if (_recognizerEngine != null)
     {
         _recognizerEngine.Dispose();
     }
 }
Example #23
 public void Dispose()
 {
     speechRecognitionEngine.RecognizeAsyncStop();
     speechSynthesizer.Dispose();
     speechRecognitionEngine.UnloadAllGrammars();
     speechRecognitionEngine.Dispose();
 }
Example #24
 public void Dispose()
 {
     mySpeechRecognitionEngine.SpeechRecognized -=
         new EventHandler<SpeechRecognizedEventArgs>(SpeechRecognizedHandler);
     mySpeechRecognitionEngine.Dispose();
 }
Example #25
 private void CloseBtn_Click(object sender, EventArgs e)
 {
     Recog_speech.RecognizeAsyncCancel();
     Recog_speech.Dispose();
     this.Dispose();
     this.Close();
 }
Example #26
 private void recognizer_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
 {
     if (e.Result.Text == "Lights on")
     {
         pictureBox1.Image = Properties.Resources.lightsOn;
     }
     else if (e.Result.Text == "Lights off")
     {
         pictureBox1.Image = Properties.Resources.lightsOff;
     }
     else if (e.Result.Text == "Exit")
     {
         recognizer.Dispose();
         Application.Exit();
     }
     else if (e.Result.Text == "Zoom out")
     {
         pictureBox1.Size = new System.Drawing.Size(135, 107);
     }
     else if (e.Result.Text == "Zoom in")
     {
         pictureBox1.Size = new System.Drawing.Size(538, 426);
     }
     else if (e.Result.Text == "Reset")
     {
         pictureBox1.Size = new System.Drawing.Size(269, 213);
     }
     textBox1.Text = e.Result.Text;
 }
Example #27
 protected virtual void Dispose(bool disposing)
 {
     if (disposing)
     {
         recognizer.Dispose();
     }
 }
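The fragment above is only the protected half of the standard dispose pattern. The public half that usually accompanies it looks roughly like the sketch below; the enclosing class is not shown in the original, so this pairing is an assumption for illustration:

     // Public side of the dispose pattern that pairs with Dispose(bool) above.
     public void Dispose()
     {
         Dispose(true);                // releases the recognizer via Dispose(bool)
         GC.SuppressFinalize(this);    // the finalizer, if any, no longer needs to run
     }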
Example #28
        public MainWindow()
        {
            InitializeComponent();
            UnInitializePtr();

            this.DataContext = this;
            this.Unloaded   += delegate
            {
                kinectDevice.SkeletonStream.Disable();
                _sre.RecognizeAsyncCancel();
                _sre.RecognizeAsyncStop();
                _sre.Dispose();
            };
            this.Loaded += delegate
            {
                kinectDevice = KinectSensor.KinectSensors[0];
                kinectDevice.SkeletonStream.Enable(new
                                                   TransformSmoothParameters()
                {
                    Correction         = 0.5f,
                    JitterRadius       = 0.05f,
                    MaxDeviationRadius = 0.04f,
                    Smoothing          = 0.5f
                });
                kinectDevice.SkeletonFrameReady += KinectDevice_SkeletonFrameReady;
                kinectDevice.Start();
                StartSpeechRecognition();
            };

            // Register for Kinect status-changed events
            KinectSensor.KinectSensors.StatusChanged += KinectSensors_StatusChanged;
            // Return the first available (connected) Kinect
            this.KinectDevice = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
        }
Example #29
        static void Main(string[] args)
        {
            manualResetEvent = new ManualResetEvent(false);

            /* Console.WriteLine("To recognize speech, and write 'test' to the console, press 0");
             * Console.WriteLine("To recognize speech and make sure the computer speaks to you, press 1");
             * Console.WriteLine("To emulate speech recognition, press 2");
             * Console.WriteLine("To recognize speech using Choices and GrammarBuilder.Append, press 3");
             * Console.WriteLine("To recognize speech using a DictationGrammar, press 4");
             * Console.WriteLine("To get a prompt building example, press 5");
             */
            ConsoleKeyInfo pressedKey = Console.ReadKey(true);
            char           keychar    = pressedKey.KeyChar;

            //Console.WriteLine("You pressed '{0}'", keychar);
            switch (keychar)
            {
            case '0':
                RecognizeSpeechAndWriteToConsole();
                break;

            case '1':
                RecognizeSpeechAndMakeSureTheComputerSpeaksToYou();
                break;

            case '2':
                EmulateRecognize();
                break;

            case '3':
                SpeechRecognitionWithChoices();
                break;

            case '4':
                SpeechRecognitionWithDictationGrammar();
                break;

            case '5':
                PromptBuilding();
                break;

            default:
                //     Console.WriteLine("You didn't press 0, 1, 2, 3, 4, or 5!");
                Console.WriteLine("Press any key to continue . . .");
                Console.ReadKey(true);
                Environment.Exit(0);
                break;
            }
            if (keychar != '5')
            {
                manualResetEvent.WaitOne();
            }
            if (_recognizer != null)
            {
                _recognizer.Dispose();
            }

            Console.WriteLine("Press any key to continue . . .");
            Console.ReadKey(true);
        }
Example #30
 //--------------------------------------------------------------------------------------------
 private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
 {
     if (speechRecognitionEngine != null)
     {
         speechRecognitionEngine.RecognizeAsyncStop();
         speechRecognitionEngine.Dispose();
     }
 }
Example #31
        public void TestMethod1()
        {
            SpeechRecognitionEngine engine = new SpeechRecognitionEngine(new CultureInfo("en-US"));

            engine.LoadGrammar(new DictationGrammar());

            engine.Dispose();
        }
Example #32
 private void button1_Click(object sender, EventArgs e)
 {
     _completed = new ManualResetEvent(false);
     SpeechRecognitionEngine _recognizer = new SpeechRecognitionEngine();
     _recognizer.RequestRecognizerUpdate(); // request for recognizer update
     _recognizer.LoadGrammar(new Grammar(new GrammarBuilder("test"))); // load a grammar
     _recognizer.RequestRecognizerUpdate(); // request for recognizer update
     _recognizer.LoadGrammar(new Grammar(new GrammarBuilder("exit"))); // load a "exit" grammar
     _recognizer.SpeechRecognized += _recognizer_SpeechRecognized;
     _recognizer.SetInputToDefaultAudioDevice(); // set the input of the speech recognizer to the default audio device
     _recognizer.RecognizeAsync(RecognizeMode.Multiple); // recognize speech asynchronous
     _completed.WaitOne(); // wait until speech recognition is completed
     _recognizer.Dispose(); // dispose the speech recognition engine
 }
Example #33
File: Program.cs  Project: rdodgen/Ezri
        static void Main(string[] args)
        {
            AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(CurrentDomain_UnhandledException);

            voice = new Voice();

            commandProcessor = ConfigureCommands().CreateCommandProcessor();
            commandProcessor.CommandRecognized += sound.NotifyRecognizedCommandAsync;
            commandProcessor.CommandRejected += sound.NotifyUnrecognizedCommandAsync;

            Console.WriteLine("Attached PIR-1 devices:");
            foreach (var pir in PIRDriver.Instance.QueryAttachedDevices())
                Console.WriteLine("\t{0}", pir);

            ConfigureLightShow();
            Console.WriteLine("Configured LightShow");

            var recognizer = GetKinectRecognizer();
            using (var sensor = GetKinectSensor())
            {
                /* Skeleton-based beam control is disabled due to an OOM issue when long running.
                var beamController = new SkeletonBasedBeamControl();
                beamController.AttentionGestureDetected += delegate(SkeletonBasedBeamControl controller)
                {
                    sound.NotifyAttentionGestureRecognized();
                };
                beamController.Start(sensor);
                */

                sensor.Start();
                var source = sensor.AudioSource;

                source.AutomaticGainControlEnabled = false;
                source.EchoCancellationMode = EchoCancellationMode.None;
                source.NoiseSuppression = true;

                Console.WriteLine("Using: {0}", recognizer.Name);

                using (Stream s = source.Start())
                {
                    SpeechRecognitionEngine sre = null;
                    var sreLock = new object();

                    EventHandler<SpeechDetectedEventArgs> SreSpeechDetected = delegate(object sender, SpeechDetectedEventArgs dea) { SpeechDetected(source, dea); };

                    Action startRecognizer = delegate()
                    {
                        SpeechRecognitionEngine oldSre = null;

                        lock (sreLock)
                        {
                            if (sre != null)
                            {
                                oldSre = sre;
                            }
                            sre = new SpeechRecognitionEngine(recognizer.Id);
                            sre.UpdateRecognizerSetting("AdaptationOn", 1);
                            sre.UpdateRecognizerSetting("PersistedBackgroundAdaptation", 1);
                            sre.LoadGrammar(commandProcessor.CreateGrammar());

                            sre.SpeechDetected += SreSpeechDetected;
                            sre.SpeechHypothesized += SreSpeechHypothesized;
                            sre.SpeechRecognitionRejected += SreSpeechRecognitionRejected;
                            sre.AudioSignalProblemOccurred += SreAudioSignalProblemOccurred;

                            sre.EndSilenceTimeoutAmbiguous = TimeSpan.FromMilliseconds(AmbiguousSilenceTimeout);
                            sre.EndSilenceTimeout = TimeSpan.FromMilliseconds(UnambiguousSilenceTimeout);

                            sre.SpeechRecognized += delegate(object sender, SpeechRecognizedEventArgs r)
                            {
                                Console.WriteLine("Handling text {0} in command processor", r.Result.Text);
                                try
                                {
                                    commandProcessor.ProcessSpeech(r.Result);
                                }
                                catch (Exception ex)
                                {
                                    Console.WriteLine("Command handler failed: " + ex.ToString());
                                    voice.SpeakAsync("Failed to execute command. Sorry!");
                                }
                            };

                            sre.SetInputToAudioStream(s,
                                                      new SpeechAudioFormatInfo(
                                                          EncodingFormat.Pcm, 16000, 16, 1,
                                                          32000, 2, null));
                            sre.RecognizeAsync(RecognizeMode.Multiple);
                            Trace.TraceInformation("New recognizer started");

                            if (oldSre != null)
                            {
                                oldSre.RecognizeAsyncStop();

                                oldSre.SpeechDetected -= SreSpeechDetected;
                                oldSre.SpeechHypothesized -= SreSpeechHypothesized;
                                oldSre.SpeechRecognitionRejected -= SreSpeechRecognitionRejected;
                                oldSre.AudioSignalProblemOccurred -= SreAudioSignalProblemOccurred;

                                oldSre.Dispose();
                                Trace.TraceInformation("Old recognizer disposed");
                            }
                        }
                    };

                    var recognizerRecycleTimer = new System.Timers.Timer()
                    {
                        AutoReset = false,
                        Enabled = false,
                        Interval = RecognizerRecyleTime.TotalMilliseconds,
                    };
                    recognizerRecycleTimer.Elapsed += (sender, elapsedEventArgs) =>
                    {
                        Trace.TraceInformation("Recycling recognizer...");
                        startRecognizer();
                        recognizerRecycleTimer.Start();
                        Trace.TraceInformation("Done recycling recognizer.");
                    };

                    startRecognizer();
                    Console.WriteLine("Ready.");
                    voice.SpeakAsync("Ez-ree is now online.");

                    recognizerRecycleTimer.Start();

                    Console.ReadLine();
                    Console.WriteLine("Stopping recognizer ...");
                    // TODO: poison flag so the recycle timer doesn't get in the way
                    lock (sreLock)
                    {
                        sre.RecognizeAsyncStop();
                        sre.Dispose();
                    }
                    // beamController.Stop();
                }
            }
        }