private void oRecognizer_StateChanged(object sender, System.Speech.Recognition.AudioStateChangedEventArgs e)
{
    // Switch on the AudioState enum directly instead of comparing its string form.
    switch (oRecognizer.AudioState)
    {
        case System.Speech.Recognition.AudioState.Stopped:
        case System.Speech.Recognition.AudioState.Silence:
            lblAudioState.Content = "I hear silence.";
            break;

        case System.Speech.Recognition.AudioState.Speech:
            // Ask the speech plugin whether it is talking right now, so the
            // recognizer can tell its own output apart from the user's voice.
            string speaking = OSAEObjectPropertyManager.GetObjectPropertyValue(gSpeechPlugin, "Speaking").Value.ToString().ToLower();

            if (speaking == "true")
            {
                lblAudioState.Content = "I hear myself.";
            }
            else
            {
                lblAudioState.Content = "I hear talking.";
            }
            break;
    }
}
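For context, a rough sketch of how a handler like this gets wired up. The setup below is an assumption (dictation grammar, default audio device, an InitRecognizer helper) and not part of the original plugin:

using System.Speech.Recognition;

// Sketch: lives inside the same window/class that declares lblAudioState.
private SpeechRecognitionEngine oRecognizer;

private void InitRecognizer()
{
    oRecognizer = new SpeechRecognitionEngine();
    oRecognizer.SetInputToDefaultAudioDevice();
    oRecognizer.LoadGrammar(new DictationGrammar());      // assumed grammar
    oRecognizer.AudioStateChanged += oRecognizer_StateChanged;
    oRecognizer.RecognizeAsync(RecognizeMode.Multiple);   // keep listening continuously
}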
Example #2
private void AudioStateChangedProxy(object sender, AudioStateChangedEventArgs e)
{
    // Copy the delegate to a local so a concurrent unsubscribe cannot null it
    // between the null check and the invocation.
    EventHandler<AudioStateChangedEventArgs> audioStateChangedHandler = _audioStateChangedDelegate;

    if (audioStateChangedHandler != null)
    {
        audioStateChangedHandler(this, e);
    }
}
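The backing field _audioStateChangedDelegate is not shown above. A minimal sketch of how a proxy like this is commonly backed, assuming a wrapper class around the recognizer; the class and event names here are placeholders, not part of the original:

using System;
using System.Speech.Recognition;

public class RecognizerWrapper
{
    // Delegate that AudioStateChangedProxy forwards to.
    private EventHandler<AudioStateChangedEventArgs> _audioStateChangedDelegate;

    // Explicit accessors keep subscribers and the proxy pointing at the same backing field.
    public event EventHandler<AudioStateChangedEventArgs> AudioStateChanged
    {
        add { _audioStateChangedDelegate += value; }
        remove { _audioStateChangedDelegate -= value; }
    }
}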
private void oRecognizer_StateChanged(object sender, System.Speech.Recognition.AudioStateChangedEventArgs e)
{
    System.Speech.Recognition.AudioState state = oRecognizer.AudioState;
    lblAudioState.Content = "I hear " + state.ToString();

    try
    {
        // AudioState.Stopped means the recognizer is no longer listening, so start it again.
        if (state == System.Speech.Recognition.AudioState.Stopped)
        {
            oRecognizer.RecognizeAsync();
        }
    }
    catch (Exception ex)
    {
        AddToLog("Error trying to restart recognition!");
        AddToLog("Error: " + ex.Message);
    }
}
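The try/catch above is needed because RecognizeAsync throws if a recognition pass is still in progress. One alternative, sketched here rather than taken from the original plugin, is to restart from RecognizeCompleted, which only fires once the previous asynchronous pass has finished:

// Sketch: restart only after the previous async operation completes.
// gRunningManual is a hypothetical flag (compare the commented code in Example #4).
oRecognizer.RecognizeCompleted += (s, args) =>
{
    if (!gRunningManual)
    {
        oRecognizer.RecognizeAsync(RecognizeMode.Multiple);
    }
};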
Example #4
private void oRecognizer_StateChanged(object sender, System.Speech.Recognition.AudioStateChangedEventArgs e)
{
    /*
     * // Earlier restart-on-stop logic, kept here for reference:
     * //System.Speech.Recognition.AudioState state = oRecognizer.AudioState;
     * try
     * {
     *     if (oRecognizer.AudioState == 0 && gRunningManual == false)
     *     {
     *         oRecognizer.RecognizeAsync();
     *     }
     * }
     * catch (Exception ex)
     * {
     *     AddToLog("Error trying to restart recognition!");
     *     AddToLog("Error: " + ex.Message);
     * }
     */

    switch (oRecognizer.AudioState)
    {
        case System.Speech.Recognition.AudioState.Stopped:
        case System.Speech.Recognition.AudioState.Silence:
            lblAudioState.Content = "I hear silence.";
            break;

        case System.Speech.Recognition.AudioState.Speech:
            // Check whether the speech plugin itself is talking so the recognizer
            // does not mistake its own output for the user's voice.
            string speaking = OSAEObjectPropertyManager.GetObjectPropertyValue(gSpeechPlugin, "Speaking").Value.ToString().ToLower();

            if (speaking == "true")
            {
                lblAudioState.Content = "I hear myself.";
            }
            else
            {
                lblAudioState.Content = "I hear talking.";
            }
            break;
    }
}
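Both switch-based examples assign lblAudioState.Content straight from the event handler. Depending on how the recognizer dispatches its events, the handler may not run on the UI thread; in a WPF host a defensive version would marshal the update through the dispatcher. A sketch, not part of the original examples:

// Sketch: set the label text on the UI thread regardless of which thread raised the event.
private void SetAudioStateLabel(string text)
{
    if (lblAudioState.Dispatcher.CheckAccess())
    {
        lblAudioState.Content = text;
    }
    else
    {
        lblAudioState.Dispatcher.BeginInvoke(new Action(() => lblAudioState.Content = text));
    }
}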
private void AudioStateChangedProxy(object sender, AudioStateChangedEventArgs e)
{
    // ?.Invoke evaluates the delegate once, so this is the concise equivalent
    // of the copy-then-null-check pattern in Example #2.
    _audioStateChangedDelegate?.Invoke(this, e);
}