Example #1
 public void HandleAudioStateChanging(object sender, AudioStateChangedEventArgs e)
 {
     if (e.AudioState == AudioState.Speech)
     {
         _lastSpeechTime = DateTime.Now;
     }
 }
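For context, none of these handlers fire until they are attached to a recognition engine. A minimal wiring sketch (assuming a reference to the System.Speech assembly, an installed recognizer, and a default audio device; the dictation grammar is just a placeholder):

 using System;
 using System.Speech.Recognition;

 class AudioStateDemo
 {
     static DateTime _lastSpeechTime;

     static void Main()
     {
         using (var recognizer = new SpeechRecognitionEngine())
         {
             recognizer.LoadGrammar(new DictationGrammar()); // placeholder grammar
             recognizer.SetInputToDefaultAudioDevice();
             recognizer.AudioStateChanged += HandleAudioStateChanging;
             recognizer.RecognizeAsync(RecognizeMode.Multiple);
             Console.ReadLine(); // keep the process alive while events arrive
         }
     }

     static void HandleAudioStateChanging(object sender, AudioStateChangedEventArgs e)
     {
         if (e.AudioState == AudioState.Speech)
         {
             _lastSpeechTime = DateTime.Now;
         }
     }
 }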
        void recEngine_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
        {
            string text      = "\nAudio State: " + e.AudioState;
            string timeStamp = GetTimestamp(DateTime.Now);

            Trace.WriteLine(text + "\t" + timeStamp);
            SpeechState state = SpeechState.UNKNOWN;

            if (e.AudioState == AudioState.Silence)
            {
                state = SpeechState.SPEECH_END;

                //if (eachSpeechTimes != null)
                //{
                //    turnAroundTimes.Add(eachSpeechTimes);
                //}
            }
            else if (e.AudioState == AudioState.Speech)
            {
                state = SpeechState.SPEECH_START;
                //eachSpeechTimes = new List<double>();
                //speechTurnAroundTime = EBookUtil.GetUnixTimeMillis();
            }
            //AbstractEBookEvent.raise(new SpeechStateChangeEvent(state));
            ActivityExecutor.add(new InternalSpeechStateChangeActivity(state));
        }
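This handler maps the engine's AudioState onto project-specific types (SpeechState, ActivityExecutor, InternalSpeechStateChangeActivity) that are not shown. Judging from the constants used, the enum presumably looks something like:

        // Hypothetical declaration implied by the handler above:
        enum SpeechState { UNKNOWN, SPEECH_START, SPEECH_END }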
Example #3
        //private System.Windows.Forms.TextBox textBox;
        private static void AudioChanged(object sender, AudioStateChangedEventArgs e)
        {
            // Bail out unless the current selection is a worksheet range.
            if (!(Globals.ThisAddIn.Application.Selection is Microsoft.Office.Interop.Excel.Range currentRange))
            {
                return;
            }
            // AudioState values: Stopped = 0 (not processing audio input),
            // Silence = 1 (receiving silence or non-speech background noise),
            // Speech  = 2 (receiving speech input).
            switch (e.AudioState)
            {
            case AudioState.Silence: currentRange.Value = "Please say something"; break;    //NOT SO ACCURATE

            case AudioState.Speech: currentRange.Value = "Listening..."; break;

            //case AudioState.Stopped: currentRange.Value = "Stopped"; break;
            default: break;
            }
        }
 private void OnVolumeChanged(object sender, AudioStateChangedEventArgs e)
 {
     if (VolumeChanged != null)
     {
         VolumeChanged(this, e);
     }
 }
Example #5
 private void DSN_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     if (logAudioSignalIssues)
     {
         Trace.TraceInformation("Audio state changed: {0}", e.AudioState.ToString());
     }
 }
Example #6
        private void Engine_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
        {
            state = e.AudioState;

            if (state == AudioState.Silence)
            {
                timestamp = DateTime.Now;
            }
        }
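Caching the state and the moment silence began makes a simple end-of-utterance check possible elsewhere in the class. A sketch of how the two fields might be consumed (the method and threshold are assumptions, not part of the original):

        // Hypothetical consumer of the fields set above:
        bool UserStoppedTalking(TimeSpan threshold)
        {
            return state == AudioState.Silence && DateTime.Now - timestamp > threshold;
        }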
Example #7
 private void onAudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     if (isRunning == false)
     {
         return;
     }
     if (AudioStateChanged != null)
     {
         AudioStateChanged(e.AudioState);
     }
 }
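The handler forwards the engine state through a custom event. The declarations it implies (inferred from the usage, not taken from the original source) would be roughly:

     // Hypothetical members implied by the forwarding handler above:
     public event Action<AudioState> AudioStateChanged;
     private volatile bool isRunning;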
        private void AudioStoppedCanSay(object sender, AudioStateChangedEventArgs e)
        {
            // Defer briefly, then speak on a worker thread so the event thread is not blocked.
            Task.Delay(10).ContinueWith((o) =>
            {
                var speak = _synthesizer.SpeakAsync(_messageToSay);

                // Poll until the asynchronous prompt has finished playing.
                while (!speak.IsCompleted)
                {
                    System.Threading.Thread.Sleep(50);
                }

                // Detach this one-shot handler and resume continuous recognition.
                _recognitionEngine.AudioStateChanged -= AudioStoppedCanSay;
                _recognitionEngine.RecognizeAsync(RecognizeMode.Multiple);
            });
        }
        private void DSN_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
        {
            if (config.Get("SpeechRecognition", "bLogAudioSignalIssues", "0") == "1")
            {
                Trace.TraceInformation("Audio state changed: {0}", e.AudioState.ToString());
            }

            // Thread-safe: if (e.AudioState == AudioState.Stopped && recognitionStatus == STATUS_RECOGNIZING)
            if (e.AudioState == AudioState.Stopped && Interlocked.Read(ref recognitionStatus) == STATUS_RECOGNIZING)
            {
                Trace.TraceInformation("The recording device is not available.");
                WaitRecordingDeviceNonBlocking();
            }
        }
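For Interlocked.Read to compile, recognitionStatus must be a long. A sketch of the supporting members this handler presumably relies on (names taken from the code above, everything else is assumption):

            // Hypothetical supporting members implied by the handler above:
            private const long STATUS_RECOGNIZING = 1;
            private long recognitionStatus; // updated elsewhere, e.g. via Interlocked.Exchange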
Example #10
        private void recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
        {
            switch (e.AudioState)
            {
            case AudioState.Speech:
                LabelStatus.Content = "Listening";
                break;

            case AudioState.Silence:
                LabelStatus.Content = "Idle";
                break;

            case AudioState.Stopped:
                LabelStatus.Content = "Stopped";
                break;
            }
        }
Example #11
            /// <summary>
            /// Initializes a new instance of the AudioStateChangedSpeechMessage class.
            /// </summary>
            /// <param name="args">Event arguments.</param>
            internal AudioStateChangedSpeechMessage(AudioStateChangedEventArgs args)
            {
                this.category  = SpeechStreamHandler.SpeechEventCategory;
                this.eventType = "audioStateChanged";

                this.audioStateChanged = new MessageArgs();
                switch (args.AudioState)
                {
                case AudioState.Stopped:
                    this.audioStateChanged.audioState = "stopped";
                    break;

                case AudioState.Silence:
                    this.audioStateChanged.audioState = "silence";
                    break;

                case AudioState.Speech:
                    this.audioStateChanged.audioState = "speech";
                    break;
                }
            }
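Since the enum names already match the strings, the switch is equivalent to lowercasing the name. Assuming no localization concerns, a one-line shorthand would be:

                // Equivalent to the switch above:
                this.audioStateChanged.audioState = args.AudioState.ToString().ToLowerInvariant();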
        /// <summary>
        /// Called when the audio state of the recognizer changes.
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">An object that contains the event data.</param>
        private void OnAudioStateChanged(object sender, AudioStateChangedEventArgs e)
        {
            // Don't start processing until stream start time has been synchronized with the first message.
            if (this.streamStartTime == DateTime.MinValue)
            {
                return;
            }

            switch (e.AudioState)
            {
            case AudioState.Speech:
                this.lastSpeechDetectedTime = this.streamStartTime + this.speechRecognitionEngine.RecognizerAudioPosition +
                                              TimeSpan.FromMilliseconds(this.Configuration.VoiceActivityStartOffsetMs);

                if (this.lastSpeechDetectedTime < this.lastSilenceDetectedTime)
                {
                    // speech start time must not be before last speech ended time
                    this.lastSpeechDetectedTime = this.lastSilenceDetectedTime;
                }

                this.lastSpeechDetectedState = true;
                break;

            case AudioState.Silence:
            case AudioState.Stopped:
                this.lastSilenceDetectedTime = this.streamStartTime + this.speechRecognitionEngine.RecognizerAudioPosition +
                                               TimeSpan.FromMilliseconds(this.Configuration.VoiceActivityEndOffsetMs);

                if (this.lastSilenceDetectedTime < this.lastSpeechDetectedTime)
                {
                    // speech end time must not be before last speech started time
                    this.lastSilenceDetectedTime = this.lastSpeechDetectedTime;
                }

                this.lastSpeechDetectedState = false;
                break;
            }
        }
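A worked trace of the clamping logic, with made-up numbers (the offsets are assumptions, not defaults):

            // streamStartTime = T0, RecognizerAudioPosition = 5.00 s, VoiceActivityStartOffsetMs = -150
            //   raw speech start = T0 + 5.00 s - 0.15 s = T0 + 4.85 s
            //   if lastSilenceDetectedTime = T0 + 4.90 s, the start is clamped to T0 + 4.90 s,
            //   so a speech interval can never begin before the preceding silence ended.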
Example #13
        private void GodListener_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
        {
            var GodListener = sender as SpeechRecognitionEngine;

            if (e.AudioState == AudioState.Speech)
            {
                textBox2.Text = "I can tell that you are speaking to me.";
            }

            if (e.AudioState == AudioState.Silence)
            {
                textBox2.Text = "I can hear only background noise.";
            }

            if (e.AudioState == AudioState.Stopped)
            {
               label2.Text = "The god is no longer listening.";
               label2.ForeColor = Color.Black;
               button1.Enabled = true;
               GodListener.RecognizeAsyncStop();
               GodListener.UnloadAllGrammars();
            }
        }
Example #14
        /// <summary>
        /// Handles AudioStateChanged events from the speech recognition engine.
        /// </summary>
        /// <param name="sender">Event sender.</param>
        /// <param name="args">Event arguments.</param>
        private async void AudioStateChanged(object sender, AudioStateChangedEventArgs args)
        {
            var message = new AudioStateChangedSpeechMessage(args);

            await this.ownerContext.SendEventMessageAsync(message);
        }
Example #15
 // Handle Audio state changed event
 static void recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     // Console.WriteLine("[Engine] AudioStateChanged ({0}): {1}", DateTime.Now.ToString("mm:ss.f"), e.AudioState);
 }
 private void recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     Log("AudioStateChanged (" + DateTime.Now.ToString("mm:ss.f") + "):" + e.AudioState);
 }
        /**
         * @method  EventAudioStateChange
         *
         * Handles a change in the state of the audio input.
         *
         * @param   {object}    sender          Object sent by the recognition event.
         * @param   {object}    e               Event data returned by the recognition engine.
         * @returns {void}
         */
        private void EventAudioStateChange(object sender, AudioStateChangedEventArgs e)
        {
            string data = JSON.Serialize(e);

            EventDispatch(data, "vc:audioState");
        }
Example #18
 protected void Event_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
 }
Example #19
 private void OnAudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     Console.WriteLine("OnAudioStateChanged--" + e.AudioState);
 }
Example #21
 // Handle the AudioStateChanged event.
 static void AudioStateChangedHandler(
     object sender, AudioStateChangedEventArgs e)
 {
     Console.WriteLine("AudioStateChanged ({0}): {1}", DateTime.Now.ToString("mm:ss.f"), e.AudioState);
 }
 void sre_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     if (e.AudioState == AudioState.Silence)
         button2.BackColor = Color.Red;
     if (e.AudioState == AudioState.Speech)
         button2.BackColor = Color.Green;
     if (e.AudioState == AudioState.Stopped)
         button2.BackColor = Color.DarkGray;
 }
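Depending on how the engine was created, this event may arrive on a non-UI thread, in which case touching button2 directly is unsafe. A defensive variant (same hypothetical button2 control) marshals the update explicitly:

 void sre_AudioStateChangedSafe(object sender, AudioStateChangedEventArgs e)
 {
     // BeginInvoke moves the UI update onto the control's owning thread.
     button2.BeginInvoke((Action)(() =>
     {
         switch (e.AudioState)
         {
         case AudioState.Silence: button2.BackColor = Color.Red; break;
         case AudioState.Speech: button2.BackColor = Color.Green; break;
         case AudioState.Stopped: button2.BackColor = Color.DarkGray; break;
         }
     }));
 }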
Example #23
 protected void recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     // logInfo("[Engine] AudioStateChanged ({0}): {1}", DateTime.Now.ToString("mm:ss.f"), e.AudioState);
 }
Example #24
 private void MySpeechRecognitionEnginee_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     Console.Write(e.AudioState.ToString());
 }
 private void Recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     Console.ForegroundColor = ConsoleColor.DarkGray;
     Console.WriteLine($"STATE={e.AudioState}");
     Console.ForegroundColor = ConsoleColor.Gray;
 }
 protected void recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     cfg.logDebug("ENGINE - " + Name, "AudioStateChanged (" + DateTime.Now.ToString("mm:ss.f") + "):" + e.AudioState);
 }
Example #27
 private void OnAudioStateChange(object sender, AudioStateChangedEventArgs e)
 {
     Logger.Write("State change: " + e.AudioState);
 }
 void recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
 }
 private void UnitySREngine_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     UnityModule.DebugPrint("Speech state changed: {0}", e.AudioState.ToString());
     // speech state changed
 }
Example #30
 void recEngine_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     listener.handleAudioStateChanged(e.AudioState);
 }
 protected void recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e) {
   Trace("AudioStateChanged (" + DateTime.Now.ToString("mm:ss.f") + "):" + e.AudioState);
 }
Example #33
        private void GodListener_AudioStateChanged(object sender, AudioStateChangedEventArgs e)
        {
            var GodListener = sender as SpeechRecognitionEngine;

            if (e.AudioState == AudioState.Stopped)
            {
                radioButton1.Text = "Not Receiving Audio";
                radioButton1.Checked = false;
                radioButton1.ForeColor = Color.Black;
                textBox3.Text = "";
                textBox5.Text = "";
                GodListener.RecognizeAsyncStop();
            }
            else if (e.AudioState == AudioState.Silence)
            {

                if (GodAudioLevel == 0)
                {
                    radioButton1.Text = "Receiving Audio";
                    radioButton1.Checked = true;
                    radioButton1.ForeColor = Color.Red;
                    textBox3.Text = "Silence";
                }
            }
        }
Example #34
        private void OnAudioStateChanged(object sender, AudioStateChangedEventArgs e)
        {
            DateTime originatingTime = this.streamStartTime + this.speechRecognitionEngine.RecognizerAudioPosition;

            this.PostWithOriginatingTimeConsistencyCheck(this.AudioStateChanged, e, originatingTime);
        }
		private void OnVolumeChanged(object sender, AudioStateChangedEventArgs e)
		{
			if (VolumeChanged != null)
				VolumeChanged(this, e);
		}
Example #36
 private void AudioStateChanged(object sender, AudioStateChangedEventArgs e)
 {
     Debug.WriteLine("Audio state changed to " + e.AudioState + ".");
 }
Example #37
 private void Recognizer_AudioStateChanged(object sender, AudioStateChangedEventArgs e) //EventArgs
 {
 }
 private void RecognizerOnAudioStateChanged(object sender, AudioStateChangedEventArgs audioStateChangedEventArgs)
 {
     Debug.WriteLine(audioStateChangedEventArgs.AudioState.ToString());
 }