Example #1
        /// <summary>
        ///     Called when a final response is received.
        /// </summary>
        void OnResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
        {
            bool isFinalDictationMessage = m_recoMode == SpeechRecognitionMode.LongDictation &&
                                           (e.PhraseResponse.RecognitionStatus == RecognitionStatus.EndOfDictation ||
                                            e.PhraseResponse.RecognitionStatus == RecognitionStatus.DictationEndSilenceTimeout);

            if (m_isMicrophoneReco && ((m_recoMode == SpeechRecognitionMode.ShortPhrase) || isFinalDictationMessage))
            {
                // we got the final result, so we can end the mic reco.  No need to do this
                // for dataReco, since we already called endAudio() on it as soon as we were done
                // sending all the data.
                m_micClient.EndMicAndRecognition();
            }

            if (!isFinalDictationMessage)
            {
                Console.WriteLine("********* Final NBEST Results *********");
                for (int i = 0; i < e.PhraseResponse.Results.Length; i++)
                {
                    Console.WriteLine("[{0}] Confidence={1} Text=\"{2}\"",
                                      i, e.PhraseResponse.Results[i].Confidence,
                                      e.PhraseResponse.Results[i].DisplayText);
                }
                Console.WriteLine();
            }
        }
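For context, a handler like this is subscribed on a MicrophoneRecognitionClient before recognition starts. A minimal wiring sketch, assuming hypothetical field names and a placeholder key (the factory call and event name mirror Examples #11 and #17):

        // Sketch: wiring up the handler. m_recoMode, m_micClient and
        // m_isMicrophoneReco are the fields the handler above relies on;
        // replace the placeholder with a real subscription key.
        m_recoMode  = SpeechRecognitionMode.LongDictation;
        m_micClient = SpeechRecognitionServiceFactory.CreateMicrophoneClient(
            m_recoMode, "en-US", "<speech-api-key>");
        m_micClient.OnResponseReceived += OnResponseReceivedHandler;
        m_isMicrophoneReco = true;

        // Start streaming microphone audio; the final result arrives in the handler above.
        m_micClient.StartMicAndRecognition();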
Example #2
 private void Record_Click(object sender, RoutedEventArgs e)
 {
     if (RecordButtonText == "Record")
     {
         micRecognitionClient.StartMicAndRecognition();
         RecordButtonText = "Stop";
     }
     else
     {
         micRecognitionClient.EndMicAndRecognition();
         RecordButtonText = "Record";
     }
 }
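This toggle keys the client's state off the button caption. A sketch of the same logic driven by a boolean flag instead, as Example #17 does with IsRecognizing (the isRecording field is an assumption):

 private bool isRecording;  // hypothetical field replacing the caption check

 private void Record_Click(object sender, RoutedEventArgs e)
 {
     if (!isRecording)
     {
         micRecognitionClient.StartMicAndRecognition();
         RecordButtonText = "Stop";
     }
     else
     {
         micRecognitionClient.EndMicAndRecognition();
         RecordButtonText = "Record";
     }
     isRecording = !isRecording;
 }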
Example #3
 private void OnMicDictationResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
 {
     if (OnFinalOutputReceivedHandler != null)
     {
         if (e.PhraseResponse != null && e.PhraseResponse.Results.Length > 0)
         {
             FinalOutputEvent evt = new FinalOutputEvent();
             evt.SetEventData(e.PhraseResponse.Results[0].DisplayText);
             evt.IsCompleted = true;
             OnFinalOutputReceivedHandler(evt);
         }
     }
     _speechClient.EndMicAndRecognition();
 }
 private void Stopbtn_Click(object sender, RoutedEventArgs e)
 {
     Dispatcher.Invoke((Action)(() =>
     {
         try
         {
             _FinalResponceEvent.Set();
             _microphoneRecognitionClient.EndMicAndRecognition();
             _microphoneRecognitionClient.Dispose();
             _microphoneRecognitionClient = null;
             Speakbtn.Content = "Start\nRecording";
             Speakbtn.IsEnabled = true;
             Responsetxt.Background = Brushes.White;
             Responsetxt.Foreground = Brushes.Black;
         }
         catch (Exception e1) { Console.WriteLine(e1); }
     }));
     // Speakbtn.Content = "";
     Speakbtn.IsEnabled     = true;
     Responsetxt.Background = Brushes.White;
     Responsetxt.Foreground = Brushes.Black;
     Responsetxt.Text       = "hello helpline number there is case of murder in my area send urgent help";
     GetSentiments(Responsetxt.Text);
     //  using (System.IO.StreamWriter file =
     //new System.IO.StreamWriter(@"C:\Users\saksham\Desktop\powerbi.xlsx", true))
     // {
     //  file.WriteLine(Responsetxt.Text+"\n");
     //}
 }
Example #5
 private void OnMicShortPhraseResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
 {
     this.BeginInvoke((MethodInvoker) delegate() {
         //this.WriteLine("--- OnMicShortPhraseResponseReceivedHandler ---");
         micClient.EndMicAndRecognition();
         btnSpeech.Enabled = true;
     });
 }
Example #6
 /// <summary>
 /// Called when a final response is received.
 /// </summary>
 /// <param name="sender">The sender.</param>
 /// <param name="e">The <see cref="SpeechResponseEventArgs"/> instance containing the event data.</param>
 private void OnMicShortPhraseResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
 {
     // we got the final result, so we can end the mic reco.  No need to do this
     // for dataReco, since we already called endAudio() on it as soon as we were done
     // sending all the data.
     _micClient.EndMicAndRecognition();
     this.WriteResponseResult(e);
 }
Example #7
        /// <summary>
        /// Called when a final response is received.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The <see cref="SpeechResponseEventArgs"/> instance containing the event data.</param>
        private void OnMicShortPhraseResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
        {
            Dispatcher.Invoke(() =>
            {
                WriteLine("--- OnMicShortPhraseResponseReceivedHandler ---");

                // we got the final result, so we can end the mic reco.  No need to do this
                // for dataReco, since we already called endAudio() on it as soon as we were done
                // sending all the data.
                _micClient.EndMicAndRecognition();

                WriteResponseResult(e);

                _startButton.IsEnabled = true;
                _radioGroup.IsEnabled  = true;
            });
        }
Example #8
 private void StopMicrophone()
 {
     //end mic
     if (micClient != null)
     {
         micClient.EndMicAndRecognition();
         micClient.Dispose();
     }
 }
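One caveat: the method above disposes micClient but leaves the field pointing at the disposed instance. A hedged variant that also clears the reference, following the Dispose-and-null pattern used in Examples #9, #16 and #17:

 private void StopMicrophone()
 {
     // End the mic session, release the client, and clear the field so a
     // disposed client cannot be touched by a later call.
     if (micClient != null)
     {
         micClient.EndMicAndRecognition();
         micClient.Dispose();
         micClient = null;
     }
 }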
Example #9
        private void OnResponseReceived(object sender, SpeechResponseEventArgs e)
        {
            Dispatcher.Invoke(() =>
            {
                _micClient.EndMicAndRecognition();
                RecordButton.IsEnabled = true;
                RecordButton.Content   = "Record";
            });

            var log = "";

            for (var i = 0; i < e.PhraseResponse.Results.Length; i++)
            {
                log +=
                    $"{i}. Confidence: {e.PhraseResponse.Results[i].Confidence}, Text: \"{e.PhraseResponse.Results[i].DisplayText}\"\r\n";
            }

            WriteToLog(log);
        }
        /// <summary>
        ///     Called when a final response is received.
        /// </summary>
        void OnMicShortPhraseResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
        {
            Dispatcher.Invoke((Action)(() =>
            {
                // WriteLine("--- OnMicShortPhraseResponseReceivedHandler ---");

                _FinalResponseEvent.Set();

                // we got the final result, so we can end the mic reco.  No need to do this
                // for dataReco, since we already called endAudio() on it as soon as we were done
                // sending all the data.
                _micClient.EndMicAndRecognition();

                // BUGBUG: Workaround for the issue where the cached _micClient cannot be re-used for recognition.
                _micClient.Dispose();
                _micClient = null;
                openMic();
                WriteResponseResult(e);
            }));
        }
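The openMic() call above re-creates the client because, per the BUGBUG note, the cached _micClient cannot be re-used for a second recognition. Its body is not shown in the source; a hypothetical sketch, assuming the mode, locale and key mirror the factory calls elsewhere on this page:

        private void openMic()
        {
            // Hypothetical re-creation of the client after the old one was disposed.
            _micClient = SpeechRecognitionServiceFactory.CreateMicrophoneClient(
                SpeechRecognitionMode.ShortPhrase, "en-US", "<speech-api-key>");
            _micClient.OnResponseReceived += OnMicShortPhraseResponseReceivedHandler;
        }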
Example #11
        /// <summary>
        /// Analyze speech input
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void btnAnalyzeSpeech_Click(object sender, RoutedEventArgs e)
        {
            ignoreNextString             = false;
            AudioControlsGrid.Visibility = Visibility.Visible;

            if (ButtonState == "Record")
            {
                this.fullText = null;
                recording     = true;
                BrushConverter bc = new BrushConverter();
                mySentiment.Sentiment = 0.5;
                userInput.Text        = "";

                recordGrid.Visibility     = System.Windows.Visibility.Hidden;
                recordingdGrid.Visibility = System.Windows.Visibility.Visible;

                recordingState = RecordingState.Recording;

                string speechAPIKey = confCollection["SpeechRecognitionAPIKey"].Value;

                MicrophoneRecognitionClient intentMicClient =
                    SpeechRecognitionServiceFactory.CreateMicrophoneClient(SpeechRecognitionMode.LongDictation,
                                                                           "en-us",
                                                                           speechAPIKey);

                m_micClient = intentMicClient;

                // Event handlers for speech recognition results
                m_micClient.OnResponseReceived        += this.OnResponseReceivedHandler;
                m_micClient.OnPartialResponseReceived += this.OnPartialResponseReceivedHandler;
                //m_micClient.OnConversationError += OnConversationErrorHandler;

                // First send of audio data to service
                m_micClient.StartMicAndRecognition();

                ButtonState = "Finish";
            }
            // Finish the recording
            else if (ButtonState == "Finish")
            {
                Thread.Sleep(1000);
                recording = false;
                m_micClient.EndMicAndRecognition();
                recordGrid.Visibility     = System.Windows.Visibility.Visible;
                recordingdGrid.Visibility = System.Windows.Visibility.Hidden;

                ButtonState = "Record";

                DisplayAnalysis();

                // Stop recording.
                Stop();
            }
        }
        // ----------------------------------------------------------------------------------------------------------------------------------------

        void OnMicShortPhraseResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
        {
            Console.WriteLine("--- OnMicShortPhraseResponseReceivedHandler ---");

            // we got the final result, so we can end the mic reco.  No need to do this
            // for dataReco, since we already called endAudio() on it as soon as we were done
            // sending all the data.
            Microphone.EndMicAndRecognition();

            WriteResponseResult(e);

            CreateMicrophoneRecoClientWithIntent();
        }
 private void OnMicShortPhraseResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
 {
     Dispatcher.Invoke((Action)(() =>
     {
         _FinalResponseEvent.Set();
         _microphoneRecognitionClient.EndMicAndRecognition();
         _microphoneRecognitionClient.Dispose();
         _microphoneRecognitionClient = null;
         RecordButton.Content = "Iniciar\nGrabacion";
         RecordButton.IsEnabled = true;
         OutputTextbox.Background = Brushes.White;
         OutputTextbox.Foreground = Brushes.Black;
     }));
 }
 public void StopRecording()
 {
     try
     {
         FinalResponceEvent.Set();
         MicrophoneRecognitionClient.EndMicAndRecognition();
         MicrophoneRecognitionClient.Dispose();
         MicrophoneRecognitionClient = null;
     }
     catch (Exception ex)
     {
         MessageBox.Show(ex.Message);
     }
 }
 private void btn_Bitir_Click(object sender, EventArgs e)
 {
     txt_Cikti.BeginInvoke((Action)(() =>
     {
         _FinalResponceEvent.Set();
         mikrofonTanimaAlicisi.EndMicAndRecognition();
         mikrofonTanimaAlicisi.Dispose();
         mikrofonTanimaAlicisi = null;
         btn_Baslar.Text = "Start Recording";
         btn_Baslar.Enabled = true;
         txt_Cikti.BackColor = Color.White;
         txt_Cikti.ForeColor = Color.Black;
     }));
 }
Example #16
 // End-call button
 private void btnQuitRecord_Click(object sender, RoutedEventArgs e)
 {
     if (microphoneRecognitionClient != null)
     {
         currentEvent.EventENDTIME = DateTime.Now.ToString("yyyy-MM-dd  HH:mm");
         savedEventList.Add(currentEvent);
         microphoneRecognitionClient.EndMicAndRecognition();
         microphoneRecognitionClient.Dispose();
         microphoneRecognitionClient = null;
         toastViewModel.ShowInformation("Hang up the call.");
         //PrintCurrentEvent(currentEvent);
         InsertCurrentEvent(currentEvent);
         // Save the event currently being handled and reset the UI
         ResetEvent();
     }
 }
Example #17
 private void StartButton_Click(object sender, RoutedEventArgs e)
 {
     if (IsRecognizing)
     {
         StartButton.Content = "Start";
         micClient.EndMicAndRecognition();
         micClient.Dispose();
         micClient = null;
     }
     else
     {
         StartButton.Content = "Stop";
         micClient           = SpeechRecognitionServiceFactory.CreateMicrophoneClient(SpeechRecognitionMode.LongDictation, "en-US", ApiKey1.Text);
         micClient.OnPartialResponseReceived += OnPartialResponseReceivedHandler;
         micClient.OnResponseReceived        += OnMicDictationResponseReceivedHandler;
         micClient.StartMicAndRecognition();
     }
     IsRecognizing = !IsRecognizing;
 }
        private void OnMicDictationResponseReceivedHandler(object sender, SpeechResponseEventArgs e)
        {
            ClearTextBox(txtFinal);

            if (e.PhraseResponse.RecognitionStatus == RecognitionStatus.EndOfDictation ||
                e.PhraseResponse.RecognitionStatus == RecognitionStatus.DictationEndSilenceTimeout)
            {
                micClient.EndMicAndRecognition();
            }

            if (e.PhraseResponse.Results.Length == 0)
            {
                WriteTextBoxLine(txtFinal, "NOTHING RETURNED");
            }
            else
            {
                for (int i = 0; i < e.PhraseResponse.Results.Length; i++)
                {
                    WriteTextBoxLine(txtFinal,
                                     String.Format("[{0}]: \"{2}\" (Confidence = {1})",
                                                   i,
                                                   e.PhraseResponse.Results[i].Confidence,
                                                   e.PhraseResponse.Results[i].DisplayText));
                }
                WriteTextBoxLine(txtFinal, String.Empty);

                DateTime dtm = DateTime.Now;


                // send results here
                foreach (var w in e.PhraseResponse.Results[0].DisplayText.GetCleansedWords())
                {
                    eventHubClient.PostPayload(new WordPayload()
                    {
                        Word = w, WordTime = dtm, DeviceName = deviceName, Location = deviceLocation
                    });
                    WriteTextBoxLine(wordOutput, w);
                }
            }

            InitMicClient(true);
        }
        private async void MicClient_OnResponseReceived(object sender, SpeechResponseEventArgs e)
        {
            if (e.PhraseResponse.Results.Length > 0)
            {
                micClient.EndMicAndRecognition();
                await Application.Current.Dispatcher.BeginInvoke(
                    DispatcherPriority.Normal, new Action(() =>
                {
                    this.MySpeechResponse.Text = $"'{e.PhraseResponse.Results[0].DisplayText}',";
                    this.MySpeechResponseConfidence.Text = $"confidence: {e.PhraseResponse.Results[0].Confidence}";
                }));

                var intent = await this.GetLuisIntent(e.PhraseResponse.Results[0].DisplayText);

                await Application.Current.Dispatcher.BeginInvoke(
                    DispatcherPriority.Normal, new Action(() =>
                {
                    this.MySpeechIntent.Text = $"Intent: '{intent.intents[0].intent}',";
                    this.MySpeechIntentScore.Text = $"score: {Convert.ToInt16(intent.intents[0].score * 100)}%";
                }));

                if (intent.intents[0].intent.ToLower() == "thingpictures")
                {
                    this.showEmotion = false;
                    this.SearchImage(intent.query);
                }
                else if (intent.intents[0].intent.ToLower() == "peoplepictures")
                {
                    this.showEmotion = true;
                    this.SearchImage(intent.query);
                }
                else
                {
                    await Application.Current.Dispatcher.BeginInvoke(
                        DispatcherPriority.Normal,
                        new Action(() => { this.MySpeechSentiment.Text = $"I'm not sure what your intent is and will not search"; }));
                }
            }
        }
Example #20
        /// <summary>
        ///     Speech recognition from the microphone.  The microphone is turned on and data from the microphone
        ///     is sent to the Speech Recognition Service.  A built-in silence detector
        ///     is applied to the microphone data before it is sent to the recognition service.
        /// </summary>
        void DoMicrophoneRecognition(MicrophoneRecognitionClient micClient)
        {
            int waitSeconds = (m_recoMode == SpeechRecognitionMode.LongDictation) ? 200 : 15;

            try
            {
                // Turn on the microphone and stream audio to the Speech Recognition Service
                micClient.StartMicAndRecognition();
                Console.WriteLine("Start talking");

                // Block until the final result arrives in the OnResponseReceived event, or until waitSeconds elapses, whichever comes first.
                bool isReceivedResponse = micClient.WaitForFinalResponse(waitSeconds * 1000);
                if (!isReceivedResponse)
                {
                    Console.WriteLine("{0}: Timed out waiting for conversation response after {1} ms",
                                      DateTime.UtcNow, waitSeconds * 1000);
                }
            }
            finally
            {
                // We are done sending audio.  Final recognition results will arrive in OnResponseReceived event call.
                micClient.EndMicAndRecognition();
            }
        }
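A possible call site for this helper; the factory arguments and the handler subscription are assumptions consistent with the other examples on this page:

        // Hypothetical usage: create a client, run one microphone recognition, clean up.
        var micClient = SpeechRecognitionServiceFactory.CreateMicrophoneClient(
            SpeechRecognitionMode.ShortPhrase, "en-US", "<speech-api-key>");
        micClient.OnResponseReceived += OnResponseReceivedHandler;  // assumed handler
        DoMicrophoneRecognition(micClient);
        micClient.Dispose();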
Example #21
 public void Stop_Click()
 {
     microphoneRecognitionClient.EndMicAndRecognition();
     microphoneRecognitionClient.Dispose();
     microphoneRecognitionClient = null;
 }
Example #22
 /// <summary>
 /// Function to stop microphone recordings
 /// </summary>
 private void StopMicRecording()
 {
     _micRecClient.EndMicAndRecognition();
     _isMicRecording = false;
 }
 private void StopRecording()
 {
     m_MicrophoneRecording.EndMicAndRecognition();
     m_MicrophoneRecording.AudioStop();
 }
Example #24
        private static void WriteResponseResult(SpeechResponseEventArgs e)
        {
            if (e.PhraseResponse.Results.Length == 0)
            {
                Console.WriteLine("No phrase response is available.");
                result.tableName = "INVALID";
                result.rowID     = -1;
                micClient.EndMicAndRecognition();
            }
            else
            {
                Console.WriteLine("********* Final n-BEST Results *********");
                for (int i = 0; i < e.PhraseResponse.Results.Length; i++)
                {
                    Console.WriteLine(
                        "[{0}] Confidence={1}, Text=\"{2}\"",
                        i,
                        e.PhraseResponse.Results[i].Confidence,
                        e.PhraseResponse.Results[i].DisplayText);
                }
                string       prefix = string.Empty, resultTable = "INVALID";
                int          resultRowRecord = -1;
                List<string> response        = new List<string>();
                foreach (var item in e.PhraseResponse.Results)
                {
                    response.Add(item.DisplayText);
                }
                resultTable      = GetDirectMatchingPrefix(response);
                result.tableName = resultTable;
                for (int i = 0; i < e.PhraseResponse.Results.Length; i++)
                {
                    if (!resultTable.Equals("INVALID", StringComparison.InvariantCultureIgnoreCase))
                    {
                        break;
                    }
                    string sentence = Regex.Replace(e.PhraseResponse.Results[i].DisplayText, "[^a-zA-Z0-9_ ]+", "");
                    if (sentence.Contains(" "))
                    {
                        prefix = sentence.Substring(0, sentence.IndexOf(" "));
                    }
                    else
                    {
                        prefix = sentence;
                    }

                    resultTable      = GetMatchingPrefix(prefix);
                    result.tableName = resultTable;
                }
                resultRowRecord = GetDirectMatchingRowRecord(response, result.tableName);
                result.rowID    = resultRowRecord;
                for (int i = 0; i < e.PhraseResponse.Results.Length; i++)
                {
                    if (resultRowRecord != -1)
                    {
                        break;
                    }
                    string sentence = Regex.Replace(e.PhraseResponse.Results[i].DisplayText, "[^a-zA-Z0-9_  ]+", "");
                    if (sentence.Length > 0)
                    {
                        int j = sentence.IndexOf(" ") + 1;
                        sentence        = sentence.Substring(j);
                        resultRowRecord = GetMatchingRow(result.tableName, sentence);
                        result.rowID    = resultRowRecord;
                    }
                }
            }
            CommandExecution cmdExec = new CommandExecution();

            if (result == null || result.tableName == "INVALID" || result.rowID == -1)
            {
                TextToSpeech.Speak("Sorry! I did not get you.");
            }
            else if (result.tableName == "BROWSE")
            {
                cmdExec.Run(result.rowID);
                TextToSpeech.Speak("Your website is launched");
            }
            else if (result.tableName == "CLIPBOARD")
            {
                cmdExec.CopyToClipBoard(result.rowID);
                TextToSpeech.Speak("Copied your text to clipboard");
            }
            else if (result.tableName == "ARENA")
            {
                Batch b = new Batch();
                using (var db = new Models())
                {
                    var text = db.ArenaDB.Where(q => q.ArenaID == result.rowID).FirstOrDefault();
                    if (!string.IsNullOrEmpty(text.TextCommand))
                    {
                        b.findexe(text.TextCommand);
                    }
                }
            }
            else if (result.tableName == "JIRA")
            {
                cmdExec.ExecuteResult(result.rowID);
            }
            else if (result.tableName == "OUTLOOK")
            {
                new OutlookUtils().HandleOutlookOperations(result.rowID);
            }
        }
        private void StopListening()
        {
            _microphoneRecognitionClient.EndMicAndRecognition();

            IsListening = false;
        }
Example #26
 public void Stop()
 {
     lock (speechClientLocker)
         speechClient.EndMicAndRecognition();
 }
Example #28
 private void btnEnd_Click(object sender, RoutedEventArgs e)
 {
     micClient.EndMicAndRecognition();
 }
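If the button can be clicked before recognition starts, or after the client has been disposed and set to null (as several examples above do), a guarded sketch avoids a NullReferenceException:

 private void btnEnd_Click(object sender, RoutedEventArgs e)
 {
     // Guarded variant: only end the session while a client is active.
     if (micClient != null)
     {
         micClient.EndMicAndRecognition();
     }
 }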