private void Recognizer_Recognizing(object sender, SpeechRecognitionEventArgs e)
 {
     Dispatcher.Invoke(() =>
     {
         MessageBlock.Text = $"{lastLine}\n{e.Result.Text.ToUpper()}";
     });
 }
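The handlers throughout these examples assume a SpeechRecognizer that was created and started elsewhere. For orientation only, a minimal wiring sketch with the Microsoft.CognitiveServices.Speech SDK might look like the following; the key, region, and handler names are placeholders rather than code taken from any example.

using Microsoft.CognitiveServices.Speech;

// Minimal wiring sketch (inside an async method); placeholder key and region.
var config = SpeechConfig.FromSubscription("<your-key>", "<your-region>");
var recognizer = new SpeechRecognizer(config);

recognizer.Recognizing += Recognizer_Recognizing;   // interim hypotheses
recognizer.Recognized += Recognizer_Recognized;     // final results

await recognizer.StartContinuousRecognitionAsync();
// ... later ...
await recognizer.StopContinuousRecognitionAsync();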
Example #2
    // "Recognizing" events are fired every time we receive interim results during recognition (i.e. hypotheses)
    private void RecognizingHandler(object sender, SpeechRecognitionEventArgs e)
    {
        //If we are recognizing speech
        if (e.Result.Reason == ResultReason.RecognizingSpeech)
        {
            //Check who is talking
            requester = new Request("2", byteStream);
            requester.Start();

            UnityEngine.Debug.LogFormat($"HYPOTHESIS: Text={e.Result.Text}");
            UnityEngine.Debug.LogFormat($"Returned Message: {requester.ServerMessage}");

            //Join thread. Wait for action to complete
            requester.Stop();

            // Populate recognized string, ready for transcription.
            recognizedString = e.Result.Text;

            //If the user is detected
            if (requester.ServerMessage == userName)
            {
                // Don't transcribe; disable the text element so Update() can't change it.
                recognizedText.enabled = false;
                UnityEngine.Debug.LogFormat($"User found or null value");
                recognizedText.text = "";
            }
            else
            {
                // Transcribe
                recognizedText.text = recognizedString;
            }
        }
    }
Example #3
        private void Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            Console.WriteLine();
            var test = GetResponse(e.Result.Properties);

            SpeechResult.Add(test);
        }
Example #4
        private void _speechRecognizer_Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            var result = e.Result;

            // Checks result.
            if (result.Reason == ResultReason.RecognizedSpeech)
            {
                if (!String.IsNullOrWhiteSpace(result.Text))
                {
                    Console.WriteLine($"- {result.Text}");
                    _reconizedSpeeches.Add(
                        new ReconizedSpeech
                        {
                            Result = result.Text
                        });
                }
            }
            else if (result.Reason == ResultReason.NoMatch)
            {
                Console.WriteLine($"NOMATCH: Speech could not be recognized.");
            }
            else if (result.Reason == ResultReason.Canceled)
            {
                var cancellation = CancellationDetails.FromResult(result);
                Console.WriteLine($"CANCELED: Reason={cancellation.Reason}");

                if (cancellation.Reason == CancellationReason.Error)
                {
                    Console.WriteLine($"CANCELED: ErrorCode={cancellation.ErrorCode}");
                    Console.WriteLine($"CANCELED: ErrorDetails={cancellation.ErrorDetails}");
                    Console.WriteLine($"CANCELED: Did you update the subscription info?");
                }
            }
        }
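This handler checks for ResultReason.Canceled inside the Recognized event. The Speech SDK also raises a dedicated Canceled event whose event args expose the same details directly; a brief sketch, assuming the recognizer from the wiring example above:

// Optional: subscribe to the Canceled event instead of inspecting the result reason.
recognizer.Canceled += (s, e) =>
{
    Console.WriteLine($"CANCELED: Reason={e.Reason}");
    if (e.Reason == CancellationReason.Error)
    {
        Console.WriteLine($"CANCELED: ErrorCode={e.ErrorCode}, ErrorDetails={e.ErrorDetails}");
    }
};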
Example #5
 // Intermediate results produced during recognition
 private void Recognizer_Recognizing(object sender, SpeechRecognitionEventArgs e)
 {
     if (!string.IsNullOrEmpty(e.Result.Text))
     {
         Log("中间结果: " + e.Result.Text);
     }
 }
        private static void Recognizer_Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            if (GlobalFlowControl.moduleActivated == false ||
                GlobalFlowControl.ChatbotInterrupted == true)
            {
                queryResult = null;
                return;
            }

            if (stopRecognizing)
            {
                return;
            }
            Debug.WriteLine("Recognized: " + e.Result.Text);

            if (isAskingQuestion)
            {
                if (e.Result.Text.Length > 9)
                {
                    queryResult = e.Result.Text;

                    stopRecognizing = true;
                }
            }
            else
            {
                if (IsContainKeyword(e.Result.Text))
                {
                    //queryResult = e.Result.Text;
                    stopRecognizing = true;
                }
            }
        }
 private void SpeechBotConnector_Recognizing(object sender, SpeechRecognitionEventArgs e)
 {
     if (e.Result.Reason == ResultReason.RecognizingKeyword)
     {
         Trace.TraceInformation($"Keyword Recognition: Verifying: {e.Result.Text}");
     }
 }
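The ResultReason.RecognizingKeyword value seen here (and ResultReason.RecognizedKeyword in a later example) only appears when recognition is started with a keyword model. A sketch, assuming a keyword model file exported from Speech Studio; the path is a placeholder:

// "keyword.table" is a placeholder path to a keyword model file.
var keywordModel = KeywordRecognitionModel.FromFile("keyword.table");
await recognizer.StartKeywordRecognitionAsync(keywordModel);
// ...
await recognizer.StopKeywordRecognitionAsync();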
        /// <summary>
        /// Logs the final recognition result
        /// </summary>
        private void RecognizedEventHandler(SpeechRecognitionEventArgs e, RecoType rt)
        {
            TextBox log;

            if (rt == RecoType.Base)
            {
                log = this.baseModelLogText;
                this.SetCurrentText(this.baseModelCurrentText, e.Result.Text);
            }
            else
            {
                log = this.customModelLogText;
                this.SetCurrentText(this.customModelCurrentText, e.Result.Text);
            }

            this.WriteLine(log);
            this.WriteLine(log, $" --- Final result received. Reason: {e.Result.Reason.ToString()}. --- ");
            if (!string.IsNullOrEmpty(e.Result.Text))
            {
                //write log
                this.WriteLine(log, e.Result.Text);

                //write result
                this.WriteLine(this.recognizedText, $"[{DateTime.Now.ToString("HH:mm:ss")}] Me: {e.Result.Text}");
                SendMessageToBot(e.Result.Text);
            }
        }
Example #9
    public void Recognized(object s, SpeechRecognitionEventArgs e)
    {
        var rec    = s as SpeechRecognizer;
        var result = e.Result;
        // Checks result.
        string newMessage = outputText.text;

        if (result.Reason == ResultReason.RecognizedSpeech)
        {
            buf += result.Text;
        }
        else if (result.Reason == ResultReason.NoMatch)
        {
            // No recognizable speech was detected; leave the buffer unchanged.
        }
        else if (result.Reason == ResultReason.Canceled)
        {
            var cancellation = CancellationDetails.FromResult(result);
            newMessage = $"CANCELED: Reason={cancellation.Reason} ErrorDetails={cancellation.ErrorDetails}";
        }

        lock (threadLocker)
        {
            waitingForReco = false;
        }
    }
Example #10
        private void _speechClient_Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            Debug.WriteLine($"{e.SessionId} > Final result: {e.Result.Text}");
            SpeechRecognizer r = (SpeechRecognizer)sender;

            SendTranscript(r.SpeechRecognitionLanguage, e.Result.Text);
        }
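The SpeechRecognitionLanguage read back from the recognizer here is whatever was configured before the recognizer was created; for example (the locale value is illustrative):

var config = SpeechConfig.FromSubscription("<your-key>", "<your-region>");
config.SpeechRecognitionLanguage = "en-US";   // illustrative locale
var recognizer = new SpeechRecognizer(config);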
Example #11
        private void SpeechRecognition_Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            if (e.confidence >= SpeechRecognitionConfidenceThreshold)
            {
                switch (e.command)
                {
                case Commands.ZOOM_IN:
                    if (ptz != null)
                    {
                        ptz.ZoomLevel = 1;
                    }
                    break;

                case Commands.ZOOM_OUT:
                    if (ptz != null)
                    {
                        ptz.ZoomLevel = 0;
                    }
                    break;

                case Commands.TRACK_ON:
                    TrackingPresenter = true;
                    break;

                case Commands.TRACK_OFF:
                    TrackingPresenter = false;
                    break;
                }
            }
        }
Example #12
        private void SpeechRecognition_Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            Dictionary<string, ICommand> commands;

            if (EditorVisible)
            {
                commands = commands_Editor;
            }
            else if (VisualizerVisible)
            {
                commands = commands_Visualizer;
            }
            else
            {
                commands = commands_Manager;
            }

            string   speechCommand = e.command;
            ICommand cmd;

            if (e.confidence >= SpeechRecognitionConfidenceThreshold)
            {
                if (!commands.TryGetValue(speechCommand, out cmd))
                {
                    // Not a UI command; assume it's a gesture name. If the Visualizer is visible,
                    // also execute the keyboard shortcut for the gesture (apart from highlighting it).
                    ExecuteGesture(speechCommand, VisualizerVisible);
                }
                else if (cmd.CanExecute(null))
                {
                    cmd.Execute(null);
                }
            }
        }
Example #13
        private static async void OnSpeechRecognizedAsync(object sender, SpeechRecognitionEventArgs e)
        {
            if (e.Result.Reason == ResultReason.NoMatch)
            {
                System.Console.WriteLine("NO MATCH: Speech could not be recognized.");
                return;
            }

            string spokenText = e.Result.Text;

            System.Console.WriteLine($"RECOGNIZED: Text={spokenText}");

            // Ignore everything that doesn't include the bot's name
            if (!spokenText.IncludesTheWords(s_twitchBotSettings.BotName))
            {
                return;
            }

            spokenText = spokenText.RemoveText(s_twitchBotSettings.BotName);

            // Process the request
            if (spokenText.IncludesTheWords("sleep") ||
                spokenText.IncludesTheWords("shut", "down"))
            {
                await SpeakAsync("Shutting down");

                s_stopRecognition.TrySetCanceled(new CancellationToken(true));
            }
            else if (spokenText.IncludesTheWords("Twitch"))
            {
                s_twitchBot.HandleTwitchCommand(spokenText);
            }
        }
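This handler ends recognition by cancelling a shared TaskCompletionSource (s_stopRecognition). A hedged sketch of the companion loop such a handler typically pairs with; apart from s_stopRecognition, the names here (s_recognizer) are assumptions, not the author's code:

await s_recognizer.StartContinuousRecognitionAsync();
try
{
    // Completed or cancelled from the Recognized handler above.
    await s_stopRecognition.Task;
}
catch (TaskCanceledException)
{
    // TrySetCanceled in the handler lands here; treat it as a shutdown request.
}
await s_recognizer.StopContinuousRecognitionAsync();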
Example #14
        /// <summary>
        /// Logs the final recognition result
        /// </summary>
        private void RecognizedEventHandler(SpeechRecognitionEventArgs e, RecoType rt)
        {
            SpeechRecognizedEventArgs eventArgs = new SpeechRecognizedEventArgs();

#if DEBUG
            if (rt == RecoType.Base)
            {
                eventArgs.Message = e.Result.Text;
                OnSpeechRecognizedEvent(eventArgs);
            }
            else
            {
                eventArgs.Message = e.Result.Text;
                OnSpeechRecognizedEvent(eventArgs);
            }

            eventArgs.Message = $" --- Final result received. Reason: {e.Result.Reason.ToString()}. --- ";
            OnSpeechRecognizedEvent(eventArgs);
#endif
            if (!string.IsNullOrEmpty(e.Result.Text))
            {
                //this.WriteLine(log, e.Result.Text);
                eventArgs.Message = e.Result.Text;
                OnSpeechRecognizedEvent(eventArgs);
            }

            // if access to the JSON is needed it can be obtained from Properties
            string json = e.Result.Properties.GetProperty(PropertyId.SpeechServiceResponse_JsonResult);
        }
Example #15
 private static void Recognizer_Recognizing(object sender, SpeechRecognitionEventArgs e)
 {
     //Console.ForegroundColor = ConsoleColor.Gray;
     //Console.WriteLine();
     //Console.WriteLine($"SpeechRecognitionEventArgs={e.Result}");
     //Console.WriteLine($"SpeechRecognitionEventArgs={e.Result.Text}");
 }
Example #16
 private void RecognisedEventHandler(object sender, SpeechRecognitionEventArgs e)
 {
     if (!string.IsNullOrEmpty(e.Result.Text))
     {
         onSpeech(e.Result.Text);
     }
 }
Example #17
        private async Task RecognizedEventHandlerAsync(SpeechRecognitionEventArgs e, RecoType rt)
        {
            TextBox log;

            if (rt == RecoType.Base)
            {
                log = this.baseModelLogText;
                this.SetCurrentText(this.baseModelCurrentText, e.Result.Text);
                if (e.Result.Text.Equals("What's the weather like?"))
                {
                    var config = SpeechConfig.FromSubscription(this.SubscriptionKey, this.Region);
                    config.SpeechRecognitionLanguage = this.RecognitionLanguage;

                    SpeechSynthesizer synthesizer;
                    if (this.UseMicrophone)
                    {
                        using (synthesizer = new SpeechSynthesizer(config))
                        {
                            // Speaks an Indonesian reply, roughly: "The weather is really hot!"
                            await synthesizer.SpeakTextAsync("cuacanya panas banget boo!");
                        }
                    }
                }
            }
            else
            {
                log = this.customModelLogText;
                this.SetCurrentText(this.customModelCurrentText, e.Result.Text);
            }
        }
Example #18
        private void OnSpeechRecognized(object sender, SpeechRecognitionEventArgs e)
        {
            string previousResult = Interlocked.Exchange(ref _lastResult, e.Result.Text);

            switch (e.Result.Reason)
            {
            case ResultReason.RecognizingSpeech:
                Interlocked.Increment(ref _intermediateResultsReceived);
                Console.Out.Write($"[{e.Result.Text.Length}]\r");
                if (previousResult != null && previousResult.Equals(e.Result.Text))
                {
                    Interlocked.Increment(ref _identicalResults);
                }
                break;

            case ResultReason.RecognizedSpeech:
                Interlocked.Increment(ref _finalResultsReceived);
                foreach (DetailedSpeechRecognitionResult result in SpeechRecognitionResultExtensions.Best(e.Result))
                {
                    string confidence = result.Confidence.ToString("F2");
                    string text       = $"{confidence}|{result.Text}";
                    Trace.WriteLine(text);
                    _transcript.WriteLine(text);
                    Console.Out.WriteLine(text);
                }
                _transcript.WriteLine();
                _transcript.Flush();
                break;
            }
        }
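The Best() extension used above enumerates the N-best alternates with per-alternate confidence; those fields come from the detailed recognition output, which is typically requested on the config up front. A sketch, not taken from the original code:

var config = SpeechConfig.FromSubscription("<your-key>", "<your-region>");
config.OutputFormat = OutputFormat.Detailed;   // enables confidence scores and N-best alternates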
        private static void Recognizer_Recognizing(object sender, SpeechRecognitionEventArgs e)
        {
            if (GlobalFlowControl.moduleActivated == false ||
                GlobalFlowControl.ChatbotInterrupted == true)
            {
                queryResult = null;
                return;
            }
            if (GlobalFlowControl.AskByPressingButton)
            {
                return;
            }

            queryResult = e.Result.Text;
            Debug.WriteLine("Recognizing: " + e.Result.Text);
            if (stopRecognizing)
            {
                return;
            }

            if (isAskingQuestion == false)
            {
                if (IsContainKeyword(queryResult))
                {
                    //queryResult = e.Result.Text;
                    stopRecognizing = true;
                }
            }
            else
            {
                recognizingCount++;
            }
        }
Example #20
 private void RecognisingSpeechHandler(object sender, SpeechRecognitionEventArgs e)
 {
     if (e.Result.Text != null)
     {
         this.RecognisedSpeech?.Invoke(e.Result.Text);
     }
 }
        private void SpeechBotConnector_Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            if (e.Result.Reason == ResultReason.RecognizedSpeech)
            {
                this.RecognizedText = e.Result.Text;
                this.speechDuration = (int)e.Result.Duration.TotalMilliseconds;

                Trace.TraceInformation($"[{DateTime.Now.ToString("h:mm:ss tt", CultureInfo.CurrentCulture)}] Recognized event received. SessionId = {e.SessionId}, Speech duration = {this.speechDuration}, Recognized text = {this.RecognizedText}");
            }
            else if (e.Result.Reason == ResultReason.RecognizedKeyword)
            {
                this.RecognizedKeyword = e.Result.Text;

                Trace.TraceInformation($"[{DateTime.Now.ToString("h:mm:ss tt", CultureInfo.CurrentCulture)}] Recognized event received. SessionId = {e.SessionId}");
                Trace.TraceInformation($"Keyword Recognition Verified : {e.Result.Text}");
            }
            else if (e.Result.Reason == ResultReason.NoMatch)
            {
                Trace.TraceInformation($"[{DateTime.Now.ToString("h:mm:ss tt", CultureInfo.CurrentCulture)}] Recognized event received. Speech could not be recognized. SessionId = {e.SessionId}");
                Trace.TraceInformation($"No match details = {NoMatchDetails.FromResult(e.Result)}");
            }
            else
            {
                Trace.TraceInformation($"[{DateTime.Now.ToString("h:mm:ss tt", CultureInfo.CurrentCulture)}] Recognized event received. e.Result.Reason = {e.Result.Reason}. SessionId = {e.SessionId}");
            }
        }
        //Writes the response result.
        private void EchoResponse(SpeechRecognitionEventArgs e)
        {
            WriteLine("Speech To Text Result:");
            //handle the case when there are no results.
            //common situation is when there is a pause from user and audio captured has no speech in it
            if (e.Result.Text.Length == 0)
            {
                WriteLine("No phrase response is available.");
                WriteLine();
            }
            else
            {
                WriteLine(
                    "Text=\"{0}\"",
                    e.Result.Text);
                WriteLine();

                var botReplyTask = this.GetBotReplyAsync(e.Result.Text);

                //Send the transcribed text to the bot and block until the response arrives.
                string result = botReplyTask.Result;

                //Play audio from the text-to-speech API (fire-and-forget; the returned task is not awaited).
                var speakTask = this.PlaySpeechAudioAsync(result);
            }
        }
        /// <summary>
        /// Logs the final recognition result.
        /// </summary>
        private void RecognizedEventHandler(SpeechRecognitionEventArgs e)
        {
            string json = e.Result.Properties.GetProperty(PropertyId.SpeechServiceResponse_JsonResult);
            var utt = JsonConvert.DeserializeObject<RealTimeUtt>(json);

            FinalResultsCumulative.Add(utt);
        }
Example #24
 private void SpeechRecognizer_Recognized(object sender, SpeechRecognitionEventArgs e)
 {
     DispatcherHelper.CheckBeginInvokeOnUI(() =>
     {
         SelectedItem.Text += $" {e.Result.Text}";
     });
 }
        /// <summary>
        /// Logs the final recognition result
        /// </summary>
        private void RecognizedEventHandler(SpeechRecognitionEventArgs e, RecoType rt)
        {
            TextBox log;

            if (rt == RecoType.Base)
            {
                log = this.baseModelLogText;
                this.SetCurrentText(this.baseModelCurrentText, e.Result.Text);
            }
            else
            {
                log = this.customModelLogText;
                this.SetCurrentText(this.customModelCurrentText, e.Result.Text);
            }

            this.WriteLine(log);
            this.WriteLine(log, $" --- Final result received. Reason: {e.Result.Reason.ToString()}. --- ");
            if (!string.IsNullOrEmpty(e.Result.Text))
            {
                this.WriteLine(log, e.Result.Text);
            }

            // if access to the JSON is needed it can be obtained from Properties
            string json = e.Result.Properties.GetProperty(PropertyId.SpeechServiceResponse_JsonResult);
        }
Example #26
        private static void Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            switch (e.Recognized)
            {
            case "英語":
                _language = Language.English;
                return;

            case "韓国語":
                _language = Language.Korean;
                return;

            case "スペイン語":
                _language = Language.Spanish;
                return;

            case "フランス語":
                _language = Language.French;
                return;

            case "ロシア語":
                _language = Language.Russian;
                return;
            }

            Console.WriteLine(e.Recognized);
            var translated = Translate(e.Recognized, _language);

            Console.WriteLine(translated);
            var mp3 = TextToSpeech(translated, _language);

            Play(mp3);
        }
Example #27
        // "Recognized" events are fired when the utterance end was detected by the server
        private void RecognizedHandler(object sender, SpeechRecognitionEventArgs e)
        {
            //This if statement creates the transcript :)
            if (e.Result.Reason == ResultReason.RecognizedSpeech)
            {
                UnityEngine.Debug.LogFormat($"RECOGNIZED: Text={e.Result.Text}");

                //Will eliminate false speech pick-ups from transcript
                if (e.Result.Text.Contains(" "))
                {
                    transcript += (e.Result.Text + "\n");
                }

                //Will accept valid answer as long as text length > 5
                if (e.Result.Text.Length > 5)
                {
                    questionNum++;               //Goes to next question
                    canPlay[questionNum] = true; //This (new) question's canPlay value is now true, meaning it will be played.
                }
                UnityEngine.Debug.Log(transcript);
            }
            else if (e.Result.Reason == ResultReason.NoMatch)
            {
                UnityEngine.Debug.LogFormat($"NOMATCH: Speech could not be recognized.");
            }
        }
Example #28
 // "Recognizing" events are fired every time we receive interim results during recognition (i.e. hypotheses)
 private void RecognizingHandler(object sender, SpeechRecognitionEventArgs e)
 {
     if (e.Result.Reason == ResultReason.RecognizingSpeech)
     {
         UnityEngine.Debug.LogFormat($"HYPOTHESIS: Text={e.Result.Text}");
     }
 }
Example #29
        private static void Recognizer_Recognizing(object sender, SpeechRecognitionEventArgs e)
        {
            var result = e.Result;

            if (result.Reason == ResultReason.RecognizingSpeech)
            {
                Console.WriteLine($"We recognized: {result.Text}");
            }
            else if (result.Reason == ResultReason.NoMatch)
            {
                Console.WriteLine($"NOMATCH: Speech could not be recognized.");
            }
            else if (result.Reason == ResultReason.Canceled)
            {
                var cancellation = CancellationDetails.FromResult(result);
                Console.WriteLine($"CANCELED: Reason={cancellation.Reason}");

                if (cancellation.Reason == CancellationReason.Error)
                {
                    Console.WriteLine($"CANCELED: ErrorCode={cancellation.ErrorCode}");
                    Console.WriteLine($"CANCELED: ErrorDetails={cancellation.ErrorDetails}");
                    Console.WriteLine($"CANCELED: Did you update the subscription info?");
                }
            }
        }
Example #30
 private void RecognizingHandler(object sender, SpeechRecognitionEventArgs e)
 {
     lock (threadLocker)
     {
         message = e.Result.Text;
     }
 }
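In Unity samples this pattern usually pairs with an Update() method that copies the shared field to a UI element on the main thread under the same lock. A hedged sketch; outputText is an assumed UI Text reference, not part of the example:

// Assumed companion code running on the Unity main thread.
void Update()
{
    lock (threadLocker)
    {
        if (outputText != null)
        {
            outputText.text = message;
        }
    }
}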
Example #31
        private void SpeechRecognition_Recognized(object sender, SpeechRecognitionEventArgs e)
        {
            Dictionary<string, ICommand> commands;
            if (EditorVisible) commands = commands_Editor;
            else if (VisualizerVisible) commands = commands_Visualizer;
            else commands = commands_Manager;

            string speechCommand = e.command;
            ICommand cmd;

            if (e.confidence >= SpeechRecognitionConfidenceThreshold)
            {
                if (!commands.TryGetValue(speechCommand, out cmd))
                {
                    // Not a UI command; assume it's a gesture name. If the Visualizer is visible,
                    // also execute the keyboard shortcut for the gesture (apart from highlighting it).
                    ExecuteGesture(speechCommand, VisualizerVisible);
                }
                else if (cmd.CanExecute(null))
                {
                    cmd.Execute(null);
                }
            }
        }
 void ipClient_SpeechRecognized(object sender, SpeechRecognitionEventArgs e)
 {
     RaiseSpeechRecognized(new CallInputEventArgs(e.LineNumber, e.Phrase));
 }