Example #1
    private void OnRecognize(SpeechRecognitionEvent result)
    {
        if (result != null && result.results.Length > 0)
        {
            foreach (var res in result.results)
            {
                foreach (var alt in res.alternatives)
                {
                    string text = string.Format("{0} ({1}, {2:0.00})\n", alt.transcript, res.final ? "Final" : "Interim", alt.confidence);
                    Log.Debug("ExampleStreaming.OnRecognize()", text);
                    ResultsField.text = text;

                    // Magic happens here (Hello World Magic!)
                    // FOR NOW HARD WIRING THE EXPLICIT UTTERANCES TO COLOR STATE CHANGES
                    // LATER WILL USE LOOKUPS, THRESHOLDS, AND STATEHOLDERS (see the lookup sketch after this method)
                    //if (alt.transcript.Contains("red")) {
                    //    sphereMeshRenderer.material = red_material;
                    //    //DroidRenderer.material = red_material;
                    //}
                    //if (alt.transcript.Contains("blue")) {
                    //    sphereMeshRenderer.material = blue_material;
                    //    //DroidRenderer.material = blue_material;
                    //}
                    //if (alt.transcript.Contains("green")) {
                    //    cubeMeshRenderer.material = green_material; // disable for cactus
                    //    DroidRenderer.material = green_material;
                    //}
                    //if (alt.transcript.Contains("yellow")) {
                    //    cubeMeshRenderer.material = yellow_material;  // disable for cactus
                    //    //DroidRenderer.material = yellow_material;
                    //}

                    //  Here is the Emotional Zone - GREP For now
                    //if (alt.transcript.Contains("happy") | alt.transcript.Contains("joy")) {
                    //    bar1JoyRenderer.material = yellow_material;
                    //    //DroidRenderer.material = yellow_material;
                    //}
                    //if (alt.transcript.Contains("sad") | alt.transcript.Contains("depressed")) {
                    //    bar2SadnessRenderer.material = blue_material;
                    //    //DroidRenderer.material = blue_material;
                    //}
                    //if (alt.transcript.Contains("scared") | alt.transcript.Contains("fear")) {
                    //    bar3FearRenderer.material = purple_material;
                    //    //DroidRenderer.material = purple_material;
                    //}
                    //if (alt.transcript.Contains("yucky") | alt.transcript.Contains("gross")) {
                    //    bar4DisgustRenderer.material = green_material;
                    //    //DroidRenderer.material = green_material;
                    //}
                    //if (alt.transcript.Contains("mad") | alt.transcript.Contains("angry")) {
                    //    bar5AngerRenderer.material = red_material;
                    //    //DroidRenderer.material = red_material;
                    //}

                    // ENTERING THE TONE ZONE - originally gated on specific words in the utterance;
                    // the word gate below is disabled for now, so every utterance is sent to the Tone Analyzer
                    // (a sketch of the score-to-bar mapping follows this method).
                    //if (alt.transcript.Contains ("feel") | alt.transcript.Contains ("you") | alt.transcript.Contains ("Jimmy") | alt.transcript.Contains ("robot")) {
                    if (true)
                    {
                        // Runnable.Run(Examples()); // this compiled - it's simply the same test from startup


                        string GHI = alt.transcript;
                        if (!_toneAnalyzer.GetToneAnalyze(OnGetToneAnalyze, OnFail, GHI))
                        {
                            Log.Debug("ExampleToneAnalyzer.Examples()", "Failed to analyze!");
                        }

                        // TEST START
                        //  Analyze tone
                        //if (!_toneAnalyzer.GetToneAnalyze(OnGetToneAnalyze, OnFail, _stringToTestTone2))
                        //    Log.Debug("ExampleToneAnalyzer.Examples()", "Failed to analyze!");

                        Log.Debug("ExampleToneAnalyzer.Examples()", "NESTED TONE ZONE branch complete.");
                        //ResultsField.text = "tone analyzed! 111";
                        // TEST END
                    }

                    // RESET ZONE - when the utterance contains this word, restore all scales and materials
                    if (alt.transcript.Contains("reset"))
                    {
                        cyl1AnalyticalRenderer.transform.localScale = new Vector3(1F, 1F, 1F);
                        cyl2ConfidentRenderer.transform.localScale  = new Vector3(1F, 1F, 1F);
                        cyl3TentativeRenderer.transform.localScale  = new Vector3(1F, 1F, 1F);
                        bar1JoyRenderer.transform.localScale        = new Vector3(1F, 1F, 1F);
                        bar2SadnessRenderer.transform.localScale    = new Vector3(1F, 1F, 1F);
                        bar3FearRenderer.transform.localScale       = new Vector3(1F, 1F, 1F);
                        bar4DisgustRenderer.transform.localScale    = new Vector3(1F, 1F, 1F);
                        bar5AngerRenderer.transform.localScale      = new Vector3(1F, 1F, 1F);
                        DroidRenderer.material = white_material;

                        sphere_emo_sadnessRenderer.transform.localScale = new Vector3(.075F, .075F, .075F);
                        sphere_emo_angerRenderer.transform.localScale   = new Vector3(.075F, .075F, .075F);
                        sphere_emo_disgustRenderer.transform.localScale = new Vector3(.075F, .075F, .075F);
                        sphere_emo_fearRenderer.transform.localScale    = new Vector3(.075F, .075F, .075F);
                        sphere_emo_joyRenderer.transform.localScale     = new Vector3(.075F, .075F, .075F);

                        TinManRenderer.transform.localScale = new Vector3(10F, 10F, 10F);
                    }

                    if (alt.transcript.Contains("panic attack"))
                    {
                        sphere_emo_sadnessRenderer.transform.localScale = new Vector3(1F, 1F, 1F);
                        sphere_emo_angerRenderer.transform.localScale   = new Vector3(1F, 1F, 1F);
                        sphere_emo_disgustRenderer.transform.localScale = new Vector3(1F, 1F, 1F);
                        sphere_emo_fearRenderer.transform.localScale    = new Vector3(1F, 1F, 1F);
                        sphere_emo_joyRenderer.transform.localScale     = new Vector3(1F, 1F, 1F);
                    }

                    if (alt.transcript.Contains("mouse"))
                    {
                        TinManRenderer.transform.localScale = new Vector3(2F, 2F, 2F);
                    }

                    if (alt.transcript.Contains("elephant"))
                    {
                        TinManRenderer.transform.localScale = new Vector3(20F, 20F, 20F);
                    }
                }

                if (res.keywords_result != null && res.keywords_result.keyword != null)
                {
                    foreach (var keyword in res.keywords_result.keyword)
                    {
                        Log.Debug("ExampleStreaming.OnRecognize()", "keyword: {0}, confidence: {1}, start time: {2}, end time: {3}", keyword.normalized_text, keyword.confidence, keyword.start_time, keyword.end_time);
                        //ResultsField.text = "tone analyzed! 222";
                    }
                }

                if (res.word_alternatives != null)
                {
                    foreach (var wordAlternative in res.word_alternatives)
                    {
                        Log.Debug("ExampleStreaming.OnRecognize()", "Word alternatives found. Start time: {0} | EndTime: {1}", wordAlternative.start_time, wordAlternative.end_time);
                        foreach (var alternative in wordAlternative.alternatives)
                        {
                            Log.Debug("ExampleStreaming.OnRecognize()", "\t word: {0} | confidence: {1}", alternative.word, alternative.confidence);
                        }
                    }
                }
            }
        }
    }
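The comments in the example above plan to replace the hard-wired Contains() chains with lookups and thresholds, and the tone-zone branch hands each transcript to GetToneAnalyze so that a callback can drive the bar scales the "reset" branch restores. The snippet below is only a sketch of those two steps, not part of the original sample: the names _colorCommands, ApplyColorKeywords, _toneBars and ApplyToneScore are illustrative, it assumes the same renderer and material fields the example already declares plus "using System.Collections.Generic;", and the tone_id strings follow the Tone Analyzer v3 documentation rather than anything shown in this example.

    // Lookup-table form of the commented-out color Contains() chain.
    private Dictionary<string, KeyValuePair<MeshRenderer, Material>> _colorCommands;

    private void InitColorCommands()
    {
        _colorCommands = new Dictionary<string, KeyValuePair<MeshRenderer, Material>>
        {
            { "red",    new KeyValuePair<MeshRenderer, Material>(sphereMeshRenderer, red_material) },
            { "blue",   new KeyValuePair<MeshRenderer, Material>(sphereMeshRenderer, blue_material) },
            { "green",  new KeyValuePair<MeshRenderer, Material>(cubeMeshRenderer, green_material) },
            { "yellow", new KeyValuePair<MeshRenderer, Material>(cubeMeshRenderer, yellow_material) }
        };
    }

    // Applies every color keyword found in the utterance; call it with
    // alt.transcript from inside the alternatives loop above.
    private void ApplyColorKeywords(string transcript)
    {
        foreach (var command in _colorCommands)
        {
            if (transcript.Contains(command.Key))
            {
                command.Value.Key.material = command.Value.Value;
            }
        }
    }

    // Maps a tone_id from the Tone Analyzer response to the matching bar and
    // scales it by the score (0..1). The OnGetToneAnalyze success callback
    // (not shown in this example) would loop over the returned tones and call this.
    private Dictionary<string, MeshRenderer> _toneBars;

    private void InitToneBars()
    {
        _toneBars = new Dictionary<string, MeshRenderer>
        {
            { "joy",     bar1JoyRenderer },
            { "sadness", bar2SadnessRenderer },
            { "fear",    bar3FearRenderer },
            { "disgust", bar4DisgustRenderer },
            { "anger",   bar5AngerRenderer }
        };
    }

    private void ApplyToneScore(string toneId, double score)
    {
        MeshRenderer bar;
        if (_toneBars.TryGetValue(toneId, out bar))
        {
            // Stretch the bar vertically in proportion to the tone score;
            // "reset" puts it back to localScale (1, 1, 1).
            bar.transform.localScale = new Vector3(1F, 1F + (float)score, 1F);
        }
    }
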
    private void HandleOnRecognizeOgg(SpeechRecognitionEvent result, Dictionary<string, object> customData)
    {
        Log.Debug("ExampleSpeechToText.HandleOnRecognizeOgg()", "Speech to Text - Get model response: {0}", customData["json"].ToString());
        _recognizeOggTested = true;
    }

    private void HandleOnRecognize(SpeechRecognitionEvent result, Dictionary<string, object> customData)
    {
        Log.Debug("ExampleSpeechToText.HandleOnRecognize()", "{0}", customData["json"].ToString());
        Test(result != null);
        _recognizeTested = true;
    }

    private void OnRecognize(SpeechRecognitionEvent result, Dictionary<string, object> customData)
    {
        if (result != null && result.results.Length > 0)
        {
            foreach (var res in result.results)
            {
                foreach (var alt in res.alternatives) // This section outputs the Speech to Text predictions (a table-driven sketch of the command branches follows this method).
                {
                    State = res.final ? "Final" : "Interim";
                    string text = string.Format("{0} ({1}, {2:0.00})\n", alt.transcript, res.final ? "Final" : "Interim", alt.confidence);
                    Log.Debug("ExampleStreaming.OnRecognize()", text);

                    if (alt.transcript.Contains("are you") && State.Contains("Final")) // Does our transcipt contain the words "are you" and is the prediction final.
                    {
                        StartCoroutine(ExampleTextToSpeech.I1_AreYouThere());          // When it detects these words, it runs a coroutine in the Text to Speech script.
                    }

                    if (text.Contains("") && State.Contains("Final"))         // Does our transcipt contain the words "are you" and is the prediction final.
                    {
                        StartCoroutine(ExampleTextToSpeech.I1_AreYouThere()); //When it detect these words it will execute a Coroutine in the Text to Speech script.
                        Debug.Log("Temperature status");
                    }

                    if (text.Contains("yeah") || text.Contains("yes") && State.Contains("Final")) // needs to be final or ECHO happens
                    {
                        StartCoroutine(ExampleTextToSpeech.I2_Design());
                        REST.GetComponent <MainScript>().PostHeaterOn();
                        Toggle.SetActive(true);
                        Glow.SetActive(true);
                        //Activates game object/s on Command.
                    }

                    if (text.Contains("lights") || text.Contains("lets") && State.Contains("Final")) // needs to be final or ECHO happens
                    {
                        StartCoroutine(ExampleTextToSpeech.I3_Open());
                        REST.GetComponent <MainScript>().PostLED_ON();
                        Toggle.SetActive(true);
                        Glow.SetActive(true);
                    }



                    if (text.Contains("green") && State.Contains("Final")) // needs to be final or ECHO happens
                    {
                        StartCoroutine(ExampleTextToSpeech.I4_Rotation());
                        REST.GetComponent <MainScript>().PostLED_GREEN();
                        Toggle.SetActive(true);
                    }


                    if (text.Contains("shut") && State.Contains("Final")) // needs to be final or ECHO happens
                    {
                        StartCoroutine(ExampleTextToSpeech.I5_Change());
                        REST.GetComponent <MainScript>().PostLED_OFF();
                        REST.GetComponent <MainScript>().PostHeaterOff();
                        Toggle.SetActive(false);
                    }

                    if (text.Contains("status") && State.Contains("Final")) // needs to be final or ECHO happens
                    {
                        StartCoroutine(ExampleTextToSpeech.I6_Status());
                        Debug.Log("Temperature status");
                    }



                    ResultsField.text = text;
                }

                if (res.keywords_result != null && res.keywords_result.keyword != null)
                {
                    foreach (var keyword in res.keywords_result.keyword)
                    {
                        Log.Debug("ExampleStreaming.OnRecognize()", "keyword: {0}, confidence: {1}, start time: {2}, end time: {3}", keyword.normalized_text, keyword.confidence, keyword.start_time, keyword.end_time);
                    }
                }

                if (res.word_alternatives != null)
                {
                    foreach (var wordAlternative in res.word_alternatives)
                    {
                        Log.Debug("ExampleStreaming.OnRecognize()", "Word alternatives found. Start time: {0} | EndTime: {1}", wordAlternative.start_time, wordAlternative.end_time);
                        foreach (var alternative in wordAlternative.alternatives)
                        {
                            Log.Debug("ExampleStreaming.OnRecognize()", "\t word: {0} | confidence: {1}", alternative.word, alternative.confidence);
                        }
                    }
                }
            }
        }
    }
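Every command branch in this second handler repeats the same pattern: a keyword Contains() check, a final-only gate so synthesized replies are not re-recognized (the "ECHO" the comments warn about), a Text to Speech coroutine, and a REST call into MainScript. A table-driven sketch of that pattern follows; it reuses the ExampleTextToSpeech coroutines and the REST, Toggle and Glow objects from the example above, assumes "using System;" and "using System.Collections.Generic;", and the names _voiceCommands, InitVoiceCommands and DispatchVoiceCommand are illustrative.

    // Keyword -> handler table; "yeah" and "lets" could map to the same
    // handlers as "yes" and "lights" if the looser matching is wanted.
    private Dictionary<string, Action> _voiceCommands;

    private void InitVoiceCommands()
    {
        _voiceCommands = new Dictionary<string, Action>
        {
            { "are you", () => StartCoroutine(ExampleTextToSpeech.I1_AreYouThere()) },
            { "yes", () =>
                {
                    StartCoroutine(ExampleTextToSpeech.I2_Design());
                    REST.GetComponent<MainScript>().PostHeaterOn();
                    Toggle.SetActive(true);
                    Glow.SetActive(true);
                } },
            { "lights", () =>
                {
                    StartCoroutine(ExampleTextToSpeech.I3_Open());
                    REST.GetComponent<MainScript>().PostLED_ON();
                    Toggle.SetActive(true);
                    Glow.SetActive(true);
                } },
            { "green", () =>
                {
                    StartCoroutine(ExampleTextToSpeech.I4_Rotation());
                    REST.GetComponent<MainScript>().PostLED_GREEN();
                    Toggle.SetActive(true);
                } },
            { "shut", () =>
                {
                    StartCoroutine(ExampleTextToSpeech.I5_Change());
                    REST.GetComponent<MainScript>().PostLED_OFF();
                    REST.GetComponent<MainScript>().PostHeaterOff();
                    Toggle.SetActive(false);
                } },
            { "status", () => StartCoroutine(ExampleTextToSpeech.I6_Status()) }
        };
    }

    // Runs every handler whose keyword appears in the transcript, and only for
    // final results so text-to-speech output is not picked up again by the mic.
    // Call it as DispatchVoiceCommand(alt.transcript, res.final) from the loop above.
    private void DispatchVoiceCommand(string transcript, bool isFinal)
    {
        if (!isFinal || _voiceCommands == null)
        {
            return;
        }

        foreach (var command in _voiceCommands)
        {
            if (transcript.Contains(command.Key))
            {
                command.Value();
            }
        }
    }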