Example #1
    void Start()
    {
        Recognitions.text = "";

        DictationRecognizer = new DictationRecognizer();

        DictationRecognizer.DictationResult += (text, confidence) =>
        {
            Debug.LogFormat("Dictation result: {0}", text);
            Recognitions.text = text;
        };

        DictationRecognizer.DictationComplete += (completionCause) =>
        {
            if (completionCause != DictationCompletionCause.Complete)
            {
                Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
            }
        };

        DictationRecognizer.DictationError += (error, hresult) =>
        {
            Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        };

        DictationRecognizer.Start();
    }
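A minimal cleanup sketch (not part of Example #1), assuming the DictationRecognizer field created in Start() above; it stops and releases the recognizer when the component is destroyed.
    void OnDestroy()
    {
        // Hedged sketch: stop and dispose the recognizer created in Start().
        if (DictationRecognizer != null)
        {
            if (DictationRecognizer.Status == SpeechSystemStatus.Running)
            {
                DictationRecognizer.Stop();
            }
            DictationRecognizer.Dispose();
        }
    }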
Example #2
    void Start()
    {
        //set voice controls to false
        voiceContolsOn = false;

        //disable all panel items; they will be shown once the name is interpreted
        playerName.enabled           = false;
        progressText.enabled         = false;
        loadingimage.enabled         = false;
        loadingimageprogress.enabled = false;

        //initialise name to empty string - reset previous rounds name if present
        PlayerPrefs.SetString("Name", "");

        // Get scripts to access methods.
        startgame = gameObject.GetComponent <StartGame>();
        pause     = GameObject.Find("Main Camera").GetComponent <Pause>();
        options   = GameObject.Find("VikingSoundHorn").GetComponent <Options>();

        // Add all voice commands to Dictionary
        AddAllVoiceCommands();

        //set up dictation recogniser to allow user to input name
        dictationRecognizer = new DictationRecognizer();
        //set timeout period for dictation recognizer
        dictationRecognizer.InitialSilenceTimeoutSeconds = 15;
        dictationRecognizer.DictationResult     += DictationRecognizer_DictationResult;
        dictationRecognizer.DictationHypothesis += DictationRecognizer_DictationHypothesis;
        dictationRecognizer.Start();

        //added timeout to dictation for the name - after 15 seconds, disable dictation and enable voice commands
        Invoke("DicationTimeout", 15);
    }
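The DicationTimeout method invoked above is not included in the snippet. A hypothetical sketch of what such a handler might do, reusing the dictationRecognizer and voiceContolsOn fields from the example and assuming a keywordRecognizer field for the voice commands; the project's actual implementation may differ.
    void DicationTimeout()
    {
        // Hypothetical handler: stop dictation once the 15-second window ends.
        if (dictationRecognizer != null && dictationRecognizer.Status == SpeechSystemStatus.Running)
        {
            dictationRecognizer.Stop();
        }

        // Re-enable voice commands (keywordRecognizer is an assumed field, not shown above).
        voiceContolsOn = true;
        keywordRecognizer.Start();
    }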
Example #3
    void Awake()
    {
        // 3.a: Create a new DictationRecognizer and assign it to dictationRecognizer variable.
        dictationRecognizer = new DictationRecognizer();

        // 3.a: Register for dictationRecognizer.DictationHypothesis and implement DictationHypothesis below
        // This event is fired while the user is talking. As the recognizer listens, it provides text of what it's heard so far.
        dictationRecognizer.DictationHypothesis += DictationRecognizer_DictationHypothesis;

        // 3.a: Register for dictationRecognizer.DictationResult and implement DictationResult below
        // This event is fired after the user pauses, typically at the end of a sentence. The full recognized string is returned here.
        dictationRecognizer.DictationResult += DictationRecognizer_DictationResult;

        // 3.a: Register for dictationRecognizer.DictationComplete and implement DictationComplete below
        // This event is fired when the recognizer stops, whether from Stop() being called, a timeout occurring, or some other error.
        dictationRecognizer.DictationComplete += DictationRecognizer_DictationComplete;

        // 3.a: Register for dictationRecognizer.DictationError and implement DictationError below
        // This event is fired when an error occurs.
        dictationRecognizer.DictationError += DictationRecognizer_DictationError;


        // Query the maximum frequency of the default microphone. Use 'unused' to ignore the minimum frequency.
        int unused;

        Microphone.GetDeviceCaps(deviceName, out unused, out samplingRate);

        // Use this string to cache the text currently displayed in the text box.
        textSoFar = new StringBuilder();
    }
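The four handler methods registered in Example #3 are not shown in the snippet. A minimal sketch with the matching delegate signatures, reusing the textSoFar buffer from Awake(); the original handlers presumably also update a UI text element.
    private void DictationRecognizer_DictationHypothesis(string text)
    {
        // Partial text heard so far (sketch only: just log it).
        Debug.LogFormat("Dictation hypothesis: {0}", text);
    }

    private void DictationRecognizer_DictationResult(string text, ConfidenceLevel confidence)
    {
        // Cache the completed sentence.
        textSoFar.Append(text + ". ");
    }

    private void DictationRecognizer_DictationComplete(DictationCompletionCause cause)
    {
        if (cause != DictationCompletionCause.Complete)
        {
            Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", cause);
        }
    }

    private void DictationRecognizer_DictationError(string error, int hresult)
    {
        Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
    }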
Example #4
    void Start()
    {
        m_DictationRecognizer = new DictationRecognizer();


        m_DictationRecognizer.DictationResult += (text, confidence) =>
        {
            Debug.LogFormat("Dictation result: {0}", text);
            //m_Recognitions.text += text + "\n";
        };

        m_DictationRecognizer.DictationHypothesis += (text) =>
        {
            //Debug.LogFormat("Dictation hypothesis: {0}", text);
            //m_Hypotheses.text += text;
        };

        m_DictationRecognizer.DictationComplete += (completionCause) =>
        {
            if (completionCause != DictationCompletionCause.Complete)
            {
                Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
            }
        };

        m_DictationRecognizer.DictationError += (error, hresult) =>
        {
            Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        };

        m_DictationRecognizer.Start();
    }
Example #5
        public void Awake()
        {
            this.dictationRecognizer = new DictationRecognizer();
            this.dictationRecognizer.InitialSilenceTimeoutSeconds = this.initialTimeout;
            this.dictationRecognizer.AutoSilenceTimeoutSeconds    = this.automaticTimeout;

            this.dictationRecognizer.DictationHypothesis += (text) => {
                if (this.Hypothesis != null)
                {
                    this.Hypothesis(text);
                }
            };
            this.dictationRecognizer.DictationResult += (text, confidence) => {
                if (this.Recognized != null)
                {
                    this.Recognized(text);
                }
            };
            this.dictationRecognizer.DictationComplete += (completionCause) => {
                if (this.Completed != null)
                {
                    this.Completed();
                }
            };
        }
Example #6
        /// <inheritdoc />
        public override void Enable()
        {
            if (!Application.isPlaying)
            {
                return;
            }

            if (MixedRealityToolkit.InputSystem == null)
            {
                Debug.LogError($"Unable to start {Name}. An Input System is required for this feature.");
                return;
            }

            inputSource     = MixedRealityToolkit.InputSystem.RequestNewGenericInputSource(Name);
            dictationResult = string.Empty;

            if (dictationRecognizer == null)
            {
                dictationRecognizer = new DictationRecognizer();
            }

            dictationRecognizer.DictationHypothesis += DictationRecognizer_DictationHypothesis;
            dictationRecognizer.DictationResult     += DictationRecognizer_DictationResult;
            dictationRecognizer.DictationComplete   += DictationRecognizer_DictationComplete;
            dictationRecognizer.DictationError      += DictationRecognizer_DictationError;
        }
Example #7
    /// <summary> ############################################################
    /// ---------------------  Dictation Methods below  ----------------------
    /// </summary> ###########################################################
    public void noteTaking()
    {
        // stop keyword recognizer to prevent dictation recognition conflict
        PhraseRecognitionSystem.Shutdown();

        Debug.Log("Shutting down Phrase Recognition");

        dictationRecognizer = new DictationRecognizer();

        dictationRecognizer.InitialSilenceTimeoutSeconds = 6f;
        dictationRecognizer.AutoSilenceTimeoutSeconds    = 6f;

        dictationRecognizer.DictationResult     += dictationRecognizer_DictationResult;
        dictationRecognizer.DictationHypothesis += dictationRecognizer_DictationHypothesis;
        dictationRecognizer.DictationComplete   += dictationRecognizer_DictationComplete;
        dictationRecognizer.DictationError      += dictationRecognizer_DictationError;

        // Used for debugging to show the dictation parameters have been activated,
        // so dictation can be used in the app.
        Debug.Log("Initialized Dictation Recognizer");

        // Start dictation recognition
        dictationRecognizer.Start();

        // Change bool to true for dictation control
        IsRunning = true;

        checkDictationOn();

        // Used for debugging to show dictation recognizer has started.
        Debug.Log("Dictation started");
    }
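Example #7 only switches from phrase recognition to dictation. A hypothetical companion method, not shown in the source, that reverses the switch using the same fields.
    public void stopNoteTaking()
    {
        // Stop and release dictation, then hand control back to the keyword/phrase recognizer.
        if (dictationRecognizer != null && dictationRecognizer.Status == SpeechSystemStatus.Running)
        {
            dictationRecognizer.Stop();
            dictationRecognizer.Dispose();
        }

        IsRunning = false;

        // Restart the phrase recognition system that noteTaking() shut down.
        PhraseRecognitionSystem.Restart();

        Debug.Log("Dictation stopped, phrase recognition restarted");
    }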
Example #8
        /// <inheritdoc />
        public WindowsDictationDataProvider(string name, uint priority, BaseMixedRealityControllerDataProviderProfile profile, IMixedRealityInputSystem parentService)
            : base(name, priority, profile, parentService)
        {
#if UNITY_STANDALONE_WIN || UNITY_WSA || UNITY_EDITOR_WIN
            if (dictationRecognizer == null)
            {
                try
                {
                    dictationRecognizer = new DictationRecognizer();
                }
                catch (UnityException e)
                {
                    switch (e.Message)
                    {
                    case string message when message.Contains("Speech recognition is not supported on this machine."):
                        Debug.LogWarning($"Skipping {nameof(WindowsDictationDataProvider)} registration.\n{e.Message}");

                        break;

                    default:
                        throw;
                    }
                }
            }
#endif // UNITY_STANDALONE_WIN || UNITY_WSA || UNITY_EDITOR_WIN
        }
Example #9
    void Start()
    {
        dicRecognizer = new DictationRecognizer();
        dicRecognizer.InitialSilenceTimeoutSeconds = 10;
        // Final result
        dicRecognizer.DictationResult += (text, confidence) =>
        {
            gameObject.GetComponent <UnityEngine.UI.Text>().text = text;
            GUIUtility.systemCopyBuffer = text;
//			System.Diagnostics.Process.Start(path);
        };
        // Hypothesis
        dicRecognizer.DictationHypothesis += (text) => {
            // Processing to run when a hypothesis arrives
        };
        // On stop
        dicRecognizer.DictationComplete += (completeCause) =>
        {
            // If the cause was a timeout, start the recognizer again
            if (completeCause == DictationCompletionCause.TimeoutExceeded)
            {
                dicRecognizer.Start();
            }
        };
        dicRecognizer.Start();
    }
Example #10
    public override void Initialize(SpeechToTextOptions speechToTextOptions)
    {
        if (speechToTextOptions.GetType() == typeof(WindowsSTTOptions))
        {
            windowsSTTOptions = speechToTextOptions as WindowsSTTOptions;

            dictationRecognizer = new DictationRecognizer();
            dictationRecognizer.AutoSilenceTimeoutSeconds = windowsSTTOptions.AutoSilenceTimeoutSeconds;

            dictationRecognizer.DictationResult += (result, confidence) =>
            {
                if (confidence <= windowsSTTOptions.ConfidenceLevel)
                {
                    base.OnSTTResult(new SpeechToTextResultEventArgs(result));
                }
                else
                {
                    Debug.LogWarning("Windows STT Result under confidence level");
                }
            };

            dictationRecognizer.DictationError += (string error, int hresult) =>
            {
                Debug.LogError(error + " " + hresult);
            };
        }
    }
Example #11
    /// <summary>
    /// <c>Start</c>
    ///
    /// Description: Builds the data the speech-to-text system needs before the game begins.
    ///
    /// Pre-condition: None
    ///
    /// Post-condition: Sets up the data required to run the dictationRecognizer. Once the
    /// dictationRecognizer is up and running we can begin logging what the player says.
    /// We only need to set this up once, so we do it at the beginning of play time.
    /// Once a phrase is spoken, we log it and then send it to the dialogueTree.
    ///
    /// </summary>
    /// <returns>NULL</returns>
    public void Start()
    {
        dictationRecognizer = new DictationRecognizer();

        // When speech has been recognized.
        dictationRecognizer.DictationResult += (text, confidence) =>
        {
            phraseSpoken = text;
            // Write the text to the log.
            GameObject.FindGameObjectWithTag("Log").GetComponent <LogSystem>().WriteToFile(phraseSpoken);
            this.text.text = phraseSpoken;

            Debug.Log("what is inside the Phrase Spoken:" + phraseSpoken);
            //if (dialogueTree != null)
            //dialogueTree.inTree(phraseSpoken);
        };

        text.gameObject.SetActive(false);

        if (Microphone.devices.Length == 0)
        {
            text.text = "Warning: There are no audio input devices connected!";
            text.gameObject.SetActive(true);
        }

        dictationRecognizer.Start();
    }
Example #12
    /// <summary>
    /// Called when the game is started. <para/>
    /// We set up the voice recognizer.
    ///
    /// preconditions: Language Engine exists.
    ///
    /// postconditions: dictationRecognizer is setup.
    /// </summary>
    private void Start()
    {
        Debug.Log(string.Format("SpeechToText::Start"));

        // Create the DictationRecognizer; it will start recognizing text from the mic.
        dictationRecognizer = new DictationRecognizer();

        // When speech has been recognized.
        dictationRecognizer.DictationResult += OnDictationResult;

        // make sure to rerun the dictation if it finishes
        dictationRecognizer.DictationComplete += (DictationCompletionCause cause) =>
        {
            Debug.Log("DictationCompletionCause: " + cause);

            if (/*cause != DictationCompletionCause.Canceled && */ cause != DictationCompletionCause.Complete)
            {
                dictationRecognizer.Start();
            }
        };

        // catch errors and debug them.
        dictationRecognizer.DictationError += (string error, int hresult) =>
        {
            Debug.LogError("DictationError: " + error);
        };

        // start it now.
        dictationRecognizer.Start();
    }
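The OnDictationResult handler registered in Example #12 is not included in the snippet. A minimal sketch with the matching delegate signature; the project presumably forwards the text to its language engine rather than just logging it.
    private void OnDictationResult(string text, ConfidenceLevel confidence)
    {
        // Sketch only: log the recognized sentence.
        Debug.Log("OnDictationResult: " + text);
    }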
Example #13
    void Start()
    {
        m_DictationRecognizer = new DictationRecognizer();

        m_DictationRecognizer.DictationResult += (text, confidence) =>
        {
            Debug.LogFormat("Dictation result: {0}", text);
            sprachAusgabe.SetWordsToSay(text);
        };

        m_DictationRecognizer.DictationHypothesis += (text) =>
        {
            Debug.LogFormat("Dictation hypothesis: {0}", text);
        };

        m_DictationRecognizer.DictationComplete += (completionCause) =>
        {
            if (completionCause != DictationCompletionCause.Complete)
            {
                Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
            }
        };

        m_DictationRecognizer.DictationError += (error, hresult) =>
        {
            Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        };

        m_DictationRecognizer.Start();
    }
Example #14
    private void Awake()
    {
        dictationRecognizer = new DictationRecognizer();
        dictationRecognizer.AutoSilenceTimeoutSeconds = TIMEOUT;
        // 3.a: Register for dictationRecognizer.DictationHypothesis and implement DictationHypothesis below
        // This event is fired while the user is talking. As the recognizer listens, it provides text of what it's heard so far.
        dictationRecognizer.DictationHypothesis += DictationRecognizer_DictationHypothesis;

        // 3.a: Register for dictationRecognizer.DictationComplete and implement DictationComplete below
        // This event is fired when the recognizer stops, whether from Stop() being called, a timeout occurring, or some other error.
        dictationRecognizer.DictationComplete += DictationRecognizer_DictationComplete;

        PhraseRecognitionSystem.Shutdown();
        dictationRecognizer.Start();
        // Query the maximum frequency of the default microphone. Use 'unused' to ignore the minimum frequency.
        int unused;

        Microphone.GetDeviceCaps(deviceName, out unused, out samplingRate);
        Microphone.Start(deviceName, false, messageLength, samplingRate);
    }
Example #15
    private void Start()
    {
        m_DictationRecognizer = new DictationRecognizer();
        m_DictationRecognizer.DictationResult += (text, confidence) =>
        {
            OnSpeechResult?.Invoke(this, text);
        };

        m_DictationRecognizer.DictationHypothesis += (text) =>
        {
            OnSpeechHypothesis?.Invoke(this, text);
        };

        m_DictationRecognizer.DictationComplete += (completionCause) =>
        {
            if (completionCause != DictationCompletionCause.Complete)
            {
                Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
            }
        };

        m_DictationRecognizer.DictationError += (error, hresult) =>
        {
            Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        };

        m_DictationRecognizer.Start();
    }
Example #16
    public void StartDictation()
    {
        if (PhraseRecognitionSystem.Status != SpeechSystemStatus.Stopped)
        {
            PhraseRecognitionSystem.Shutdown();
        }

        m_DictationRecognizer = new DictationRecognizer();
        m_DictationRecognizer.DictationResult += (string text, ConfidenceLevel confidence) => {
            m_Recognitions.text += text + "\n";
        };
        m_DictationRecognizer.DictationHypothesis += ((string text) => {
            m_Hypotheses.text += text + "\n";
        });
        m_DictationRecognizer.DictationComplete += ((DictationCompletionCause completionCause) => {
            if (completionCause != DictationCompletionCause.Complete)
            {
                Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
            }
        });
        m_DictationRecognizer.DictationError += ((string error, int hresult) => {
            Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        });

        m_DictationRecognizer.Start();
        m_Recognitions.text = "";
        m_Hypotheses.text   = "";
        enableUI("StartDictation");
    }
Example #17
    void Start()
    {
        if (!m_enableListening)
        {
            return;
        }
        m_DictationRecognizer = new DictationRecognizer();

        m_DictationRecognizer.DictationResult     += VoiceResult;
        m_DictationRecognizer.DictationHypothesis += VoiceHypothesis;

        m_DictationRecognizer.DictationComplete += (completionCause) =>
        {
            if (completionCause != DictationCompletionCause.Complete)
            {
                Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
            }
        };

        m_DictationRecognizer.DictationError += (error, hresult) =>
        {
            Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        };

        m_DictationRecognizer.Start();
    }
Example #18
        private void InitializeDictationRecognizer()
        {
            try
            {
                if (dictationRecognizer == null)
                {
                    dictationRecognizer = new DictationRecognizer();

                    dictationRecognizer.DictationHypothesis += DictationRecognizer_DictationHypothesis;
                    dictationRecognizer.DictationResult     += DictationRecognizer_DictationResult;
                    dictationRecognizer.DictationComplete   += DictationRecognizer_DictationComplete;
                    dictationRecognizer.DictationError      += DictationRecognizer_DictationError;
                }
            }
            catch (System.Exception ex)
            {
                // Don't log if the application is currently running in batch mode (for example, when running tests). This failure is expected in this case.
                if (!Application.isBatchMode)
                {
                    Debug.LogWarning($"Failed to start dictation recognizer. Are microphone permissions granted? Exception: {ex}");
                }
                Disable();
                dictationRecognizer = null;
            }
        }
Example #19
    //public ChatBot bot;

    //SpeechToTextResult m_LastResult;

    void Start()
    {
        m_DictationRecognizer = new DictationRecognizer(ConfidenceLevel.High, DictationTopicConstraint.Dictation);

        m_DictationRecognizer.DictationResult += (text, confidence) =>
        {
            Debug.LogFormat("Dictation result: {0}", text);
            m_Recognitions.text += text + "\n";
            //bot.SendChat(text);
        };

        m_DictationRecognizer.DictationHypothesis += (text) =>
        {
            Debug.LogFormat("Dictation hypothesis: {0}", text);
            m_Hypotheses.text += text + "\n";
        };

        m_DictationRecognizer.DictationComplete += (completionCause) =>
        {
            if (completionCause != DictationCompletionCause.Complete)
            {
                Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
            }
        };

        m_DictationRecognizer.DictationError += (error, hresult) =>
        {
            Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        };

        // m_LastResult = new SpeechToTextResult("", false);
        m_DictationRecognizer.Start();
    }
Example #20
    void Awake()
    {
        //to add additional voices you must make them available for 32 bit apps
        //https://winaero.com/unlock-extra-voices-windows-10/
        foreach (ISpeechObjectToken name in voice.GetVoices())
        {
            print("Voice Available: " + name.GetDescription());
        }
        voice.Voice = voice.GetVoices().Item(2);

        //setup keyword recognizer
        keywordRecognizer = new KeywordRecognizer(new string[] { Config.KEYWORD }, ConfidenceLevel.Low);
        keywordRecognizer.OnPhraseRecognized += KeywordRecognizer_OnPhraseRecognized;

        //setup dictation recognizer
        m_DictationRecognizer = new DictationRecognizer();
        m_DictationRecognizer.AutoSilenceTimeoutSeconds = 2;
        m_DictationRecognizer.DictationResult          += (text, confidence) => {
            StartCoroutine(OnSpeechResult(text));
        };
        m_DictationRecognizer.DictationComplete += (completionCause) => {
            if (completionCause != DictationCompletionCause.Complete)
            {
                StartCoroutine(OnSpeechResult("error"));
            }
        };
        m_DictationRecognizer.DictationError += (error, hresult) => {
            StartCoroutine(OnSpeechResult("error"));
        };
    }
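The OnSpeechResult coroutine started above is not part of the snippet. A minimal sketch with a matching signature; the real coroutine presumably feeds the text into the voice/response pipeline rather than just logging it.
    private IEnumerator OnSpeechResult(string text)
    {
        // Sketch only: log what was recognized, then finish.
        Debug.Log("Speech result: " + text);
        yield break;
    }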
Example #21
    private void Awake()
    {
        gramar = new Dictionary <string, UnityAction>();
        string[] keywords = { "gauche", "droite", "haut", "bas", "jump", "stop", "utiliser", "inventaire" };
        gramar.Add(keywords[0], GoLeft);
        gramar.Add(keywords[1], GoRight);
        gramar.Add(keywords[2], GoUp);
        gramar.Add(keywords[3], GoDown);
        gramar.Add(keywords[4], PressJump);
        gramar.Add(keywords[5], Stop);
        gramar.Add(keywords[6], PressUse);
        gramar.Add(keywords[7], PressInv);


        dR = new DictationRecognizer(confidence);


        dR.DictationHypothesis += (text) =>
        {
            string[] textArray = text.Split(' ');

            foreach (string s in textArray)
            {
                if (IsInside <string>(keywords, s))
                {
                    Debug.Log(s + " is Valid");
                    gramar[s].Invoke();
                }
            }
        };
    }
Example #22
    public void Start()
    {
        dictationRecognizer = new DictationRecognizer();
        dictationRecognizer.AutoSilenceTimeoutSeconds    = 3;
        dictationRecognizer.InitialSilenceTimeoutSeconds = 5;

        // Fires while the user is talking. As the recognizer listens, it provides text of what it's heard so far.
        dictationRecognizer.DictationHypothesis += DictationRecognizer_DictationHypothesis;

        // Fires after the user pauses, typically at the end of a sentence. The full recognized string is returned here.
        dictationRecognizer.DictationResult += DictationRecognizer_DictationResult;

        // Fires when the recognizer stops, whether from Stop() being called, a timeout occurring, or some other error.
        dictationRecognizer.DictationComplete += DictationRecognizer_DictationComplete;

        // Fires when an error occurs
        dictationRecognizer.DictationError += DictationRecognizer_DictationError;

        // Query the maximum frequency of the default microphone. Use 'unused' to ignore the minimum frequency.
        int unused;

        Microphone.GetDeviceCaps(deviceName, out unused, out samplingRate);

        // Use this string to cache the text currently displayed in the text box.
        textSoFar = new StringBuilder();

        dictationAudio = GetComponent <AudioSource>();

        dictationAudio.clip = StartRecording();
    }
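StartRecording() is not part of Example #22. A minimal sketch, assuming it records a fixed-length clip from the same default microphone at the sampling rate queried above (the actual clip length is unknown).
    private AudioClip StartRecording()
    {
        // 10 seconds is an arbitrary length chosen for this sketch.
        return Microphone.Start(deviceName, false, 10, samplingRate);
    }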
Example #23
 public UnitySpeechRecognizer()
 {
     _recognizerDictionary = new Dictionary <Guid, KeywordRecognizer>();
     _dictationRecognizer  = new DictationRecognizer();
     _dictationRecognizer.DictationComplete += OnDictationCompleteHandler;
     _keywords = new List <string>();
 }
Example #24
    void Awake()
    {
        m_audioSource      = GetComponent <AudioSource>();
        m_audioSource.loop = false;

        //  Create a new DictationRecognizer and assign it to dictationRecognizer variable.
        dictationRecognizer = new DictationRecognizer(ConfidenceLevel.High);

        // Fired while the user is talking. As the recognizer listens, it provides the text heard so far.
        dictationRecognizer.DictationHypothesis += DictationRecognizer_DictationHypothesis;

        // Fired after the user pauses, typically at the end of a sentence. The full recognized string is returned here.
        dictationRecognizer.DictationResult += DictationRecognizer_DictationResult;

        // Fired when the recognizer stops, whether from Stop() being called, a timeout occurring, or some other error.
        dictationRecognizer.DictationComplete += DictationRecognizer_DictationComplete;

        // Fired when an error occurs.
        dictationRecognizer.DictationError += DictationRecognizer_DictationError;

        // Query the maximum frequency of the default microphone. Use 'unused' to ignore the minimum frequency.
        int unused;

        Microphone.GetDeviceCaps(deviceName, out unused, out samplingRate);

        textSoFar = new StringBuilder();

        hasRecordingStarted = false;
    }
Example #25
    void Start()
    {
        camera      = GameObject.Find("Main Camera");
        videoPlayer = camera.AddComponent <VideoPlayer>();
        audioSource = gameObject.AddComponent <AudioSource>();
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.renderMode      = UnityEngine.Video.VideoRenderMode.CameraNearPlane;
        videoPlayer.playOnAwake     = false;
        videoPlayer.source          = VideoSource.Url;
        gender = "Male";

        dictationRecognizer = new DictationRecognizer();
        dictationRecognizer.DictationHypothesis += (text) => {
            print(text);
        };

        dictationRecognizer.DictationResult += (text, confidence) => {
            print(text + " - " + confidence);
        };
        dictationRecognizer.Start();

        palabrasClaves   = new string[] { "Historia", "Fundado", "Francia" };
        reconocedorDeVoz = new KeywordRecognizer(palabrasClaves);
        reconocedorDeVoz.OnPhraseRecognized += OnPhraseRecognized;

        reconocedorDeVoz.Start();

        videoPlayer.loopPointReached += CheckOver;
    }
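The OnPhraseRecognized handler wired up to the KeywordRecognizer in Example #25 is not shown. A minimal sketch with the expected signature.
    private void OnPhraseRecognized(PhraseRecognizedEventArgs args)
    {
        // Sketch only: log the keyword that was matched and the recognizer's confidence.
        print(args.text + " - " + args.confidence);
    }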
Example #26
    private DictationRecognizer InstantiateDictationRecognizer()
    {
        DictationRecognizer dict = new DictationRecognizer();

        // Dictation result after a couple seconds of silence
        dict.DictationResult += (text, confidence) => {
            dictationResult = text;
        };

        // Dictation result immediately during speech
        dict.DictationHypothesis += (text) => {
            hypothesisResult = text;
        };

        // Gets called every time a dictation is finished
        dict.DictationComplete += (completionCause) => {
            if (completionCause != DictationCompletionCause.Complete)
            {
                Debug.LogWarningFormat("Dictation completed unsuccessfully: {0}", completionCause);
            }
        };

        // Gets called on explicit errors
        // Also falls back to this block when SPERR_SPEECH_PRIVACY_POLICY_NOT_ACCEPTED is thrown
        dict.DictationError += (error, hresult) => {
            Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        };

        return(dict);
    }
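A hypothetical usage sketch for Example #26: keep the returned recognizer in a field, start it, and poll the cached dictationResult that the lambdas above write into.
    private DictationRecognizer dictationRecognizer;   // assumed field for this sketch

    void Start()
    {
        dictationRecognizer = InstantiateDictationRecognizer();
        dictationRecognizer.Start();
    }

    void Update()
    {
        if (!string.IsNullOrEmpty(dictationResult))
        {
            Debug.Log("Latest dictation result: " + dictationResult);
            dictationResult = string.Empty;
        }
    }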
Example #27
    public void Start()
    {
        m_DictationRecognizer = new DictationRecognizer();

        m_DictationRecognizer.DictationResult += (text, confidence) =>
        {
            Debug.LogFormat("Dictation result: {0}", text);
            m_Recognitions.text += text + "\n";
            allspeech           += text + " ";
        };

        m_DictationRecognizer.DictationHypothesis += (text) =>
        {
            Debug.LogFormat("Dictation hypothesis: {0}", text);
            m_Hypotheses.text += text;
        };

        //m_DictationRecognizer.DictationComplete += (completionCause) =>
        //{
        //    if (completionCause != DictationCompletionCause.Complete)
        //        Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause);
        //};

        //m_DictationRecognizer.DictationError += (error, hresult) =>
        //{
        //    Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult);
        //};
        m_DictationRecognizer.InitialSilenceTimeoutSeconds = 300f;
        m_DictationRecognizer.AutoSilenceTimeoutSeconds    = 300f;
        m_DictationRecognizer.Start();
    }
Example #28
    private void Start()
    {
        Advertise("VoicePub", "/hololens/audio/user_transcript", 1, out pub);

        voicebox = gameObject.GetComponent <TextToSpeech>();

        // Activation phrase for dictation
        Keywords.Add("Hello", () =>
        {
            ros.std_msgs.String msg = new ros.std_msgs.String("Hello!");
            if (pub != null)
            {
                pub.SendMessage(msg);
            }
            voicebox.StartSpeaking("Hello");
        });

        Keywords.Add("record this", () =>
        {
            PhraseRecognitionSystem.Shutdown();
            StartBeep.Play();
            dictationRecognizer.Start();
        });

        dictationRecognizer = new DictationRecognizer();
        dictationRecognizer.DictationComplete   += DictationComplete;
        dictationRecognizer.DictationError      += DictationError;
        dictationRecognizer.DictationHypothesis += DictationHypothesis;
        dictationRecognizer.DictationResult     += DictationResult;

        keywordRecognizer = new KeywordRecognizer(Keywords.Keys.ToArray());
        keywordRecognizer.OnPhraseRecognized += KeywordRecognizer_OnPhraseRecognized;
        keywordRecognizer.Start();
    }
Example #29
 private void Awake()
 {
     _dictationRecognizer = new DictationRecognizer();
     _dictationRecognizer.DictationHypothesis += _dictationRecognizer_DictationHypothesis;
     _dictationRecognizer.DictationResult     += _dictationRecognizer_DictationResult;
     _dictationRecognizer.DictationComplete   += _dictationRecognizer_DictationComplete;
 }
Example #30
    DictationRecognizer recognizer; // speech recognition

    private void Start()
    {
        // Get all windows once at startup
        var    all = GetAllChildWindows(GetWindow(IntPtr.Zero), new List <Window>());
        string log = "";

        for (int i = 0; i < all.Count; i++)
        {
            log += all[i].Title + "-" + all[i].ClassName + "(" + all[i].hWnd + ") [" + all[i].Style + "]\r\n";
        }
        Debug.Log("Check all window\r\n" + log);



        // Initialize speech recognition
        recognizer = new DictationRecognizer();
        recognizer.InitialSilenceTimeoutSeconds = 10;
        recognizer.AutoSilenceTimeoutSeconds    = 10;
        recognizer.DictationResult += OnResult;

        // Get from the target window
        int index    = all.IndexOf(FindTarget("VOICEROID2", all));
        var allVoiRo = GetAllChildWindows(GetWindow(all[index].hWnd), new List <Window>());

        for (int i = 0; i < allVoiRo.Count; i++)
        {
            Debug.Log(allVoiRo[i].Title + "-" + allVoiRo[i].ClassName + "(" + allVoiRo[i].hWnd + ") [" + allVoiRo[i].Style + "]\r\n");
        }
        // Want to get the window handle...
        //IntPtr handle=new WindowInteropHelper()
        //edit = all[index + 10].hWnd;
        //edit = FindTarget("", all, index).hWnd;
        //play = FindTarget("", all, index).hWnd;
    }