Example #1
    public override void OnInspectorGUI()
    {
        DrawDefaultInspector();
        SpeechCommands myCommands = (SpeechCommands)target;

        if (GUILayout.Button("Send Information"))
        {
            myCommands.SendWords();
        }
    }
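
For context, this OnInspectorGUI override only compiles inside a custom inspector class; a minimal wrapper is sketched below. The editor class name is an assumption, and SpeechCommands here is the user component exposing SendWords(), not the MRTK SpeechCommands struct.

    using UnityEditor;

    // Hypothetical wrapper for the override above; only the attribute and base class are required.
    [CustomEditor(typeof(SpeechCommands))]
    public class SpeechCommandsEditor : Editor
    {
        // Place the OnInspectorGUI override shown above inside this class.
    }
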
        /// <summary>
        /// Instantiates a push button prefab and uses simulated voice input events to press it.
        /// </summary>
        /// <returns></returns>
        /// https://github.com/microsoft/MixedRealityToolkit-Unity/issues/5153
        // [UnityTest]
        public IEnumerator TestSimulatedVoiceInputOnPrefab()
        {
            // Load interactable prefab
            GameObject   interactableObject;
            Interactable interactable;
            Transform    translateTargetObject;

            InstantiateDefaultInteractablePrefab(
                new Vector3(0.0f, 0.0f, 0.5f),
                new Vector3(-90f, 0f, 0f),
                out interactableObject,
                out interactable,
                out translateTargetObject);

            // Subscribe to interactable's on click so we know the click went through
            bool wasClicked = false;

            interactable.OnClick.AddListener(() => { wasClicked = true; });

            // Set up its voice command
            interactable.VoiceCommand = "Select";

            Vector3 targetStartPosition = translateTargetObject.localPosition;

            yield return null;

            // Find an input source to associate with the input event (doesn't matter which one)
            IMixedRealityInputSource defaultInputSource = MixedRealityToolkit.InputSystem.DetectedInputSources.FirstOrDefault();

            Assert.NotNull(defaultInputSource, "At least one input source must be present for this test to work.");

            // Raise a voice select input event, then wait for transition to take place
            SpeechCommands commands = new SpeechCommands("Select", KeyCode.None, interactable.InputAction);

            MixedRealityToolkit.InputSystem.RaiseSpeechCommandRecognized(defaultInputSource, RecognitionConfidenceLevel.High, new System.TimeSpan(100), System.DateTime.Now, commands);
            // Wait for at least one frame explicitly to ensure the input goes through
            yield return new WaitForFixedUpdate();

            float pressStartTime = Time.time;
            bool  wasTranslated  = false;

            while (Time.time < pressStartTime + buttonPressAnimationDelay)
            {
                // If the transform is moved at any point during this interval, we were successful
                yield return new WaitForFixedUpdate();

                wasTranslated |= targetStartPosition != translateTargetObject.localPosition;
            }

            // Wait for button press to expire
            yield return new WaitForSeconds(buttonReleaseAnimationDelay);

            Assert.True(wasClicked, "Interactable was not clicked.");
            Assert.True(wasTranslated, "Transform target object was not translated by action.");
        }
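
The two delay constants referenced above (buttonPressAnimationDelay and buttonReleaseAnimationDelay) are defined elsewhere in the test class; the declaration below is only a placeholder so the snippet can be read in isolation, and the values are assumptions rather than the ones used in the source.

        // Placeholder delays; the real values live in the surrounding test class.
        private const float buttonPressAnimationDelay = 0.25f;
        private const float buttonReleaseAnimationDelay = 0.25f;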
Example #3
        private void InitializeKeywordRecognizer()
        {
            //ManualList m_ManualList = SpeechInputManager2.Instance.ManualList;

            var textAsset = Resources.Load("Manual") as TextAsset;
            string manualJson = textAsset.text;
            ManualList m_ManualList = JsonUtility.FromJson<ManualList>(manualJson);

            m_SpeechCommandsArray = Commands;
            foreach (var val in m_ManualList.list[0].manual[0].operation)
            {
                SpeechCommands m_SpeechCommands = new SpeechCommands(val.kaiheikikana, KeyCode.B, MixedRealityInputAction.None, null);
                Array.Resize(ref m_SpeechCommandsArray, m_SpeechCommandsArray.Length + 1);
                m_SpeechCommandsArray[m_SpeechCommandsArray.Length - 1] = m_SpeechCommands;
            }

            if (!Application.isPlaying ||
                (m_SpeechCommandsArray == null) ||
                (m_SpeechCommandsArray.Length == 0) ||
                InputSystemProfile == null ||
                keywordRecognizer != null
                )
            {
                return;
            }

            InputSource = Service?.RequestNewGenericInputSource("Windows Speech Input Source", sourceType: InputSourceType.Voice);

            var newKeywords = new string[m_SpeechCommandsArray.Length];

            for (int i = 0; i < m_SpeechCommandsArray.Length; i++)
            {
                newKeywords[i] = m_SpeechCommandsArray[i].LocalizedKeyword;
            }

            RecognitionConfidenceLevel = InputSystemProfile.SpeechCommandsProfile.SpeechRecognitionConfidenceLevel;

            try
            {
                keywordRecognizer = new KeywordRecognizer(newKeywords, (ConfidenceLevel)RecognitionConfidenceLevel);
            }
            catch (Exception ex)
            {
                Debug.LogWarning($"Failed to start keyword recognizer. Are microphone permissions granted? Exception: {ex}");
                keywordRecognizer = null;
                return;
            }

            keywordRecognizer.OnPhraseRecognized += KeywordRecognizer_OnPhraseRecognized;
        }
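
InitializeKeywordRecognizer above wires up the recognizer but never tears it down. A matching cleanup, sketched here assuming the same keywordRecognizer field and event handler, stops and disposes the recognizer so the microphone is released; the method name is an assumption.

        private void CleanupKeywordRecognizer()
        {
            // Hypothetical teardown counterpart to InitializeKeywordRecognizer above.
            if (keywordRecognizer == null)
            {
                return;
            }

            keywordRecognizer.OnPhraseRecognized -= KeywordRecognizer_OnPhraseRecognized;

            if (keywordRecognizer.IsRunning)
            {
                keywordRecognizer.Stop();
            }

            keywordRecognizer.Dispose();
            keywordRecognizer = null;
        }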
        private IEnumerator FireSpeechCommand(string voiceCommand, MixedRealityInputAction inputAction, IMixedRealityInputSource inputSource = null)
        {
            if (inputSource == null)
            {
                // Find an input source to associate with the input event (doesn't matter which one)
                IMixedRealityInputSource defaultInputSource = CoreServices.InputSystem.DetectedInputSources.FirstOrDefault();
                Assert.NotNull(defaultInputSource, "At least one input source must be present for this test to work.");
                inputSource = defaultInputSource;
            }

            // Raise a voice select input event, then wait for transition to take place
            // Wait for at least one frame explicitly to ensure the input goes through
            SpeechCommands commands = new SpeechCommands(voiceCommand, KeyCode.None, inputAction);

            CoreServices.InputSystem.RaiseSpeechCommandRecognized(inputSource, RecognitionConfidenceLevel.High, new System.TimeSpan(100), System.DateTime.Now, commands);
            yield return PlayModeTestUtilities.WaitForInputSystemUpdate();
        }
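
A typical call site for this helper, assuming an interactable configured with a voice command as in the other examples, is a single yield inside a play-mode test:

            // Hedged usage sketch: press the interactable via its configured voice command.
            yield return FireSpeechCommand(interactable.VoiceCommand, interactable.InputAction);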
Example #5
 private void OnPhraseRecognized(ConfidenceLevel confidence, TimeSpan phraseDuration, DateTime phraseStartTime, string text)
 {
     using (OnPhraseRecognizedPerfMarker.Auto())
     {
         SpeechCommands[] commands = Commands;
         int commandsCount         = commands?.Length ?? 0;
         for (int i = 0; i < commandsCount; i++)
         {
             SpeechCommands command = commands[i];
             if (command.LocalizedKeyword == text)
             {
                 globalInputSource.UpdateActivePointers();
                 Service?.RaiseSpeechCommandRecognized(InputSource, (RecognitionConfidenceLevel)confidence, phraseDuration, phraseStartTime, command);
                 break;
             }
         }
     }
 }
        public IEnumerator TestVoiceInputOnPrefab()
        {
            // Load interactable prefab
            Interactable interactable;
            Transform    translateTargetObject;

            InstantiatePressButtonPrefab(
                new Vector3(0.0f, 0.0f, 0.5f),
                DefaultRotation,
                out interactable,
                out translateTargetObject);

            // Subscribe to interactable's on click so we know the click went through
            bool wasClicked = false;

            interactable.OnClick.AddListener(() => { wasClicked = true; });

            Vector3 targetStartPosition = translateTargetObject.localPosition;

            // Set up its voice command
            interactable.VoiceCommand = "Select";

            yield return PlayModeTestUtilities.WaitForInputSystemUpdate();

            // Find an input source to associate with the input event (doesn't matter which one)
            IMixedRealityInputSource defaultInputSource = CoreServices.InputSystem.DetectedInputSources.FirstOrDefault();

            Assert.NotNull(defaultInputSource, "At least one input source must be present for this test to work.");

            // Raise a voice select input event, then wait for transition to take place
            // Wait for at least one frame explicitly to ensure the input goes through
            SpeechCommands commands = new SpeechCommands(interactable.VoiceCommand, KeyCode.None, interactable.InputAction);

            CoreServices.InputSystem.RaiseSpeechCommandRecognized(defaultInputSource, RecognitionConfidenceLevel.High, new System.TimeSpan(100), System.DateTime.Now, commands);
            yield return PlayModeTestUtilities.WaitForInputSystemUpdate();

            yield return CheckButtonTranslation(targetStartPosition, translateTargetObject);

            Assert.True(wasClicked, "Interactable was not clicked.");

            // Cleanup
            GameObject.Destroy(interactable.gameObject);
        }
Example #7
        /// <inheritdoc />
        public override void Update()
        {
            using (UpdatePerfMarker.Auto())
            {
                base.Update();

                if (keywordRecognizer != null && keywordRecognizer.IsRunning)
                {
                    SpeechCommands[] commands = Commands;
                    int commandsCount         = commands?.Length ?? 0;
                    for (int i = 0; i < commandsCount; i++)
                    {
                        SpeechCommands command = commands[i];
                        if (UInput.GetKeyDown(command.KeyCode))
                        {
                            OnPhraseRecognized((ConfidenceLevel)RecognitionConfidenceLevel, TimeSpan.Zero, DateTime.UtcNow, command.LocalizedKeyword);
                        }
                    }
                }
            }
        }
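
The providers above only raise speech events; on the receiving side, a component implements IMixedRealitySpeechHandler. The sketch below is illustrative (the class name is an assumption) and registers globally so keywords are delivered even when the object is not focused.

        using Microsoft.MixedReality.Toolkit;
        using Microsoft.MixedReality.Toolkit.Input;
        using UnityEngine;

        public class SpeechKeywordLogger : MonoBehaviour, IMixedRealitySpeechHandler
        {
            private void OnEnable()
            {
                // Register for global speech input so keywords arrive without focus.
                CoreServices.InputSystem?.RegisterHandler<IMixedRealitySpeechHandler>(this);
            }

            private void OnDisable()
            {
                CoreServices.InputSystem?.UnregisterHandler<IMixedRealitySpeechHandler>(this);
            }

            public void OnSpeechKeywordRecognized(SpeechEventData eventData)
            {
                Debug.Log($"Recognized keyword: {eventData.Command.Keyword}");
            }
        }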