/// <summary>
/// Verifies gaze cursor visibility transitions: visible at start, hidden while an
/// articulated hand is tracked, and visible again after the "select" speech command.
/// </summary>
public IEnumerator TestGazeCursorArticulated()
{
    IMixedRealityInputSystem inputSystem = PlayModeTestUtilities.GetInputSystem();
    yield return null;

    // Verify that the gaze cursor is visible at the start.
    Assert.IsTrue(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should be visible at start");

    // Disable simulated user input so the test can drive the hand state directly.
    InputSimulationService inputSimulationService = PlayModeTestUtilities.GetInputSimulationService();
    inputSimulationService.UserInputEnabled = false;

    // Raise an open right hand a short distance in front of the camera —
    // the gaze cursor should no longer be visible.
    // NOTE: the original code also fetched ArticulatedHandPose.GetGesturePose(Open)
    // into an unused local; that dead call has been removed.
    var handOpenPose = PlayModeTestUtilities.GenerateHandPose(ArticulatedHandPose.GestureId.Open, Handedness.Right, Vector3.forward * 0.1f, Quaternion.identity);
    inputSimulationService.HandDataRight.Update(true, false, handOpenPose);
    yield return null;

    // Gaze cursor should not be visible while a hand is up.
    Assert.IsFalse(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should not be visible when one articulated hand is up");

    // Lower the hand again.
    inputSimulationService.HandDataRight.Update(false, false, handOpenPose);
    yield return null;

    // Say "select" to make gaze cursor active again.
    // Really we need to tear down the scene and create it again but MRTK doesn't support that yet.
    var gazeInputSource = inputSystem.DetectedInputSources.Where(x => x.SourceName.Equals("Gaze")).First();
    inputSystem.RaiseSpeechCommandRecognized(gazeInputSource, RecognitionConfidenceLevel.High, new System.TimeSpan(), System.DateTime.Now, new SpeechCommands("select", KeyCode.Alpha1, MixedRealityInputAction.None));
    yield return null;

    Assert.IsTrue(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should be visible after select command");
}
/// <summary>
/// Raises a speech command through the input system and verifies that both the
/// legacy (Register-based) and new (RegisterHandler-based) global listeners receive
/// exactly the speech event — and none of the pointer events.
/// </summary>
public IEnumerator TestSpeechEventCallsForGlobalHandlers()
{
    // We need Gaze Cursor in this test to use it as source to emit events.
    IMixedRealityInputSystem inputSystem = null;
    MixedRealityServiceRegistry.TryGetService(out inputSystem);

    var object1 = new GameObject("Object 1");
    // These 2 handlers are independent
    var objectBasedListener = object1.AddComponent <TestInputGlobalListenerObjectBased>();
    var handlerBasedListener = object1.AddComponent <TestInputGlobalListenerHandlerBasedSpeechHandler>();

    var object2 = new GameObject("Object 2");
    // These 2 handlers are independent
    var handlerBasedListener1 = object2.AddComponent <TestInputGlobalListenerHandlerBasedAllHandlers>();
    var handlerBasedListener2 = object2.AddComponent <TestInputGlobalListenerHandlerBasedSpeechHandler>();

    yield return(null);

    // Mixing the old Register API and the new RegisterHandler API on the same
    // object is expected to log an error — assert that the warning fires.
    LogAssert.Expect(LogType.Error, new Regex("Detected simultaneous usage of IMixedRealityEventSystem.Register and IMixedRealityEventSystem.RegisterHandler"));

    // Emit speech event, which should be received by all handlers.
    var gazeInputSource = inputSystem.DetectedInputSources.Where(x => x.SourceName.Equals("Gaze")).First();
    inputSystem.RaiseSpeechCommandRecognized(gazeInputSource, RecognitionConfidenceLevel.High, new System.TimeSpan(), System.DateTime.Now, new SpeechCommands("menu", KeyCode.Alpha1, MixedRealityInputAction.None));

    // Old (object-based) API handler: speech only, no pointer events.
    Assert.Zero(objectBasedListener.pointerClickedCount, "Pointer clicked event is received by old API handler.");
    Assert.Zero(objectBasedListener.pointerDownCount, "Pointer down event is received by old API handler.");
    Assert.Zero(objectBasedListener.pointerUpCount, "Pointer up event is received by old API handler.");
    Assert.Zero(objectBasedListener.pointerDraggedCount, "Pointer dragged event is received by old API handler.");
    Assert.AreEqual(objectBasedListener.speechCount, 1, "Speech event is not received by old API handler.");

    // New (handler-based) speech handler on object 1: speech only.
    Assert.Zero(handlerBasedListener.pointerClickedCount, "Pointer clicked event is received by new API handler.");
    Assert.Zero(handlerBasedListener.pointerDownCount, "Pointer down event is received by new API handler.");
    Assert.Zero(handlerBasedListener.pointerUpCount, "Pointer up event is received by new API handler.");
    Assert.Zero(handlerBasedListener.pointerDraggedCount, "Pointer dragged event is received by new API handler.");
    Assert.AreEqual(handlerBasedListener.speechCount, 1, "Speech event is not received by new API handler.");

    // All-handlers component on object 2: speech only.
    Assert.Zero(handlerBasedListener1.pointerClickedCount, "Pointer clicked event is received by all-handlers component.");
    Assert.Zero(handlerBasedListener1.pointerDownCount, "Pointer down event is received by all-handlers component.");
    Assert.Zero(handlerBasedListener1.pointerUpCount, "Pointer up event is received by all-handlers component.");
    Assert.Zero(handlerBasedListener1.pointerDraggedCount, "Pointer dragged event is received by all-handlers component.");
    Assert.AreEqual(handlerBasedListener1.speechCount, 1, "Speech event is not received by all-handlers component.");

    // No pointer clicked event:
    // Speech-only handler on object 2: speech only.
    Assert.Zero(handlerBasedListener2.pointerClickedCount, "Pointer clicked event is received by speech-handler component.");
    Assert.Zero(handlerBasedListener2.pointerDownCount, "Pointer down event is received by speech-handler component.");
    Assert.Zero(handlerBasedListener2.pointerUpCount, "Pointer up event is received by speech-handler component.");
    Assert.Zero(handlerBasedListener2.pointerDraggedCount, "Pointer dragged event is received by speech-handler component.");
    Assert.AreEqual(handlerBasedListener2.speechCount, 1, "Speech event is not received by speech-handler component.");

    // Clean up the test objects.
    Object.Destroy(object1);
    Object.Destroy(object2);
    yield return(null);
}
/// <summary>
/// Keyword recognizer callback. Looks up the recognized <paramref name="text"/> among the
/// registered <c>Commands</c> and raises the matching MRTK speech command, if any.
/// </summary>
/// <param name="confidence">Recognizer confidence, forwarded as <see cref="RecognitionConfidenceLevel"/>.</param>
/// <param name="phraseDuration">How long the phrase took to speak.</param>
/// <param name="phraseStartTime">When the phrase began.</param>
/// <param name="text">The recognized keyword text.</param>
private void OnPhraseRecognized(ConfidenceLevel confidence, TimeSpan phraseDuration, DateTime phraseStartTime, string text)
{
    IMixedRealityInputSystem inputSystem = Service as IMixedRealityInputSystem;

    var registeredCommands = Commands;
    if (registeredCommands == null)
    {
        return;
    }

    // Raise only the first command whose localized keyword matches the recognized text.
    foreach (var command in registeredCommands)
    {
        if (command.LocalizedKeyword == text)
        {
            inputSystem?.RaiseSpeechCommandRecognized(InputSource, (RecognitionConfidenceLevel)confidence, phraseDuration, phraseStartTime, command);
            break;
        }
    }
}
/// <summary>
/// Keyword recognizer callback. Looks up the recognized <paramref name="text"/> among the
/// registered <c>Commands</c> and raises the matching MRTK speech command, if any.
/// The whole lookup is wrapped in a profiler sample for performance tracing.
/// </summary>
/// <param name="confidence">Recognizer confidence, forwarded as <see cref="RecognitionConfidenceLevel"/>.</param>
/// <param name="phraseDuration">How long the phrase took to speak.</param>
/// <param name="phraseStartTime">When the phrase began.</param>
/// <param name="text">The recognized keyword text.</param>
private void OnPhraseRecognized(ConfidenceLevel confidence, TimeSpan phraseDuration, DateTime phraseStartTime, string text)
{
    Profiler.BeginSample("[MRTK] WindowsSpeechInputProvider.OnPhraseRecognized");

    IMixedRealityInputSystem inputSystem = Service as IMixedRealityInputSystem;

    // Guard instead of early-return so the profiler sample is always closed.
    var registeredCommands = Commands;
    if (registeredCommands != null)
    {
        // Raise only the first command whose localized keyword matches the recognized text.
        foreach (var command in registeredCommands)
        {
            if (command.LocalizedKeyword == text)
            {
                inputSystem?.RaiseSpeechCommandRecognized(InputSource, (RecognitionConfidenceLevel)confidence, phraseDuration, phraseStartTime, command);
                break;
            }
        }
    }

    Profiler.EndSample(); // OnPhraseRecognized
}
/// <summary>
/// Verifies that the "toggle profiler" speech command hides the VisualProfiler:
/// the profiler starts visible, the command is raised through the input system,
/// and after a few frames the profiler reports hidden.
/// </summary>
public IEnumerator TestToggleProfilerCommand()
{
    // Confirm that the diagnostics system is enabled.
    IMixedRealityDiagnosticsSystem diagnosticsSystem = null;
    MixedRealityServiceRegistry.TryGetService <IMixedRealityDiagnosticsSystem>(out diagnosticsSystem);
    Assert.IsNotNull(diagnosticsSystem, "The diagnostics system is not enabled in the scene.");
    yield return null;

    // This test uses the input system to simulate speech commands.
    IMixedRealityInputSystem inputSystem = null;
    MixedRealityServiceRegistry.TryGetService <IMixedRealityInputSystem>(out inputSystem);
    Assert.IsNotNull(inputSystem, "The input system is not enabled in the scene.");
    yield return null;

    // Verify that the VisualProfiler is enabled before toggling.
    Assert.IsTrue(diagnosticsSystem.ShowProfiler, "The VisualProfiler is not active.");
    yield return null;

    // Toggle the profiler visualization off via the speech command,
    // emitted from the gaze input source.
    var gazeSource = inputSystem.DetectedInputSources.Where(x => x.SourceName.Equals("Gaze")).First();
    inputSystem.RaiseSpeechCommandRecognized(
        gazeSource,
        RecognitionConfidenceLevel.High,
        new TimeSpan(),
        DateTime.Now,
        new SpeechCommands("toggle profiler", KeyCode.Alpha9, MixedRealityInputAction.None));

    // It may take a few frames before the event is handled and the system responds to the state change.
    const int frameDelay = 10;
    for (int frame = 0; frame < frameDelay; frame++)
    {
        yield return null;
    }

    // Verify that the VisualProfiler is disabled.
    Assert.IsFalse(diagnosticsSystem.ShowProfiler, "The VisualProfiler is active (should be inactive).");
    yield return null;
}