/// <summary>
/// Fills <paramref name="jointsOut"/> with the current simulated hand joint poses.
/// Blends the active gesture pose into the working pose, then places the hand in
/// world space at the (jittered) viewport position with a wrist-relative rotation.
/// </summary>
internal void FillCurrentFrame(MixedRealityPose[] jointsOut)
{
    ArticulatedHandPose gesturePose = ArticulatedHandPose.GetGesturePose(gesture);
    if (gesturePose != null && gestureBlending > poseBlending)
    {
        // Remap the remaining blend interval [poseBlending, 1] onto [0, 1]
        // so the interpolation advances by exactly the newly requested amount.
        float remaining = Mathf.Clamp01(1.0f - poseBlending);
        float t = remaining > 0.0f ? (gestureBlending - poseBlending) / remaining : 1.0f;
        pose.InterpolateOffsets(pose, gesturePose, t);
    }
    poseBlending = gestureBlending;

    // Project the viewport anchor into screen space, add jitter, and lift into world space.
    Vector3 screenPosition = CameraCache.Main.ViewportToScreenPoint(ViewportPosition);
    Vector3 worldPosition = CameraCache.Main.ScreenToWorldPoint(screenPosition + JitterOffset);

    // Apply rotation relative to the wrist joint
    Quaternion wristRotation = pose.GetLocalJointPose(TrackedHandJoint.Wrist, handedness).Rotation;
    Quaternion localRotation = Quaternion.Euler(ViewportRotation) * Quaternion.Inverse(wristRotation);
    Quaternion worldRotation = CameraCache.Main.transform.rotation * localRotation;

    pose.ComputeJointPoses(handedness, worldRotation, worldPosition, jointsOut);
}
/// <summary>
/// Creates a hand-joint data generator that evaluates the given gesture pose
/// at a fixed world position and rotation.
/// </summary>
/// <param name="gesture">Gesture whose articulated pose to generate.</param>
/// <param name="handedness">Which hand the joints belong to.</param>
/// <param name="worldPosition">World-space position of the hand.</param>
/// <param name="rotation">World-space rotation applied to the hand.</param>
/// <returns>A generator delegate that fills the supplied joint array.</returns>
public static SimulatedHandData.HandJointDataGenerator GenerateHandPose(ArticulatedHandPose.GestureId gesture, Handedness handedness, Vector3 worldPosition, Quaternion rotation)
{
    return (jointsOut) =>
    {
        ArticulatedHandPose gesturePose = ArticulatedHandPose.GetGesturePose(gesture);
        // GetGesturePose may return null for an unmapped gesture; guard to avoid
        // a NullReferenceException (consistent with FillCurrentFrame/ResetGesture).
        if (gesturePose != null)
        {
            gesturePose.ComputeJointPoses(handedness, rotation, worldPosition, jointsOut);
        }
    };
}
/// <summary>
/// Creates a hand-joint data generator that evaluates the given gesture pose
/// at a fixed world position, with the rotation applied relative to the main
/// camera's current orientation.
/// </summary>
/// <param name="gesture">Gesture whose articulated pose to generate.</param>
/// <param name="handedness">Which hand the joints belong to.</param>
/// <param name="worldPosition">World-space position of the hand.</param>
/// <param name="rotation">Rotation composed with the camera rotation at evaluation time.</param>
/// <returns>A generator delegate that fills the supplied joint array.</returns>
public static SimulatedHandData.HandJointDataGenerator GenerateHandPose(ArticulatedHandPose.GestureId gesture, Handedness handedness, Vector3 worldPosition, Quaternion rotation)
{
    return (jointsOut) =>
    {
        ArticulatedHandPose gesturePose = ArticulatedHandPose.GetGesturePose(gesture);
        // GetGesturePose may return null for an unmapped gesture; guard to avoid
        // a NullReferenceException (consistent with FillCurrentFrame/ResetGesture).
        if (gesturePose != null)
        {
            Quaternion worldRotation = rotation * CameraCache.Main.transform.rotation;
            gesturePose.ComputeJointPoses(handedness, worldRotation, worldPosition, jointsOut);
        }
    };
}
/// <summary>
/// Creates a hand-joint data generator that evaluates the given gesture pose
/// at a screen position (converted to world space at evaluation time) with
/// identity rotation.
/// </summary>
/// <param name="gesture">Gesture whose articulated pose to generate.</param>
/// <param name="handedness">Which hand the joints belong to.</param>
/// <param name="screenPosition">Screen-space position projected into world space via the main camera.</param>
/// <returns>A generator delegate that fills the supplied joint array.</returns>
public static SimulatedHandData.HandJointDataGenerator GenerateHandPose(ArticulatedHandPose.GestureId gesture, Handedness handedness, Vector3 screenPosition)
{
    return (jointsOut) =>
    {
        ArticulatedHandPose gesturePose = ArticulatedHandPose.GetGesturePose(gesture);
        // GetGesturePose may return null for an unmapped gesture; guard to avoid
        // a NullReferenceException (consistent with FillCurrentFrame/ResetGesture).
        if (gesturePose != null)
        {
            Quaternion rotation = Quaternion.identity;
            Vector3 position = CameraCache.Main.ScreenToWorldPoint(screenPosition);
            gesturePose.ComputeJointPoses(handedness, rotation, position, jointsOut);
        }
    };
}
/// <summary>
/// Snaps the simulated hand directly to the current gesture's pose,
/// marking the blend as complete so no further interpolation occurs.
/// </summary>
public void ResetGesture()
{
    gestureBlending = 1.0f;

    var targetPose = ArticulatedHandPose.GetGesturePose(gesture);
    if (targetPose != null)
    {
        pose.Copy(targetPose);
    }
}
/// <summary>
/// Verifies gaze cursor visibility transitions: visible at startup, hidden while
/// an articulated hand is raised, and restored after a "select" speech command.
/// </summary>
public IEnumerator TestGazeCursorArticulated()
{
    TestUtilities.InitializeMixedRealityToolkitAndCreateScenes(true);
    TestUtilities.InitializePlayspace();
    RenderSettings.skybox = null;

    IMixedRealityInputSystem inputSystem;
    MixedRealityServiceRegistry.TryGetService(out inputSystem);
    Assert.IsNotNull(inputSystem, "MixedRealityInputSystem is null!");
    yield return(null);

    // Verify that the gaze cursor is visible at the start
    Assert.IsTrue(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should be visible at start");

    // raise hand up -- gaze cursor should no longer be visible
    // disable user input
    InputSimulationService inputSimulationService = (inputSystem as IMixedRealityDataProviderAccess).GetDataProvider <InputSimulationService>();
    Assert.IsNotNull(inputSimulationService, "InputSimulationService is null!");
    inputSimulationService.UserInputEnabled = false;

    // NOTE: removed an unused local that fetched the Open gesture pose directly;
    // the generator below already resolves the pose from the gesture id.
    var handOpenPose = PlayModeTestUtilities.GenerateHandPose(ArticulatedHandPose.GestureId.Open, Handedness.Right, Vector3.forward * 0.1f);
    inputSimulationService.HandDataRight.Update(true, false, handOpenPose);
    yield return(null);

    // Gaze cursor should not be visible
    Assert.IsFalse(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should not be visible when one articulated hand is up");
    inputSimulationService.HandDataRight.Update(false, false, handOpenPose);
    yield return(null);

    // Say "select" to make gaze cursor active again
    // Really we need to tear down the scene and create it again but MRTK doesn't support that yet
    var gazeInputSource = inputSystem.DetectedInputSources.Where(x => x.SourceName.Equals("Gaze")).First();
    inputSystem.RaiseSpeechCommandRecognized(gazeInputSource, RecognitionConfidenceLevel.High, new System.TimeSpan(), System.DateTime.Now, new SpeechCommands("select", KeyCode.Alpha1, MixedRealityInputAction.None));
    yield return(null);

    Assert.IsTrue(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should be visible after select command");
}
/// <summary>
/// Fills <paramref name="jointsOut"/> with the current simulated hand joint poses.
/// Blends the active gesture pose into the working pose, then places the hand in
/// world space at the (jittered) screen position using the configured Euler rotation.
/// </summary>
internal void FillCurrentFrame(MixedRealityPose[] jointsOut)
{
    ArticulatedHandPose gesturePose = ArticulatedHandPose.GetGesturePose(gesture);
    if (gesturePose != null && gestureBlending > poseBlending)
    {
        // Remap the remaining blend interval [poseBlending, 1] onto [0, 1]
        // so the interpolation advances by exactly the newly requested amount.
        float remaining = Mathf.Clamp01(1.0f - poseBlending);
        float t = remaining > 0.0f ? (gestureBlending - poseBlending) / remaining : 1.0f;
        pose.InterpolateOffsets(pose, gesturePose, t);
    }
    poseBlending = gestureBlending;

    // Lift the jittered screen position into world space and apply the hand rotation.
    Quaternion handRotation = Quaternion.Euler(HandRotateEulerAngles);
    Vector3 worldPosition = CameraCache.Main.ScreenToWorldPoint(ScreenPosition + JitterOffset);
    pose.ComputeJointPoses(handedness, handRotation, worldPosition, jointsOut);
}