internal void FillCurrentFrame(MixedRealityPose[] jointsOut)
{
    // Blend the simulated pose toward the target gesture pose, then place the
    // joints in world space based on the simulated viewport position/rotation.
    ArticulatedHandPose gesturePose = SimulatedArticulatedHandPoses.GetGesturePose(gesture);
    if (gesturePose != null && gestureBlending > poseBlending)
    {
        // Normalize the blend step by the remaining range; guard against
        // division by zero when the pose is already fully blended.
        float remaining = Mathf.Clamp01(1.0f - poseBlending);
        float t = remaining > 0.0f ? (gestureBlending - poseBlending) / remaining : 1.0f;
        pose.InterpolateOffsets(pose, gesturePose, t);
    }
    poseBlending = gestureBlending;

    // Map the viewport position into a jittered world-space hand position.
    var mainCamera = CameraCache.Main;
    Vector3 screenPoint = mainCamera.ViewportToScreenPoint(ViewportPosition);
    Vector3 worldPoint = mainCamera.ScreenToWorldPoint(screenPoint + JitterOffset);

    // Apply rotation relative to the wrist joint
    Quaternion wristRotation = pose.GetLocalJointPose(TrackedHandJoint.Wrist, handedness).Rotation;
    Quaternion localRotation = Quaternion.Euler(ViewportRotation) * Quaternion.Inverse(wristRotation);
    Quaternion worldRotation = CameraCache.Main.transform.rotation * localRotation;

    pose.ComputeJointPoses(handedness, worldRotation, worldPoint, jointsOut);
}
public IEnumerator TestGazeCursorArticulated()
{
    // Verifies that the gaze cursor hides while an articulated hand is tracked
    // and reappears after the "select" speech command re-enables it.
    IMixedRealityInputSystem inputSystem = PlayModeTestUtilities.GetInputSystem();
    yield return null;

    // Verify that the gaze cursor is visible at the start
    Assert.IsTrue(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should be visible at start");

    // raise hand up -- gaze cursor should no longer be visible
    // disable user input so simulated hand data is not overwritten
    InputSimulationService inputSimulationService = PlayModeTestUtilities.GetInputSimulationService();
    inputSimulationService.UserInputEnabled = false;

    // NOTE: the original code also fetched ArticulatedHandPose.GetGesturePose(Open)
    // into an unused local; removed, since GenerateHandPose supplies the pose.
    var handOpenPose = PlayModeTestUtilities.GenerateHandPose(ArticulatedHandPose.GestureId.Open, Handedness.Right, Vector3.forward * 0.1f, Quaternion.identity);
    inputSimulationService.HandDataRight.Update(true, false, handOpenPose);
    yield return null;

    // Gaze cursor should not be visible
    Assert.IsFalse(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should not be visible when one articulated hand is up");
    inputSimulationService.HandDataRight.Update(false, false, handOpenPose);
    yield return null;

    // Say "select" to make gaze cursor active again
    // Really we need to tear down the scene and create it again but MRTK doesn't support that yet
    var gazeInputSource = inputSystem.DetectedInputSources.Where(x => x.SourceName.Equals("Gaze")).First();
    inputSystem.RaiseSpeechCommandRecognized(gazeInputSource, RecognitionConfidenceLevel.High, new System.TimeSpan(), System.DateTime.Now, new SpeechCommands("select", KeyCode.Alpha1, MixedRealityInputAction.None));
    yield return null;

    Assert.IsTrue(inputSystem.GazeProvider.GazePointer.IsInteractionEnabled, "Gaze cursor should be visible after select command");
}
public static SimulatedHandData.HandJointDataGenerator GenerateHandPose(ArticulatedHandPose.GestureId gesture, Handedness handedness, Vector3 worldPosition, Quaternion rotation)
{
    // Return a generator closure that resolves the gesture pose lazily and
    // writes the joint poses at the requested world position and rotation.
    return jointsOut =>
    {
        ArticulatedHandPose sourcePose = ArticulatedHandPose.GetGesturePose(gesture);
        sourcePose.ComputeJointPoses(handedness, rotation, worldPosition, jointsOut);
    };
}
public static SimulatedHandData.HandJointDataGenerator GenerateHandPose(ArticulatedHandPose.GestureId gesture, Handedness handedness, Vector3 worldPosition, Quaternion rotation)
{
    // Return a generator closure; the requested rotation is composed with the
    // main camera's current rotation at generation time.
    return jointsOut =>
    {
        ArticulatedHandPose sourcePose = SimulatedArticulatedHandPoses.GetGesturePose(gesture);
        Quaternion cameraRelativeRotation = rotation * CameraCache.Main.transform.rotation;
        sourcePose.ComputeJointPoses(handedness, cameraRelativeRotation, worldPosition, jointsOut);
    };
}
public static SimulatedHandData.HandJointDataGenerator GenerateHandPose(ArticulatedHandPose.GestureId gesture, Handedness handedness, Vector3 screenPosition)
{
    // Return a generator closure that places the gesture pose at the world
    // point behind the given screen position, with identity rotation.
    return jointsOut =>
    {
        ArticulatedHandPose sourcePose = ArticulatedHandPose.GetGesturePose(gesture);
        Vector3 worldPoint = CameraCache.Main.ScreenToWorldPoint(screenPosition);
        sourcePose.ComputeJointPoses(handedness, Quaternion.identity, worldPoint, jointsOut);
    };
}
public void ResetGesture()
{
    // Snap the blend to fully applied and, when the gesture has a pose
    // definition, copy it over the current pose.
    gestureBlending = 1.0f;

    ArticulatedHandPose gesturePose = SimulatedArticulatedHandPoses.GetGesturePose(gesture);
    if (gesturePose == null)
    {
        return;
    }

    pose.Copy(gesturePose);
}
public void RecordHandStop()
{
    // Snapshot the current pose of every joint on the recording hand.
    var jointPoses = new MixedRealityPose[jointCount];
    for (int jointIndex = 0; jointIndex < jointCount; ++jointIndex)
    {
        HandJointUtils.TryGetJointPose<WindowsMixedRealityArticulatedHand>((TrackedHandJoint)jointIndex, recordingHand, out jointPoses[jointIndex]);
    }

    var recordedPose = new ArticulatedHandPose();
    recordedPose.ParseFromJointPoses(jointPoses, recordingHand, Quaternion.identity, offset);

    // Recording stops once the pose has been captured.
    recordingHand = Handedness.None;

    // Timestamped file name so repeated recordings do not overwrite each other.
    var filename = String.Format("{0}-{1}.json", OutputFileName, DateTime.UtcNow.ToString("yyyyMMdd-HHmmss"));
    StoreRecordedHandPose(recordedPose.ToJson(), filename);
}
internal void FillCurrentFrame(MixedRealityPose[] jointsOut)
{
    // Blend the simulated pose toward the target gesture pose, then place the
    // joints at the jittered screen position with the simulated rotation.
    ArticulatedHandPose gesturePose = ArticulatedHandPose.GetGesturePose(gesture);
    if (gesturePose != null && gestureBlending > poseBlending)
    {
        // Normalize the blend step by the remaining range; guard against
        // division by zero when the pose is already fully blended.
        float remaining = Mathf.Clamp01(1.0f - poseBlending);
        float t = remaining > 0.0f ? (gestureBlending - poseBlending) / remaining : 1.0f;
        pose.InterpolateOffsets(pose, gesturePose, t);
    }
    poseBlending = gestureBlending;

    Quaternion handRotation = Quaternion.Euler(HandRotateEulerAngles);
    Vector3 handPosition = CameraCache.Main.ScreenToWorldPoint(ScreenPosition + JitterOffset);
    pose.ComputeJointPoses(handedness, handRotation, handPosition, jointsOut);
}
/// <inheritdoc />
public override void Initialize()
{
    // Load gesture pose definitions so simulated hand data can be generated.
    ArticulatedHandPose.LoadGesturePoses();
}
/// <summary>
/// Releases the loaded gesture pose data when the service is torn down.
/// </summary>
public override void Destroy() => ArticulatedHandPose.ResetGesturePoses();