/// <summary>
/// Update the hand data from the device.
/// </summary>
/// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
private void UpdateHandData(InputDevice inputDevice)
{
    using (UpdateHandDataPerfMarker.Auto())
    {
#if WINDOWS_UWP && WMR_ENABLED
        // The XR SDK input device doesn't surface the hand mesh, so query the
        // platform's spatial interaction sources directly and forward the state
        // for the matching hand to the hand definition.
        XRSDKSubsystemHelpers.InputSubsystem?.GetCurrentSourceStates(states);

        foreach (SpatialInteractionSourceState sourceState in states)
        {
            if (sourceState.Source.Handedness.ToMRTKHandedness() == ControllerHandedness)
            {
                handDefinition?.UpdateHandMesh(sourceState);
                break;
            }
        }
#endif // WINDOWS_UWP && WMR_ENABLED

        Hand hand;
        if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out hand))
        {
            foreach (HandFinger finger in handFingers)
            {
                if (hand.TryGetFingerBones(finger, fingerBones))
                {
                    for (int i = 0; i < fingerBones.Count; i++)
                    {
                        TrackedHandJoint trackedHandJoint = ConvertToTrackedHandJoint(finger, i);
                        Bone bone = fingerBones[i];

                        Vector3 position = Vector3.zero;
                        Quaternion rotation = Quaternion.identity;

                        // Query both unconditionally (no short-circuit) so a joint
                        // with only one piece of data available still uses the other.
                        bool positionAvailable = bone.TryGetPosition(out position);
                        bool rotationAvailable = bone.TryGetRotation(out rotation);

                        // If either position or rotation is available, use both pieces of data given.
                        // This might result in using a zeroed out position or rotation. Most likely,
                        // either both are available or both are unavailable.
                        if (positionAvailable || rotationAvailable)
                        {
                            // We want input sources to follow the playspace, so fold in the playspace transform here to
                            // put the controller pose into world space.
                            position = MixedRealityPlayspace.TransformPoint(position);
                            rotation = MixedRealityPlayspace.Rotation * rotation;

                            unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                        }
                    }

                    // Unity doesn't provide a palm joint, so we synthesize one here
                    // from the controller pose plus a per-hand rotation offset.
                    MixedRealityPose palmPose = CurrentControllerPose;
                    palmPose.Rotation *= (ControllerHandedness == Handedness.Left ? leftPalmOffset : rightPalmOffset);
                    unityJointPoses[TrackedHandJoint.Palm] = palmPose;
                }
            }

            handDefinition?.UpdateHandJoints(unityJointPoses);
        }
    }
}
/// <summary>
/// Update the hand data from the device.
/// </summary>
/// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
private void UpdateHandData(InteractionSourceState interactionSourceState)
{
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
    // Articulated hand support is only present in the 18362 version and beyond Windows
    // SDK (which contains the V8 drop of the Universal API Contract). In particular,
    // the HandPose related APIs are only present on this version and above.
    if (!articulatedHandApiAvailable)
    {
        return;
    }

    Profiler.BeginSample("[MRTK] WindowsMixedRealityArticulatedHand.UpdateHandData");

    PerceptionTimestamp perceptionTimestamp = PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now);
    IReadOnlyList<SpatialInteractionSourceState> sources = SpatialInteractionManager?.GetDetectedSourcesAtTimestamp(perceptionTimestamp);

    // SpatialInteractionManager may be null (e.g. before the platform is ready),
    // in which case the null-conditional call above leaves sources null. Guard
    // here so the foreach below doesn't throw a NullReferenceException.
    if (sources != null)
    {
        foreach (SpatialInteractionSourceState sourceState in sources)
        {
            // Match the platform source to this controller's interaction source.
            if (sourceState.Source.Id.Equals(interactionSourceState.source.id))
            {
#if WINDOWS_UWP
                handDefinition?.UpdateHandMesh(sourceState);
#endif // WINDOWS_UWP

                HandPose handPose = sourceState.TryGetHandPose();
                if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
                {
                    for (int i = 0; i < jointPoses.Length; i++)
                    {
                        Vector3 jointPosition = jointPoses[i].Position.ToUnityVector3();
                        Quaternion jointOrientation = jointPoses[i].Orientation.ToUnityQuaternion();

                        // We want the joints to follow the playspace, so fold in the playspace transform here to
                        // put the joint pose into world space.
                        jointPosition = MixedRealityPlayspace.TransformPoint(jointPosition);
                        jointOrientation = MixedRealityPlayspace.Rotation * jointOrientation;

                        TrackedHandJoint handJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                        // Cache the index tip radius for consumers that need it (e.g. poke interactions).
                        if (handJoint == TrackedHandJoint.IndexTip)
                        {
                            lastIndexTipRadius = jointPoses[i].Radius;
                        }

                        unityJointPoses[handJoint] = new MixedRealityPose(jointPosition, jointOrientation);
                    }

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
                break;
            }
        }
    }

    Profiler.EndSample(); // UpdateHandData
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
}
/// <summary>
/// Update the hand data from the device.
/// </summary>
/// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
private void UpdateHandData(InputDevice inputDevice)
{
    Profiler.BeginSample("[MRTK] WindowsMixedRealityXRSDKArticulatdHand.UpdateHandData");

#if WINDOWS_UWP && WMR_ENABLED
    // The XR SDK input device doesn't surface the hand mesh, so query the
    // platform's spatial interaction sources directly and forward the state
    // for the matching hand to the hand definition.
    XRSDKSubsystemHelpers.InputSubsystem?.GetCurrentSourceStates(states);

    foreach (SpatialInteractionSourceState sourceState in states)
    {
        if (sourceState.Source.Handedness.ToMRTKHandedness() == ControllerHandedness)
        {
            handDefinition?.UpdateHandMesh(sourceState);
            break;
        }
    }
#endif // WINDOWS_UWP && WMR_ENABLED

    Hand hand;
    if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out hand))
    {
        foreach (HandFinger finger in handFingers)
        {
            if (hand.TryGetFingerBones(finger, fingerBones))
            {
                for (int i = 0; i < fingerBones.Count; i++)
                {
                    TrackedHandJoint trackedHandJoint = ConvertToTrackedHandJoint(finger, i);
                    Bone bone = fingerBones[i];

                    Vector3 position = Vector3.zero;
                    Quaternion rotation = Quaternion.identity;

                    // BUG FIX: the previous code used
                    //   if (bone.TryGetPosition(out position) || bone.TryGetRotation(out rotation))
                    // which short-circuits: when the position is available, TryGetRotation
                    // is never called, leaving the joint with an identity rotation.
                    // Evaluate both unconditionally, then combine the results.
                    bool positionAvailable = bone.TryGetPosition(out position);
                    bool rotationAvailable = bone.TryGetRotation(out rotation);

                    // If either position or rotation is available, use both pieces of data given.
                    // This might result in using a zeroed out position or rotation. Most likely,
                    // either both are available or both are unavailable.
                    if (positionAvailable || rotationAvailable)
                    {
                        // We want input sources to follow the playspace, so fold in the playspace transform here to
                        // put the controller pose into world space.
                        position = MixedRealityPlayspace.TransformPoint(position);
                        rotation = MixedRealityPlayspace.Rotation * rotation;

                        unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                    }
                }

                // Unity doesn't provide a palm joint, so we synthesize one here
                // from the controller pose plus a per-hand rotation offset.
                MixedRealityPose palmPose = CurrentControllerPose;
                palmPose.Rotation *= (ControllerHandedness == Handedness.Left ? leftPalmOffset : rightPalmOffset);
                unityJointPoses[TrackedHandJoint.Palm] = palmPose;
            }
        }

        handDefinition?.UpdateHandJoints(unityJointPoses);
    }

    Profiler.EndSample(); // UpdateHandData
}