/// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
        private void UpdateHandData(InputDevice inputDevice)
        {
            using (UpdateHandDataPerfMarker.Auto())
            {
#if WINDOWS_UWP && WMR_ENABLED
                XRSubsystemHelpers.InputSubsystem?.GetCurrentSourceStates(states);

                foreach (SpatialInteractionSourceState sourceState in states)
                {
                    if (sourceState.Source.Handedness.ToMRTKHandedness() == ControllerHandedness)
                    {
                        handMeshProvider?.UpdateHandMesh(sourceState);
                        break;
                    }
                }
#endif // WINDOWS_UWP && WMR_ENABLED

                if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out Hand hand))
                {
                    foreach (HandFinger finger in handFingers)
                    {
                        if (hand.TryGetFingerBones(finger, fingerBones))
                        {
                            for (int i = 0; i < fingerBones.Count; i++)
                            {
                                TrackedHandJoint trackedHandJoint = ConvertToTrackedHandJoint(finger, i);
                                Bone             bone             = fingerBones[i];

                                Vector3    position = Vector3.zero;
                                Quaternion rotation = Quaternion.identity;

                                bool positionAvailable = bone.TryGetPosition(out position);
                                bool rotationAvailable = bone.TryGetRotation(out rotation);

                                // If either position or rotation is available, use both values as given.
                                // This might result in using a zeroed-out position or rotation; most likely,
                                // either both are available or both are unavailable.
                                if (positionAvailable || rotationAvailable)
                                {
                                    // We want input sources to follow the playspace, so fold in the playspace transform here to
                                    // put the controller pose into world space.
                                    position = MixedRealityPlayspace.TransformPoint(position);
                                    rotation = MixedRealityPlayspace.Rotation * rotation;

                                    unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                                }
                            }

                            // Unity doesn't provide a palm joint, so we synthesize one here
                            MixedRealityPose palmPose = CurrentControllerPose;
                            palmPose.Rotation *= (ControllerHandedness == Handedness.Left ? leftPalmOffset : rightPalmOffset);
                            unityJointPoses[TrackedHandJoint.Palm] = palmPose;
                        }
                    }

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
        }
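For context, here is a minimal sketch of how the InputDevice consumed above might be obtained with Unity's XR InputDevices API when experimenting outside MRTK's device manager. The class name and polling approach are illustrative assumptions, not part of the original source.

using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR;

public class HandDataPollingSketch : MonoBehaviour
{
    private readonly List<InputDevice> handDevices = new List<InputDevice>();

    private void Update()
    {
        // Ask Unity for any tracked right hand; swap Right for Left to poll the other hand.
        InputDevices.GetDevicesWithCharacteristics(
            InputDeviceCharacteristics.HandTracking | InputDeviceCharacteristics.Right,
            handDevices);

        foreach (InputDevice device in handDevices)
        {
            // CommonUsages.handData is the same feature usage read by UpdateHandData(InputDevice) above.
            if (device.TryGetFeatureValue(CommonUsages.handData, out Hand hand))
            {
                Debug.Log($"{device.name} reported hand data this frame.");
            }
        }
    }
}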
 /// <summary>
 /// Update the hand data from the device.
 /// </summary>
 /// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
 private void UpdateHandData(InputDevice inputDevice)
 {
     using (UpdateHandDataPerfMarker.Auto())
     {
         handMeshProvider?.UpdateHandMesh();
         handJointProvider?.UpdateHandJoints(inputDevice, unityJointPoses);
         handDefinition?.UpdateHandJoints(unityJointPoses);
     }
 }
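The handJointProvider used above is not shown on this page. A plausible sketch of such a provider follows, assuming it simply factors out the per-finger loop from the first example; the class name, the dictionary type of unityJointPoses, and the joint-conversion placeholder are inferred from the call sites rather than taken from the source.

using System;
using System.Collections.Generic;
using Microsoft.MixedReality.Toolkit;
using Microsoft.MixedReality.Toolkit.Utilities;
using UnityEngine;
using UnityEngine.XR;

// Hypothetical provider mirroring handJointProvider?.UpdateHandJoints(inputDevice, unityJointPoses) above.
public class HandJointProviderSketch
{
    private static readonly HandFinger[] handFingers = (HandFinger[])Enum.GetValues(typeof(HandFinger));
    private readonly List<Bone> fingerBones = new List<Bone>();

    public void UpdateHandJoints(InputDevice inputDevice, Dictionary<TrackedHandJoint, MixedRealityPose> jointPoses)
    {
        if (!inputDevice.TryGetFeatureValue(CommonUsages.handData, out Hand hand))
        {
            return;
        }

        foreach (HandFinger finger in handFingers)
        {
            if (!hand.TryGetFingerBones(finger, fingerBones))
            {
                continue;
            }

            for (int i = 0; i < fingerBones.Count; i++)
            {
                bool positionAvailable = fingerBones[i].TryGetPosition(out Vector3 position);
                bool rotationAvailable = fingerBones[i].TryGetRotation(out Quaternion rotation);

                if (positionAvailable || rotationAvailable)
                {
                    // Fold in the playspace transform, as the examples above do, to get a world-space pose.
                    position = MixedRealityPlayspace.TransformPoint(position);
                    rotation = MixedRealityPlayspace.Rotation * rotation;
                    jointPoses[ConvertToTrackedHandJoint(finger, i)] = new MixedRealityPose(position, rotation);
                }
            }
        }
    }

    // Placeholder for the finger/bone-index mapping used by the surrounding examples.
    private TrackedHandJoint ConvertToTrackedHandJoint(HandFinger finger, int index) => TrackedHandJoint.None;
}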
Example #3
 /// <summary>
 /// Update the hand data from the device.
 /// </summary>
 /// <param name="hand">The hand data retrieved from the platform.</param>
 private void UpdateHandData(Hand hand)
 {
     using (UpdateHandDataPerfMarker.Auto())
     {
         handMeshProvider?.UpdateHandMesh();
         handJointProvider?.UpdateHandJoints(hand, ref unityJointPoses);
         handDefinition?.UpdateHandJoints(unityJointPoses);
     }
 }
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InteractionSourceState interactionSourceState)
        {
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
            using (UpdateHandDataPerfMarker.Auto())
            {
                // Articulated hand support is only present in the 18362 version of the Windows
                // SDK and newer (which contains the V8 drop of the Universal API Contract). In
                // particular, the HandPose-related APIs are only present on this version and above.
                if (!articulatedHandApiAvailable)
                {
                    return;
                }

                PerceptionTimestamp perceptionTimestamp = PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now);
                IReadOnlyList<SpatialInteractionSourceState> sources = SpatialInteractionManager?.GetDetectedSourcesAtTimestamp(perceptionTimestamp);

                if (sources == null)
                {
                    return;
                }

                foreach (SpatialInteractionSourceState sourceState in sources)
                {
                    if (sourceState.Source.Id.Equals(interactionSourceState.source.id))
                    {
#if WINDOWS_UWP
                        handMeshProvider?.UpdateHandMesh(sourceState);
#endif // WINDOWS_UWP

                        HandPose handPose = sourceState.TryGetHandPose();

                        if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
                        {
                            for (int i = 0; i < jointPoses.Length; i++)
                            {
                                Vector3    jointPosition    = jointPoses[i].Position.ToUnityVector3();
                                Quaternion jointOrientation = jointPoses[i].Orientation.ToUnityQuaternion();

                                // We want the joints to follow the playspace, so fold in the playspace transform here to
                                // put the joint pose into world space.
                                jointPosition    = MixedRealityPlayspace.TransformPoint(jointPosition);
                                jointOrientation = MixedRealityPlayspace.Rotation * jointOrientation;

                                TrackedHandJoint handJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                                if (handJoint == TrackedHandJoint.IndexTip)
                                {
                                    lastIndexTipRadius = jointPoses[i].Radius;
                                }

                                unityJointPoses[handJoint] = new MixedRealityPose(jointPosition, jointOrientation);
                            }

                            handDefinition?.UpdateHandJoints(unityJointPoses);
                        }
                        break;
                    }
                }
            }
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
        }
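The ConvertHandJointKindToTrackedHandJoint helper is not included on this page. The sketch below illustrates the kind of mapping it performs, using only joint names that also appear in these examples; it is deliberately partial, and the real helper covers every HandJointKind value (assumes Windows.Perception.People.HandJointKind and MRTK's TrackedHandJoint).

        // Illustrative, partial mapping from Windows' HandJointKind to MRTK's TrackedHandJoint.
        private TrackedHandJoint ConvertHandJointKindToTrackedHandJoint(HandJointKind handJointKind)
        {
            switch (handJointKind)
            {
                case HandJointKind.Palm: return TrackedHandJoint.Palm;
                case HandJointKind.Wrist: return TrackedHandJoint.Wrist;
                case HandJointKind.ThumbTip: return TrackedHandJoint.ThumbTip;
                case HandJointKind.IndexTip: return TrackedHandJoint.IndexTip;
                // ...remaining fingers and joint segments omitted in this sketch...
                default: return TrackedHandJoint.None;
            }
        }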
Example #5
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InteractionSourceState interactionSourceState)
        {
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
            using (UpdateHandDataPerfMarker.Auto())
            {
                // Articulated hand support is only present in the 18362 version of the Windows
                // SDK and newer (which contains the V8 drop of the Universal API Contract). In
                // particular, the HandPose-related APIs are only present on this version and above.
                if (!articulatedHandApiAvailable)
                {
                    return;
                }

                SpatialInteractionSourceState sourceState = interactionSourceState.source.GetSpatialInteractionSourceState();

                if (sourceState == null)
                {
                    return;
                }

#if WINDOWS_UWP
                handMeshProvider?.UpdateHandMesh(sourceState);
#endif // WINDOWS_UWP

                HandPose handPose = sourceState.TryGetHandPose();

                if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
                {
                    for (int i = 0; i < jointPoses.Length; i++)
                    {
                        Vector3    position = jointPoses[i].Position.ToUnityVector3();
                        Quaternion rotation = jointPoses[i].Orientation.ToUnityQuaternion();

                        // We want the joints to follow the playspace, so fold in the playspace transform here to
                        // put the joint pose into world space.
                        position = MixedRealityPlayspace.TransformPoint(position);
                        rotation = MixedRealityPlayspace.Rotation * rotation;

                        TrackedHandJoint trackedHandJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                        if (trackedHandJoint == TrackedHandJoint.IndexTip)
                        {
                            lastIndexTipRadius = jointPoses[i].Radius;
                        }

                        unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                    }

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
        }
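GetSpatialInteractionSourceState is an extension method whose body is not shown here. A plausible sketch follows, reusing the timestamp-plus-source-id lookup from the previous example; the use of SpatialInteractionManager.GetForCurrentView is an assumption, since the real extension may obtain the manager differently.

        // Hypothetical extension that resolves an InteractionSource to the matching
        // SpatialInteractionSourceState, following the lookup pattern from the previous example.
        public static SpatialInteractionSourceState GetSpatialInteractionSourceState(this InteractionSource interactionSource)
        {
            PerceptionTimestamp perceptionTimestamp = PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now);
            IReadOnlyList<SpatialInteractionSourceState> sources =
                SpatialInteractionManager.GetForCurrentView()?.GetDetectedSourcesAtTimestamp(perceptionTimestamp);

            if (sources == null)
            {
                return null;
            }

            foreach (SpatialInteractionSourceState sourceState in sources)
            {
                if (sourceState.Source.Id.Equals(interactionSource.id))
                {
                    return sourceState;
                }
            }

            return null;
        }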
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
        private void UpdateHandData(InputDevice inputDevice)
        {
            using (UpdateHandDataPerfMarker.Auto())
            {
                handMeshProvider?.UpdateHandMesh();

#if MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)
                if (handTracker != null && handTracker.TryLocateHandJoints(FrameTime.OnUpdate, locations))
                {
                    foreach (HandJoint handJoint in HandJoints)
                    {
                        HandJointLocation handJointLocation = locations[(int)handJoint];

                        // We want input sources to follow the playspace, so fold in the playspace transform here to
                        // put the pose into world space.
#if MSFT_OPENXR_0_2_0_OR_NEWER
                        Vector3    position = MixedRealityPlayspace.TransformPoint(handJointLocation.Pose.position);
                        Quaternion rotation = MixedRealityPlayspace.Rotation * handJointLocation.Pose.rotation;
#else
                        Vector3    position = MixedRealityPlayspace.TransformPoint(handJointLocation.Position);
                        Quaternion rotation = MixedRealityPlayspace.Rotation * handJointLocation.Rotation;
#endif // MSFT_OPENXR_0_2_0_OR_NEWER

                        unityJointPoses[ConvertToTrackedHandJoint(handJoint)] = new MixedRealityPose(position, rotation);
                    }
#else
                if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out Hand hand))
                {
                    foreach (HandFinger finger in handFingers)
                    {
                        if (hand.TryGetRootBone(out Bone rootBone))
                        {
                            ReadHandJoint(TrackedHandJoint.Wrist, rootBone);
                        }

                        if (hand.TryGetFingerBones(finger, fingerBones))
                        {
                            for (int i = 0; i < fingerBones.Count; i++)
                            {
                                ReadHandJoint(ConvertToTrackedHandJoint(finger, i), fingerBones[i]);
                            }
                        }
                    }
#endif // MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
        }
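The ReadHandJoint helper called above is not shown on this page. A plausible sketch follows the TryGetPosition/TryGetRotation and playspace-transform pattern from the first XR SDK example; the exact body is inferred from the call sites, not confirmed by the source.

        // Hypothetical body for ReadHandJoint, mirroring the bone-reading pattern shown earlier.
        private void ReadHandJoint(TrackedHandJoint trackedHandJoint, Bone bone)
        {
            bool positionAvailable = bone.TryGetPosition(out Vector3 position);
            bool rotationAvailable = bone.TryGetRotation(out Quaternion rotation);

            if (positionAvailable || rotationAvailable)
            {
                // Fold in the playspace transform to put the joint pose into world space.
                position = MixedRealityPlayspace.TransformPoint(position);
                rotation = MixedRealityPlayspace.Rotation * rotation;

                unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
            }
        }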
Example #7
        /// <summary>
        /// Updates the joint poses and interactions for the articulated hand.
        /// </summary>
        public void UpdateState()
        {
            using (UpdateStatePerfMarker.Auto())
            {
                // Get and set the joint poses provided by the Leap Motion Controller
                SetJointPoses();

                // Update hand joints and raise event via handDefinition
                handDefinition?.UpdateHandJoints(jointPoses);

                UpdateInteractions();

                UpdateVelocity();
            }
        }
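UpdateVelocity comes from the shared controller base class and is not shown here. As a rough illustration only (not the provider's actual implementation), a frame-delta estimate of palm velocity might look like this:

        // Illustrative frame-to-frame velocity estimate; the real base-class logic may smooth
        // over several frames and also track angular velocity.
        private Vector3 lastPalmPosition;
        private float lastUpdateTime;

        private Vector3 EstimateVelocity(Vector3 currentPalmPosition)
        {
            float deltaTime = Time.time - lastUpdateTime;
            Vector3 velocity = deltaTime > 0f
                ? (currentPalmPosition - lastPalmPosition) / deltaTime
                : Vector3.zero;

            lastPalmPosition = currentPalmPosition;
            lastUpdateTime = Time.time;
            return velocity;
        }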
        protected bool UpdateHandData(OVRHand ovrHand, OVRSkeleton ovrSkeleton)
        {
            bool isTracked = ovrHand.IsTracked;

            if (ovrHand.HandConfidence == OVRHand.TrackingConfidence.High)
            {
                _lastHighConfidenceTime = Time.unscaledTime;
            }
            if (ovrHand.HandConfidence == OVRHand.TrackingConfidence.Low)
            {
                if (settingsProfile.MinimumHandConfidence == OVRHand.TrackingConfidence.High)
                {
                    isTracked = false;
                }
                else
                {
                    float lowConfidenceTime = Time.time - _lastHighConfidenceTime;
                    if (settingsProfile.LowConfidenceTimeThreshold > 0 &&
                        settingsProfile.LowConfidenceTimeThreshold < lowConfidenceTime)
                    {
                        isTracked = false;
                    }
                }
            }

            if (ControllerHandedness == Handedness.Left)
            {
                settingsProfile.CurrentLeftHandTrackingConfidence = ovrHand.HandConfidence;
            }
            else
            {
                settingsProfile.CurrentRightHandTrackingConfidence = ovrHand.HandConfidence;
            }

            if (ovrSkeleton != null)
            {
                var bones = ovrSkeleton.Bones;
                foreach (var bone in bones)
                {
                    UpdateBone(bone);
                }

                UpdatePalm();
            }

            handDefinition?.UpdateHandJoints(jointPoses);

            // Note: After some testing, it seems that when moving your hand fast, Oculus's pinch estimation data gets frozen,
            // which leads to stuck pinches. To counter this, we perform a distance check between thumb and index to determine
            // whether we should force the pinch to a false state.
            float pinchStrength = HandPoseUtils.CalculateIndexPinch(ControllerHandedness);

            if (pinchStrength == 0.0f)
            {
                IsPinching = false;
            }
            else
            {
                if (IsPinching)
                {
                    // If we are already pinching, we make the pinch a bit sticky
                    IsPinching = pinchStrength > 0.85f;
                }
                else
                {
                    // If not yet pinching, only consider pinching if finger confidence is high
                    IsPinching = pinchStrength > 0.5f && ovrHand.GetFingerConfidence(OVRHand.HandFinger.Index) == OVRHand.TrackingConfidence.High;
                }
            }

            isIndexGrabbing  = HandPoseUtils.IsIndexGrabbing(ControllerHandedness);
            isMiddleGrabbing = HandPoseUtils.IsMiddleGrabbing(ControllerHandedness);


            // Pinch was also used as grab; we want to allow hand-curl grab, not just pinch.
            // Determine pinch and grab separately.
            if (isTracked)
            {
                IsGrabbing = isIndexGrabbing && isMiddleGrabbing;
            }

            return isTracked;
        }
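The pinch branch above can also be read as a small state-update function. The sketch below restates the same thresholds from the code above so the transitions are easier to see in isolation; it is a restatement for illustration, not part of the original source.

        // Restates the IsPinching update above: a strength of exactly 0 always clears the pinch;
        // an active pinch is kept only while strength stays above 0.85; a new pinch requires
        // strength above 0.5 and high index-finger tracking confidence.
        private static bool UpdatePinchState(bool wasPinching, float pinchStrength, bool indexConfidenceHigh)
        {
            if (pinchStrength == 0.0f)
            {
                return false;
            }

            return wasPinching
                ? pinchStrength > 0.85f
                : pinchStrength > 0.5f && indexConfidenceHigh;
        }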