コード例 #1
0
        /// <summary>
        /// Update the "Controller" input from the device
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform</param>
        private void UpdateControllerData(InteractionSourceState interactionSourceState)
        {
            var previousTrackingState = TrackingState;
            var kind = interactionSourceState.source.kind;

            lastControllerPose = currentControllerPose;

            // Hands always report pose data; controllers only do so when they support pointing.
            bool supportsTracking =
                kind == InteractionSourceKind.Hand ||
                (kind == InteractionSourceKind.Controller && interactionSourceState.source.supportsPointing);

            if (supportsTracking)
            {
                // Query the platform for position and rotation.
                IsPositionAvailable = interactionSourceState.sourcePose.TryGetPosition(out currentControllerPosition);

                // Position accuracy is only meaningful when a position was actually reported.
                IsPositionApproximate = IsPositionAvailable &&
                    interactionSourceState.sourcePose.positionAccuracy == InteractionSourcePositionAccuracy.Approximate;

                IsRotationAvailable = interactionSourceState.sourcePose.TryGetRotation(out currentControllerRotation);

                // Devices are considered tracked if we receive position OR rotation data from the sensors.
                TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;
            }
            else
            {
                // The input source does not support tracking.
                TrackingState = TrackingState.NotApplicable;
            }

            currentControllerPose.Position = currentControllerPosition;
            currentControllerPose.Rotation = currentControllerRotation;

            // Raise input system events if it is enabled.
            if (previousTrackingState != TrackingState)
            {
                MixedRealityToolkit.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
            }

            // Only raise pose events while tracked and when the pose actually changed.
            if (TrackingState == TrackingState.Tracked && lastControllerPose != currentControllerPose)
            {
                if (IsPositionAvailable && IsRotationAvailable)
                {
                    MixedRealityToolkit.InputSystem?.RaiseSourcePoseChanged(InputSource, this, currentControllerPose);
                }
                else if (IsPositionAvailable)
                {
                    MixedRealityToolkit.InputSystem?.RaiseSourcePositionChanged(InputSource, this, currentControllerPosition);
                }
                else if (IsRotationAvailable)
                {
                    MixedRealityToolkit.InputSystem?.RaiseSourceRotationChanged(InputSource, this, currentControllerRotation);
                }
            }
        }
コード例 #2
0
        /// <inheritdoc/>
        public bool TryGetJoint(TrackedHandJoint joint, out MixedRealityPose pose)
        {
#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
            // Joint poses are only cached on platforms with hand-tracking support.
            return unityJointPoses.TryGetValue(joint, out pose);
#else
            // No joint data on this platform; report a neutral pose and failure.
            pose = MixedRealityPose.ZeroIdentity;
            return false;
#endif
        }
コード例 #3
0
ファイル: LuminController.cs プロジェクト: vimaec/Lumin
        /// <summary>
        /// Updates tracking state and pose data from the Magic Leap controller
        /// reference, raising input system events when state or pose changes.
        /// </summary>
        private void UpdateControllerData()
        {
            var lastState = TrackingState;

            lastControllerPose = currentControllerPose;

            if (MlControllerReference.Type == MLInputControllerType.Control)
            {
                // The source is a physical Control device; its degrees-of-freedom
                // setting determines which pose data is usable.
                IsPositionAvailable = MlControllerReference.Dof != MLInputControllerDof.None;

                if (IsPositionAvailable)
                {
                    // Medium or lower calibration accuracy is treated as approximate.
                    IsPositionApproximate = MlControllerReference.CalibrationAccuracy <= MLControllerCalibAccuracy.Medium;
                }
                else
                {
                    IsPositionApproximate = false;
                }

                // Rotation data requires full six degrees of freedom.
                IsRotationAvailable = MlControllerReference.Dof == MLInputControllerDof.Dof6;

                // Devices are considered tracked if we receive position OR rotation data from the sensors.
                TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;
            }
            else
            {
                // The input source does not support tracking.
                TrackingState = TrackingState.NotApplicable;
            }

            // Pose is copied unconditionally; events below decide whether it is reported.
            currentControllerPose.Position = MlControllerReference.Position;
            currentControllerPose.Rotation = MlControllerReference.Orientation;

            // Raise input system events if it is enabled.
            if (lastState != TrackingState)
            {
                MixedRealityToolkit.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
            }

            // Only raise pose events while tracked and when the pose actually changed.
            if (TrackingState == TrackingState.Tracked && lastControllerPose != currentControllerPose)
            {
                if (IsPositionAvailable && IsRotationAvailable)
                {
                    MixedRealityToolkit.InputSystem?.RaiseSourcePoseChanged(InputSource, this, currentControllerPose);
                }
                else if (IsPositionAvailable && !IsRotationAvailable)
                {
                    MixedRealityToolkit.InputSystem?.RaiseSourcePositionChanged(InputSource, this, currentControllerPose.Position);
                }
                else if (!IsPositionAvailable && IsRotationAvailable)
                {
                    MixedRealityToolkit.InputSystem?.RaiseSourceRotationChanged(InputSource, this, currentControllerPose.Rotation);
                }
            }
        }
コード例 #4
0
 /// <summary>
 /// Attempts to look up the cached pose for the requested hand joint.
 /// </summary>
 public override bool TryGetJoint(TrackedHandJoint joint, out MixedRealityPose pose)
 {
     if (!jointPoses.TryGetValue(joint, out pose))
     {
         // No data for this joint; report a neutral pose and failure.
         pose = MixedRealityPose.ZeroIdentity;
         return false;
     }

     return true;
 }
コード例 #5
0
 /// <summary>
 /// Gets the pose for the requested joint, falling back to the current grip
 /// pose when no joint-specific data exists. Always reports success.
 /// </summary>
 public bool TryGetJoint(TrackedHandJoint joint, out MixedRealityPose pose)
 {
     if (!jointPoses.TryGetValue(joint, out pose))
     {
         // Fall back to the grip pose so callers always receive a usable pose.
         pose = currentGripPose;
     }

     return true;
 }
コード例 #6
0
        /// <inheritdoc/>
        public bool TryGetJoint(TrackedHandJoint joint, out MixedRealityPose pose)
        {
#if WINDOWS_UWP
            // Joint poses are only tracked on UWP builds.
            return unityJointPoses.TryGetValue(joint, out pose);
#else
            // No joint data outside UWP; report a neutral pose and failure.
            pose = MixedRealityPose.ZeroIdentity;
            return false;
#endif
        }
コード例 #7
0
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InteractionSourceState interactionSourceState)
        {
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
            // Articulated hand support is only present in the 18362 version and beyond Windows
            // SDK (which contains the V8 drop of the Universal API Contract). In particular,
            // the HandPose related APIs are only present on this version and above.
            if (!articulatedHandApiAvailable)
            {
                return;
            }

            Profiler.BeginSample("[MRTK] WindowsMixedRealityArticulatedHand.UpdateHandData");

            PerceptionTimestamp perceptionTimestamp = PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now);
            IReadOnlyList<SpatialInteractionSourceState> sources = SpatialInteractionManager?.GetDetectedSourcesAtTimestamp(perceptionTimestamp);

            // BUGFIX: sources is null whenever SpatialInteractionManager is null (the call
            // above uses '?.'); the previous code enumerated it unconditionally, which
            // would throw a NullReferenceException in that case.
            if (sources != null)
            {
                foreach (SpatialInteractionSourceState sourceState in sources)
                {
                    // Only process the source matching this controller.
                    if (sourceState.Source.Id.Equals(interactionSourceState.source.id))
                    {
#if WINDOWS_UWP
                        handDefinition?.UpdateHandMesh(sourceState);
#endif // WINDOWS_UWP

                        HandPose handPose = sourceState.TryGetHandPose();

                        if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
                        {
                            for (int i = 0; i < jointPoses.Length; i++)
                            {
                                Vector3    jointPosition    = jointPoses[i].Position.ToUnityVector3();
                                Quaternion jointOrientation = jointPoses[i].Orientation.ToUnityQuaternion();

                                // We want the joints to follow the playspace, so fold in the playspace transform here to
                                // put the joint pose into world space.
                                jointPosition    = MixedRealityPlayspace.TransformPoint(jointPosition);
                                jointOrientation = MixedRealityPlayspace.Rotation * jointOrientation;

                                TrackedHandJoint handJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                                // Remember the most recent index tip radius.
                                if (handJoint == TrackedHandJoint.IndexTip)
                                {
                                    lastIndexTipRadius = jointPoses[i].Radius;
                                }

                                unityJointPoses[handJoint] = new MixedRealityPose(jointPosition, jointOrientation);
                            }

                            handDefinition?.UpdateHandJoints(unityJointPoses);
                        }
                        break;
                    }
                }
            }

            Profiler.EndSample(); // UpdateHandData
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
        }
コード例 #8
0
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
        private void UpdateHandData(InputDevice inputDevice)
        {
            using (UpdateHandDataPerfMarker.Auto())
            {
#if WINDOWS_UWP && WMR_ENABLED
                // Hand mesh data comes from the WinRT spatial interaction APIs rather
                // than the Unity XR InputDevice, so locate the source by handedness.
                XRSubsystemHelpers.InputSubsystem?.GetCurrentSourceStates(states);

                foreach (SpatialInteractionSourceState sourceState in states)
                {
                    if (sourceState.Source.Handedness.ToMRTKHandedness() == ControllerHandedness)
                    {
                        handMeshProvider?.UpdateHandMesh(sourceState);
                        break;
                    }
                }
#endif // WINDOWS_UWP && WMR_ENABLED

                Hand hand;
                if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out hand))
                {
                    foreach (HandFinger finger in handFingers)
                    {
                        if (hand.TryGetFingerBones(finger, fingerBones))
                        {
                            for (int i = 0; i < fingerBones.Count; i++)
                            {
                                Bone bone = fingerBones[i];

                                // Both queries run unconditionally so rotation is
                                // fetched even when position succeeded.
                                bool positionAvailable = bone.TryGetPosition(out Vector3 position);
                                bool rotationAvailable = bone.TryGetRotation(out Quaternion rotation);

                                // If either position or rotation is available, use both pieces of data given.
                                // This might result in using a zeroed out position or rotation. Most likely,
                                // either both are available or both are unavailable.
                                if (positionAvailable || rotationAvailable)
                                {
                                    // We want input sources to follow the playspace, so fold in the playspace transform here to
                                    // put the controller pose into world space.
                                    position = MixedRealityPlayspace.TransformPoint(position);
                                    rotation = MixedRealityPlayspace.Rotation * rotation;

                                    unityJointPoses[ConvertToTrackedHandJoint(finger, i)] = new MixedRealityPose(position, rotation);
                                }
                            }

                            // Unity doesn't provide a palm joint, so we synthesize one here
                            // from the controller pose plus a fixed per-hand rotation offset.
                            // NOTE(review): this runs once per finger with identical results;
                            // it could likely be hoisted after the loop — confirm before changing.
                            MixedRealityPose palmPose = CurrentControllerPose;
                            palmPose.Rotation *= (ControllerHandedness == Handedness.Left ? leftPalmOffset : rightPalmOffset);
                            unityJointPoses[TrackedHandJoint.Palm] = palmPose;
                        }
                    }

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
        }
コード例 #9
0
ファイル: HandUtilities.cs プロジェクト: ltstein/XRTK-Core
        /// <summary>
        /// Gets an estimated <see cref="TrackedHandJoint.Palm"/> pose.
        /// Requires known <see cref="TrackedHandJoint.MiddleMetacarpal"/> and
        /// <see cref="TrackedHandJoint.MiddleProximal"/> poses.
        /// </summary>
        /// <param name="jointPoses">Known joint poses.</param>
        /// <returns>Estimated <see cref="TrackedHandJoint.Palm"/> pose.</returns>
        public static MixedRealityPose GetEstimatedPalmPose(MixedRealityPose[] jointPoses)
        {
            var metacarpalPose = GetEstimatedMiddleMetacarpalPose(jointPoses);
            var proximalPose = jointPoses[(int)TrackedHandJoint.MiddleProximal];

            // The palm is placed halfway between the middle metacarpal and proximal
            // joints, oriented like the metacarpal.
            var estimatedPosition = Vector3.Lerp(metacarpalPose.Position, proximalPose.Position, 0.5f);

            return new MixedRealityPose(estimatedPosition, metacarpalPose.Rotation);
        }
コード例 #10
0
        /// <summary>
        /// Gets updated joint <see cref="MixedRealityPose"/>s for all <see cref="TrackedHandJoint"/>s of a hand.
        /// </summary>
        /// <param name="platformJointPoses"><see cref="JointPose"/>s retrieved from the platform.</param>
        /// <param name="handRootPose">The hand's root <see cref="MixedRealityPose"/>.</param>
        /// <returns>Joint <see cref="MixedRealityPose"/>s in <see cref="TrackedHandJoint"/> ascending order.</returns>
        private MixedRealityPose[] GetJointPoses(JointPose[] platformJointPoses, MixedRealityPose handRootPose)
        {
            for (int index = 0; index < platformJointPoses.Length; index++)
            {
                // Map the platform joint index to our joint enumeration, then
                // store its converted pose at the matching slot.
                TrackedHandJoint trackedJoint = jointIndices[index].ToTrackedHandJoint();
                jointPoses[(int)trackedJoint] = GetJointPose(trackedJoint, handRootPose, platformJointPoses[index]);
            }

            return jointPoses;
        }
コード例 #11
0
ファイル: HandUtilities.cs プロジェクト: ltstein/XRTK-Core
        /// <summary>
        /// Gets an estimated <see cref="TrackedHandJoint.MiddleMetacarpal"/> pose.
        /// Requires known <see cref="TrackedHandJoint.ThumbMetacarpal"/> and
        /// <see cref="TrackedHandJoint.LittleMetacarpal"/> poses.
        /// </summary>
        /// <param name="jointPoses">Known joint poses.</param>
        /// <returns>Estimated <see cref="TrackedHandJoint.MiddleMetacarpal"/> pose.</returns>
        public static MixedRealityPose GetEstimatedMiddleMetacarpalPose(MixedRealityPose[] jointPoses)
        {
            var thumbPose = jointPoses[(int)TrackedHandJoint.ThumbMetacarpal];
            var littlePose = jointPoses[(int)TrackedHandJoint.LittleMetacarpal];

            // The middle metacarpal is placed halfway between the thumb and little
            // metacarpals, oriented like the wrist.
            var estimatedPosition = Vector3.Lerp(thumbPose.Position, littlePose.Position, 0.5f);
            var wristRotation = jointPoses[(int)TrackedHandJoint.Wrist].Rotation;

            return new MixedRealityPose(estimatedPosition, wristRotation);
        }
コード例 #12
0
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
        private void UpdateHandData(InputDevice inputDevice)
        {
            Profiler.BeginSample("[MRTK] WindowsMixedRealityXRSDKArticulatdHand.UpdateHandData");

#if WINDOWS_UWP && WMR_ENABLED
            // Hand mesh data comes from the WinRT spatial interaction APIs rather
            // than the Unity XR InputDevice, so locate the source by handedness.
            XRSDKSubsystemHelpers.InputSubsystem?.GetCurrentSourceStates(states);

            foreach (SpatialInteractionSourceState sourceState in states)
            {
                if (sourceState.Source.Handedness.ToMRTKHandedness() == ControllerHandedness)
                {
                    handDefinition?.UpdateHandMesh(sourceState);
                    break;
                }
            }
#endif // WINDOWS_UWP && WMR_ENABLED

            Hand hand;
            if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out hand))
            {
                foreach (HandFinger finger in handFingers)
                {
                    if (hand.TryGetFingerBones(finger, fingerBones))
                    {
                        for (int i = 0; i < fingerBones.Count; i++)
                        {
                            TrackedHandJoint trackedHandJoint = ConvertToTrackedHandJoint(finger, i);
                            Bone             bone             = fingerBones[i];

                            // BUGFIX: query both values unconditionally. The previous
                            // short-circuiting '||' skipped TryGetRotation whenever the
                            // position was available, so the joint was stored with an
                            // identity rotation.
                            bool positionAvailable = bone.TryGetPosition(out Vector3 position);
                            bool rotationAvailable = bone.TryGetRotation(out Quaternion rotation);

                            // If either value is available, use both pieces of data given;
                            // most likely both are available or both are unavailable.
                            if (positionAvailable || rotationAvailable)
                            {
                                // We want input sources to follow the playspace, so fold in the playspace transform here to
                                // put the controller pose into world space.
                                position = MixedRealityPlayspace.TransformPoint(position);
                                rotation = MixedRealityPlayspace.Rotation * rotation;

                                unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                            }
                        }

                        // Unity doesn't provide a palm joint, so we synthesize one here
                        MixedRealityPose palmPose = CurrentControllerPose;
                        palmPose.Rotation *= (ControllerHandedness == Handedness.Left ? leftPalmOffset : rightPalmOffset);
                        unityJointPoses[TrackedHandJoint.Palm] = palmPose;
                    }
                }

                handDefinition?.UpdateHandJoints(unityJointPoses);
            }

            Profiler.EndSample(); // UpdateHandData
        }
コード例 #13
0
        /// <inheritdoc/>
        public bool TryGetJoint(TrackedHandJoint joint, out MixedRealityPose pose)
        {
            if (unityJointPoses == null)
            {
                // Joint cache not created yet; report a neutral pose and failure.
                pose = MixedRealityPose.ZeroIdentity;
                return false;
            }

            pose = unityJointPoses[(int)joint];

            // A default-valued pose means no data has been stored for this joint.
            return pose != default(MixedRealityPose);
        }
        /// <summary>
        /// Setup the default interactions, then update the spatial pointer rotation with the preconfigured offset angle.
        /// </summary>
        public override void SetupDefaultInteractions()
        {
            base.SetupDefaultInteractions();

            Assert.AreEqual(Interactions[0].Description, "Spatial Pointer", "The first interaction mapping is no longer the Spatial Pointer. Please update.");

            // Apply the configured pointer offset as a rotation about the left axis.
            var offsetPose = MixedRealityPose.ZeroIdentity;
            offsetPose.Rotation *= Quaternion.AngleAxis(PointerOffsetAngle, Vector3.left);

            Interactions[0].PoseData = offsetPose;
        }
コード例 #15
0
        /// <summary>
        /// Appends a keyframe for every pose component (position xyz, rotation xyzw)
        /// to the given curve set at the specified time.
        /// </summary>
        private static void AddPoseKey(PoseCurves curves, float time, MixedRealityPose pose)
        {
            var position = pose.Position;
            var rotation = pose.Rotation;

            AddFloatKey(curves.PositionX, time, position.x);
            AddFloatKey(curves.PositionY, time, position.y);
            AddFloatKey(curves.PositionZ, time, position.z);

            AddFloatKey(curves.RotationX, time, rotation.x);
            AddFloatKey(curves.RotationY, time, rotation.y);
            AddFloatKey(curves.RotationZ, time, rotation.z);
            AddFloatKey(curves.RotationW, time, rotation.w);
        }
コード例 #16
0
 /// <summary>
 /// Update the position based on input.
 /// </summary>
 /// <returns>A Vector3 describing the desired position</returns>
 public Vector3 UpdateTransform(MixedRealityPose pointerCentroidPose, MixedRealityTransform currentTarget, bool isPointerAnchor, bool isNearManipulation)
 {
     // Near manipulation only needs the pointer pose and target; far manipulation
     // also uses the target's rotation, scale, and the anchoring mode.
     return isNearManipulation
         ? NearManipulationUpdate(pointerCentroidPose, currentTarget)
         : FarManipulationUpdate(pointerCentroidPose, currentTarget.Rotation, currentTarget.Scale, isPointerAnchor);
 }
コード例 #17
0
        /// <summary>
        /// Captures the initial grab geometry: the grab centroid expressed in both
        /// pointer-local and object-local space, plus the world-space offset from
        /// the grab centroid to the object.
        /// </summary>
        public void Setup(MixedRealityPose pointerCentroidPose, Vector3 grabCentroid, MixedRealityPose objectPose, Vector3 objectScale)
        {
            // Express the grab point in the pointer's local frame.
            var inversePointerRotation = Quaternion.Inverse(pointerCentroidPose.Rotation);
            pointerLocalGrabPoint = inversePointerRotation * (grabCentroid - pointerCentroidPose.Position);

            // Express the grab point in the object's local frame, normalized by scale.
            objectLocalGrabPoint = Quaternion.Inverse(objectPose.Rotation) * (grabCentroid - objectPose.Position);
            objectLocalGrabPoint = objectLocalGrabPoint.Div(objectScale);

            // World-space offset from the grab centroid to the object origin.
            grabToObject = objectPose.Position - grabCentroid;
        }
コード例 #18
0
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InteractionSourceState interactionSourceState)
        {
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
            using (UpdateHandDataPerfMarker.Auto())
            {
                // Articulated hand support is only present in the 18362 version and beyond Windows
                // SDK (which contains the V8 drop of the Universal API Contract). In particular,
                // the HandPose related APIs are only present on this version and above.
                if (!articulatedHandApiAvailable)
                {
                    return;
                }

                // Resolve the WinRT source state for this Unity interaction source.
                SpatialInteractionSourceState sourceState = interactionSourceState.source.GetSpatialInteractionSourceState();

                if (sourceState == null)
                {
                    return;
                }

#if WINDOWS_UWP
                handMeshProvider?.UpdateHandMesh(sourceState);
#endif // WINDOWS_UWP

                HandPose handPose = sourceState.TryGetHandPose();

                // Only update joints when a hand pose with joint data is available.
                if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
                {
                    for (int i = 0; i < jointPoses.Length; i++)
                    {
                        Vector3    position = jointPoses[i].Position.ToUnityVector3();
                        Quaternion rotation = jointPoses[i].Orientation.ToUnityQuaternion();

                        // We want the joints to follow the playspace, so fold in the playspace transform here to
                        // put the joint pose into world space.
                        position = MixedRealityPlayspace.TransformPoint(position);
                        rotation = MixedRealityPlayspace.Rotation * rotation;

                        TrackedHandJoint trackedHandJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                        // Remember the most recent index tip radius.
                        if (trackedHandJoint == TrackedHandJoint.IndexTip)
                        {
                            lastIndexTipRadius = jointPoses[i].Radius;
                        }

                        unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                    }

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
        }
コード例 #19
0
        /// <summary>
        /// Reads position and rotation from the XR input device, updates tracking
        /// state and pose, raises the corresponding input system events, and then
        /// refreshes all six-degree-of-freedom interaction mappings.
        /// </summary>
        /// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
        protected virtual void UpdateSixDofData(InputDevice inputDevice)
        {
            var lastState = TrackingState;

            LastControllerPose = CurrentControllerPose;

            // Check for position and rotation.
            IsPositionAvailable   = inputDevice.TryGetFeatureValue(CommonUsages.devicePosition, out CurrentControllerPosition);
            IsPositionApproximate = false;

            IsRotationAvailable = inputDevice.TryGetFeatureValue(CommonUsages.deviceRotation, out CurrentControllerRotation);

            // Devices are considered tracked if we receive position OR rotation data from the sensors.
            TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;

            // Fold in the playspace transform to put the controller pose into world space.
            CurrentControllerPosition = MixedRealityPlayspace.TransformPoint(CurrentControllerPosition);
            CurrentControllerRotation = MixedRealityPlayspace.Rotation * CurrentControllerRotation;

            CurrentControllerPose.Position = CurrentControllerPosition;
            CurrentControllerPose.Rotation = CurrentControllerRotation;

            // Raise input system events if it is enabled.
            if (lastState != TrackingState)
            {
                CoreServices.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
            }

            // Only raise pose events while tracked and when the pose actually changed.
            if (TrackingState == TrackingState.Tracked && LastControllerPose != CurrentControllerPose)
            {
                if (IsPositionAvailable && IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, CurrentControllerPose);
                }
                else if (IsPositionAvailable && !IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourcePositionChanged(InputSource, this, CurrentControllerPosition);
                }
                else if (!IsPositionAvailable && IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourceRotationChanged(InputSource, this, CurrentControllerRotation);
                }
            }

            // NOTE: 'i < Interactions?.Length' is a lifted nullable comparison — it
            // evaluates to false when Interactions is null, so the loop is skipped safely.
            for (int i = 0; i < Interactions?.Length; i++)
            {
                switch (Interactions[i].AxisType)
                {
                case AxisType.SixDof:
                    UpdatePoseData(Interactions[i], inputDevice);
                    break;
                }
            }
        }
コード例 #20
0
        /// <summary>
        /// Add a keyframe for one hand joint, creating the joint's curve set on first use.
        /// </summary>
        private void AddHandJointKey(float time, TrackedHandJoint joint, MixedRealityPose jointPose, Dictionary <TrackedHandJoint, PoseCurves> jointCurves, float positionThreshold, float rotationThreshold)
        {
            PoseCurves curves;
            if (!jointCurves.TryGetValue(joint, out curves))
            {
                // First keyframe for this joint: allocate and register its curve set.
                curves = new PoseCurves();
                jointCurves.Add(joint, curves);
            }

            AddPoseKeyFiltered(curves, time, jointPose, positionThreshold, rotationThreshold);

            // Grow the recording duration so it always covers the latest keyframe.
            duration = Mathf.Max(duration, time);
        }
コード例 #21
0
        /// <summary>
        /// Record a keyframe at the given time for the main camera and tracked input devices.
        /// </summary>
        private void RecordKeyframe()
        {
            float time = Time.time;
            var profile = InputRecordingProfile;

            RecordInputHandData(Handedness.Left);
            RecordInputHandData(Handedness.Right);

            var mainCamera = CameraCache.Main;
            if (mainCamera)
            {
                // Capture the camera transform as a pose keyframe, filtered by the
                // profile's position/rotation thresholds.
                var cameraTransform = mainCamera.transform;
                var cameraPose = new MixedRealityPose(cameraTransform.position, cameraTransform.rotation);
                recordingBuffer.AddCameraPoseKey(time, cameraPose, profile.CameraPositionThreshold, profile.CameraRotationThreshold);
            }
        }
コード例 #22
0
        /// <summary>
        /// Stores the latest pose for the given hand joint.
        /// </summary>
        /// <param name="joint">The joint being updated.</param>
        /// <param name="position">The joint position.</param>
        /// <param name="rotation">The joint rotation.</param>
        protected void UpdateJointPose(TrackedHandJoint joint, Vector3 position, Quaternion rotation)
        {
            // The dictionary indexer inserts or overwrites in a single lookup,
            // replacing the previous ContainsKey + Add/assign pattern (two lookups).
            jointPoses[joint] = new MixedRealityPose(position, rotation);
        }
コード例 #23
0
        /// <summary>
        /// Returns the squared distance between the index finger tip and the thumb tip.
        /// Joints that cannot be resolved contribute the pose TryGetJoint reports on failure.
        /// </summary>
        private float IndexThumbSqrMagnitude()
        {
            // Return values are intentionally ignored: TryGetJoint always assigns
            // the out parameter, and this is a best-effort measurement.
            TryGetJoint(TrackedHandJoint.IndexTip, out MixedRealityPose indexPose);
            TryGetJoint(TrackedHandJoint.ThumbTip, out MixedRealityPose thumbPose);

            return (indexPose.Position - thumbPose.Position).sqrMagnitude;
        }
コード例 #24
0
        /// <summary>
        /// Checks whether the index tip and thumb tip are separated by more than
        /// the configured squared-distance threshold.
        /// </summary>
        private bool AreIndexAndThumbFarApart()
        {
            // Return values are intentionally ignored: TryGetJoint always assigns
            // the out parameter, and this is a best-effort measurement.
            TryGetJoint(TrackedHandJoint.IndexTip, out MixedRealityPose indexPose);
            TryGetJoint(TrackedHandJoint.ThumbTip, out MixedRealityPose thumbPose);

            Vector3 separation = indexPose.Position - thumbPose.Position;
            return separation.sqrMagnitude > IndexThumbSqrMagnitudeThreshold;
        }
コード例 #25
0
        /// <summary>
        /// Verifies that assigning PoseData flips both Changed and Updated to true
        /// exactly once, and that both reset to false on the following query.
        /// </summary>
        public void Test_07_02_MixedRealityPoseChangedAndUpdated()
        {
            var interaction  = InitializeMixedRealityPoseInteractionMapping();
            var initialValue = interaction.PoseData;
            var testValue1   = new MixedRealityPose(Vector3.up, Quaternion.identity);
            var testValue2   = new MixedRealityPose(Vector3.one, new Quaternion(45f, 45f, 45f, 45f));

            interaction.PoseData = testValue1;

            // Make sure the first query after value assignment is true
            Assert.IsTrue(interaction.Changed);
            Assert.IsTrue(interaction.Updated);

            var setValue1 = interaction.PoseData;

            // Check the values
            Assert.True(setValue1 == testValue1);
            // Make sure the second time we query it's false
            // BUGFIX: the second assertion previously re-checked Changed, leaving
            // Updated unverified; it now mirrors the IsTrue pair above.
            Assert.IsFalse(interaction.Changed);
            Assert.IsFalse(interaction.Updated);

            interaction.PoseData = testValue2;

            // Make sure the first query after value assignment is true
            Assert.IsTrue(interaction.Changed);
            Assert.IsTrue(interaction.Updated);

            var setValue2 = interaction.PoseData;

            // Check the values
            Assert.True(setValue2 == testValue2);
            // Make sure the second time we query it's false
            Assert.IsFalse(interaction.Changed);
            Assert.IsFalse(interaction.Updated);

            interaction.PoseData = initialValue;

            // Make sure the first query after value assignment is true
            Assert.IsTrue(interaction.Changed);
            Assert.IsTrue(interaction.Updated);

            var setValue3 = interaction.PoseData;

            // Check the values
            Assert.True(setValue3 == initialValue);
            // Make sure the second time we query it's false
            Assert.IsFalse(interaction.Changed);
            Assert.IsFalse(interaction.Updated);
        }
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="inputDevice">The Unity XR <see cref="InputDevice"/> to read hand data from (only used on the non-OpenXR path).</param>
        private void UpdateHandData(InputDevice inputDevice)
        {
            using (UpdateHandDataPerfMarker.Auto())
            {
                // Refresh the visual hand mesh first, independent of joint tracking.
                handMeshProvider?.UpdateHandMesh();

#if MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)
                // OpenXR path: query all hand joints in one call via the Mixed Reality OpenXR hand tracker.
                if (handTracker != null && handTracker.TryLocateHandJoints(FrameTime.OnUpdate, locations))
                {
                    foreach (HandJoint handJoint in HandJoints)
                    {
                        HandJointLocation handJointLocation = locations[(int)handJoint];

                        // We want input sources to follow the playspace, so fold in the playspace transform here to
                        // put the pose into world space.
#if MSFT_OPENXR_0_2_0_OR_NEWER
                        // 0.2.0+ exposes the joint as a combined Pose.
                        Vector3    position = MixedRealityPlayspace.TransformPoint(handJointLocation.Pose.position);
                        Quaternion rotation = MixedRealityPlayspace.Rotation * handJointLocation.Pose.rotation;
#else
                        // Older plugin versions expose Position/Rotation separately.
                        Vector3    position = MixedRealityPlayspace.TransformPoint(handJointLocation.Position);
                        Quaternion rotation = MixedRealityPlayspace.Rotation * handJointLocation.Rotation;
#endif // MSFT_OPENXR_0_2_0_OR_NEWER

                        unityJointPoses[ConvertToTrackedHandJoint(handJoint)] = new MixedRealityPose(position, rotation);
                    }
#else
                // Unity XR path: read the hand feature and walk each finger's bone chain.
                // NOTE: the closing brace of the foreach/if pairs is shared across the #if branches above.
                if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out Hand hand))
                {
                    foreach (HandFinger finger in handFingers)
                    {
                        // The root bone maps to the wrist joint.
                        if (hand.TryGetRootBone(out Bone rootBone))
                        {
                            ReadHandJoint(TrackedHandJoint.Wrist, rootBone);
                        }

                        if (hand.TryGetFingerBones(finger, fingerBones))
                        {
                            for (int i = 0; i < fingerBones.Count; i++)
                            {
                                ReadHandJoint(ConvertToTrackedHandJoint(finger, i), fingerBones[i]);
                            }
                        }
                    }
#endif // MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)

                    // Push the freshly gathered joint poses to the hand definition (raises joint-updated events).
                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
        }
Code example #27
0
        /// <summary>
        /// Records the pose for a single tracked hand joint and notifies the input system.
        /// </summary>
        /// <param name="joint">The hand joint whose pose is being updated.</param>
        /// <param name="position">World-space position of the joint.</param>
        /// <param name="rotation">World-space rotation of the joint.</param>
        protected void UpdateJointPose(TrackedHandJoint joint, Vector3 position, Quaternion rotation)
        {
            // The dictionary indexer both inserts new keys and overwrites existing ones,
            // so the previous ContainsKey + Add/assign pattern was a redundant double lookup.
            jointPoses[joint] = new MixedRealityPose(position, rotation);

            // Raise the joint-updated event if an input system is available.
            CoreServices.InputSystem?.RaiseHandJointsUpdated(InputSource, ControllerHandedness, jointPoses);
        }
Code example #28
0
        /// <summary>
        /// Reads a single bone's pose and stores it as a tracked hand joint in world space.
        /// The joint is only recorded when both position and rotation could be retrieved.
        /// </summary>
        /// <param name="trackedHandJoint">The hand joint the bone maps to.</param>
        /// <param name="bone">The Unity XR bone to read the pose from.</param>
        private void ReadHandJoint(TrackedHandJoint trackedHandJoint, Bone bone)
        {
            // Query both halves of the pose up front (no short-circuit: both calls always run).
            bool hasPosition = bone.TryGetPosition(out Vector3 bonePosition);
            bool hasRotation = bone.TryGetRotation(out Quaternion boneRotation);

            if (!hasPosition || !hasRotation)
            {
                return;
            }

            // We want input sources to follow the playspace, so fold in the playspace transform
            // here to express the joint pose in world space.
            Vector3 worldPosition = MixedRealityPlayspace.TransformPoint(bonePosition);
            Quaternion worldRotation = MixedRealityPlayspace.Rotation * boneRotation;

            unityJointPoses[trackedHandJoint] = new MixedRealityPose(worldPosition, worldRotation);
        }
Code example #29
0
        /// <summary>
        /// Initializes the grab state: caches the grab centroid expressed in both the
        /// pointer's and the object's local frames, plus the grab-to-object offset.
        /// </summary>
        /// <param name="pointerCentroidPose">Combined pose of the grabbing pointer(s).</param>
        /// <param name="grabCentroid">World-space centroid of the grab points.</param>
        /// <param name="objectPose">Pose of the manipulated object.</param>
        /// <param name="objectScale">Scale of the manipulated object.</param>
        public void Setup(MixedRealityPose pointerCentroidPose, Vector3 grabCentroid, MixedRealityPose objectPose, Vector3 objectScale)
        {
            pointerRefDistance = GetDistanceToBody(pointerCentroidPose);

            // A zero distance means the pointer coincides with the body/head reference.
            pointerPosIndependentOfHead = pointerRefDistance != 0;

            // Grab centroid expressed in the pointer's local frame.
            Quaternion pointerInverseRotation = Quaternion.Inverse(pointerCentroidPose.Rotation);
            pointerLocalGrabPoint = pointerInverseRotation * (grabCentroid - pointerCentroidPose.Position);

            // Grab centroid expressed in the object's local frame, divided by scale so the
            // stored point remains valid if the object is rescaled later.
            Vector3 objectSpaceGrabOffset = Quaternion.Inverse(objectPose.Rotation) * (grabCentroid - objectPose.Position);
            objectLocalGrabPoint = objectSpaceGrabOffset.Div(objectScale);

            // World-space offset from the grab centroid to the object's origin.
            grabToObject = objectPose.Position - grabCentroid;
        }
Code example #30
0
        /// <summary>
        /// Computes the object's new position from the current pointer pose.
        /// </summary>
        /// <param name="pointerCentroidPose">Combined pose of the grabbing pointer(s).</param>
        /// <param name="objectRotation">Current rotation of the manipulated object.</param>
        /// <param name="objectScale">Current scale of the manipulated object.</param>
        /// <param name="usePointerRotation">
        /// When true, the object position follows the pointer's rotation; when false, only the
        /// pointer's translation/rotation of the original grab offset is applied.
        /// </param>
        /// <returns>A Vector3 describing the desired position</returns>
        public Vector3 Update(MixedRealityPose pointerCentroidPose, Quaternion objectRotation, Vector3 objectScale, bool usePointerRotation)
        {
            if (!usePointerRotation)
            {
                // Keep the grab offset rigid: rotate the stored pointer-local grab point into
                // world space and re-apply the original grab-to-object offset.
                return pointerCentroidPose.Position + (pointerCentroidPose.Rotation * pointerLocalGrabPoint + grabToObject);
            }

            // Rotate the stored pointer-local grab point by the current pointer rotation,
            // then back out the object-local grab offset at the object's current rotation/scale.
            Vector3 worldPointerToGrab = pointerCentroidPose.Rotation * pointerLocalGrabPoint;
            Vector3 scaledObjectGrabOffset = Vector3.Scale(objectLocalGrabPoint, objectScale);

            return worldPointerToGrab - objectRotation * scaledObjectGrabOffset + pointerCentroidPose.Position;
        }