/// <inheritdoc />
        public override void Update()
        {
            using (UpdatePerfMarker.Auto())
            {
                if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || !eyesApiAvailable)
                {
                    return;
                }

                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    var eyes = pointerPose.Eyes;
                    if (eyes != null)
                    {
                        Service?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, eyes.IsCalibrationValid);

                        if (eyes.Gaze.HasValue)
                        {
                            Vector3 origin    = MixedRealityPlayspace.TransformPoint(eyes.Gaze.Value.Origin.ToUnityVector3());
                            Vector3 direction = MixedRealityPlayspace.TransformDirection(eyes.Gaze.Value.Direction.ToUnityVector3());

                            Ray newGaze = new Ray(origin, direction);

                            if (SmoothEyeTracking)
                            {
                                newGaze = SmoothGaze(newGaze);
                            }

                            Service?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                        }
                    }
                }
            }
        }
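SmoothGaze is not shown in these snippets. A minimal sketch of what such a smoothing step could look like, assuming a simple exponential blend of the previous gaze ray toward the newly sampled one (the field names and blend factor below are assumptions, not the original implementation):

        // Hypothetical gaze smoothing (illustrative only): exponentially blend the previous
        // ray toward the new sample to reduce jitter; a lower factor means heavier smoothing.
        private Ray smoothedGaze;
        private bool hasSmoothedGaze;
        private const float smoothingFactor = 0.25f; // assumed blend factor

        private Ray SmoothGaze(Ray newGaze)
        {
            if (!hasSmoothedGaze)
            {
                smoothedGaze = newGaze;
                hasSmoothedGaze = true;
                return smoothedGaze;
            }

            Vector3 origin = Vector3.Lerp(smoothedGaze.origin, newGaze.origin, smoothingFactor);
            Vector3 direction = Vector3.Slerp(smoothedGaze.direction, newGaze.direction, smoothingFactor).normalized;

            smoothedGaze = new Ray(origin, direction);
            return smoothedGaze;
        }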
        /// <summary>
        /// Update the source input from the device.
        /// </summary>
        /// <param name="inputDevice">The InputDevice retrieved from the platform.</param>
        public void UpdateSourceData(InputDevice inputDevice)
        {
            using (UpdateSourceDataPerfMarker.Auto())
            {
                var lastState = TrackingState;
                LastControllerPose = CurrentControllerPose;

                // Check for position and rotation.
                IsPositionAvailable   = inputDevice.TryGetFeatureValue(CommonUsages.devicePosition, out CurrentControllerPosition);
                IsPositionApproximate = false;

                IsRotationAvailable = inputDevice.TryGetFeatureValue(CommonUsages.deviceRotation, out CurrentControllerRotation);

                // Devices are considered tracked if we receive position OR rotation data from the sensors.
                TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;

                CurrentControllerPosition = MixedRealityPlayspace.TransformPoint(CurrentControllerPosition);
                CurrentControllerRotation = MixedRealityPlayspace.Rotation * CurrentControllerRotation;

                CurrentControllerPose.Position = CurrentControllerPosition;
                CurrentControllerPose.Rotation = CurrentControllerRotation;

                // Raise input system events if it is enabled.
                if (lastState != TrackingState)
                {
                    CoreServices.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
                }
            }
        }
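For context, a hedged sketch of how UpdateSourceData might be driven each frame, assuming the Unity XR SDK InputDevices API and the usual System.Collections.Generic and UnityEngine.XR usings (the manager method and the activeControllers map are assumptions):

        // Hypothetical per-frame driver (illustrative only): query held controllers and
        // forward each InputDevice to its corresponding controller instance.
        private readonly List<InputDevice> detectedDevices = new List<InputDevice>();

        private void UpdateAllControllers()
        {
            InputDevices.GetDevicesWithCharacteristics(
                InputDeviceCharacteristics.Controller | InputDeviceCharacteristics.HeldInHand,
                detectedDevices);

            foreach (InputDevice device in detectedDevices)
            {
                // activeControllers: assumed Dictionary<InputDevice, ...> maintained by the device manager.
                if (activeControllers.TryGetValue(device, out var controller))
                {
                    controller.UpdateSourceData(device);
                }
            }
        }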
        /// <summary>
        /// Update spatial pointer and spatial grip data.
        /// </summary>
        protected override void UpdatePoseData(MixedRealityInteractionMapping interactionMapping, InputDevice inputDevice)
        {
            Debug.Assert(interactionMapping.AxisType == AxisType.SixDof);

            base.UpdatePoseData(interactionMapping, inputDevice);

            // Update the interaction data source
            switch (interactionMapping.InputType)
            {
            case DeviceInputType.SpatialPointer:
                if (inputDevice.TryGetFeatureValue(WindowsMRUsages.PointerPosition, out currentPointerPosition))
                {
                    currentPointerPose.Position = MixedRealityPlayspace.TransformPoint(currentPointerPosition);
                }

                if (inputDevice.TryGetFeatureValue(WindowsMRUsages.PointerRotation, out currentPointerRotation))
                {
                    currentPointerPose.Rotation = MixedRealityPlayspace.Rotation * currentPointerRotation;
                }

                interactionMapping.PoseData = currentPointerPose;

                // If our value changed raise it.
                if (interactionMapping.Changed)
                {
                    // Raise input system event if it's enabled
                    CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction, interactionMapping.PoseData);
                }
                break;

            default:
                return;
            }
        }
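WindowsMRUsages is not defined in these snippets. If it is a set of custom feature usages, the declarations would look roughly like this (the usage strings are assumptions and must match what the device actually reports):

        // Hypothetical declaration of the custom usages referenced above (string names are assumptions).
        internal static class WindowsMRUsages
        {
            public static readonly InputFeatureUsage<Vector3> PointerPosition =
                new InputFeatureUsage<Vector3>("PointerPosition");

            public static readonly InputFeatureUsage<Quaternion> PointerRotation =
                new InputFeatureUsage<Quaternion>("PointerRotation");
        }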
        private MixedRealityPose GetJointMixedRealityPose(WebXRJoint joint)
        {
            var position = MixedRealityPlayspace.TransformPoint(joint.Position);
            var rotation = MixedRealityPlayspace.Rotation * joint.Rotation;

            return new MixedRealityPose(position, rotation);
        }
        /// <summary>
        /// Update the spatial pointer input from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdatePointerData(InteractionSourceState interactionSourceState, MixedRealityInteractionMapping interactionMapping)
        {
            using (UpdatePointerDataPerfMarker.Auto())
            {
                if (interactionSourceState.source.supportsPointing)
                {
                    interactionSourceState.sourcePose.TryGetPosition(out currentPointerPosition, InteractionSourceNode.Pointer);
                    interactionSourceState.sourcePose.TryGetRotation(out currentPointerRotation, InteractionSourceNode.Pointer);

                    // We want the source to follow the Playspace, so fold in the playspace transform here to
                    // put the source pose into world space.
                    currentPointerPose.Position = MixedRealityPlayspace.TransformPoint(currentPointerPosition);
                    currentPointerPose.Rotation = MixedRealityPlayspace.Rotation * currentPointerRotation;
                }

                // Update the interaction data source
                interactionMapping.PoseData = currentPointerPose;

                // If our value changed raise it.
                if (interactionMapping.Changed)
                {
                    // Raise input system event if it's enabled
                    CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction, currentPointerPose);
                }
            }
        }
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InputDevice inputDevice)
        {
            using (UpdateHandDataPerfMarker.Auto())
            {
#if WINDOWS_UWP && WMR_ENABLED
                XRSubsystemHelpers.InputSubsystem?.GetCurrentSourceStates(states);

                foreach (SpatialInteractionSourceState sourceState in states)
                {
                    if (sourceState.Source.Handedness.ToMRTKHandedness() == ControllerHandedness)
                    {
                        handMeshProvider?.UpdateHandMesh(sourceState);
                        break;
                    }
                }
#endif // WINDOWS_UWP && WMR_ENABLED

                Hand hand;
                if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out hand))
                {
                    foreach (HandFinger finger in handFingers)
                    {
                        if (hand.TryGetFingerBones(finger, fingerBones))
                        {
                            for (int i = 0; i < fingerBones.Count; i++)
                            {
                                TrackedHandJoint trackedHandJoint = ConvertToTrackedHandJoint(finger, i);
                                Bone             bone             = fingerBones[i];

                                Vector3    position = Vector3.zero;
                                Quaternion rotation = Quaternion.identity;

                                bool positionAvailable = bone.TryGetPosition(out position);
                                bool rotationAvailable = bone.TryGetRotation(out rotation);

                                // If either position or rotation is available, use both pieces of data given.
                                // This might result in using a zeroed out position or rotation. Most likely,
                                // either both are available or both are unavailable.
                                if (positionAvailable || rotationAvailable)
                                {
                                    // We want input sources to follow the playspace, so fold in the playspace transform here to
                                    // put the controller pose into world space.
                                    position = MixedRealityPlayspace.TransformPoint(position);
                                    rotation = MixedRealityPlayspace.Rotation * rotation;

                                    unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                                }
                            }

                            // Unity doesn't provide a palm joint, so we synthesize one here
                            MixedRealityPose palmPose = CurrentControllerPose;
                            palmPose.Rotation *= (ControllerHandedness == Handedness.Left ? leftPalmOffset : rightPalmOffset);
                            unityJointPoses[TrackedHandJoint.Palm] = palmPose;
                        }
                    }

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
        }
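The backing fields this method relies on are not shown. A plausible set of declarations, assuming the Unity XR SDK hand types and the usual System, System.Collections.Generic, and UnityEngine.XR usings (these exact declarations are assumptions):

        // Hypothetical fields assumed by UpdateHandData above (not from the original source).
        private static readonly HandFinger[] handFingers = (HandFinger[])Enum.GetValues(typeof(HandFinger));
        private readonly List<Bone> fingerBones = new List<Bone>();
        private readonly Dictionary<TrackedHandJoint, MixedRealityPose> unityJointPoses =
            new Dictionary<TrackedHandJoint, MixedRealityPose>();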
        /// <summary>
        /// Update the spatial grip input from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        /// <param name="interactionMapping"></param>
        private void UpdateGripData(InteractionSourceState interactionSourceState, MixedRealityInteractionMapping interactionMapping)
        {
            switch (interactionMapping.AxisType)
            {
            case AxisType.SixDof:
            {
                interactionSourceState.sourcePose.TryGetPosition(out currentGripPosition, InteractionSourceNode.Grip);
                interactionSourceState.sourcePose.TryGetRotation(out currentGripRotation, InteractionSourceNode.Grip);

                currentGripPose.Position = MixedRealityPlayspace.TransformPoint(currentGripPosition);
                currentGripPose.Rotation = Quaternion.Euler(MixedRealityPlayspace.TransformDirection(currentGripRotation.eulerAngles));

                // Update the interaction data source
                interactionMapping.PoseData = currentGripPose;

                // If our value changed raise it.
                if (interactionMapping.Changed)
                {
                    // Raise input system event if it's enabled
                    InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction, currentGripPose);
                }
            }
            break;
            }
        }
        /// <inheritdoc />
        protected override void UpdatePoseData(MixedRealityInteractionMapping interactionMapping, InputDevice inputDevice)
        {
            using (UpdatePoseDataPerfMarker.Auto())
            {
                switch (interactionMapping.InputType)
                {
                case DeviceInputType.SpatialPointer:
                    if (inputDevice.TryGetFeatureValue(CustomUsages.PointerPosition, out currentPointerPosition))
                    {
                        currentPointerPose.Position = MixedRealityPlayspace.TransformPoint(currentPointerPosition);
                    }

                    if (inputDevice.TryGetFeatureValue(CustomUsages.PointerRotation, out currentPointerRotation))
                    {
                        currentPointerPose.Rotation = MixedRealityPlayspace.Rotation * currentPointerRotation;
                    }

                    interactionMapping.PoseData = currentPointerPose;

                    // If our value changed raise it.
                    if (interactionMapping.Changed)
                    {
                        // Raise input system event if it's enabled
                        CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction, interactionMapping.PoseData);
                    }
                    break;

                default:
                    base.UpdatePoseData(interactionMapping, inputDevice);
                    break;
                }
            }
        }
        /// <summary>
        /// Update the "Controller" input from the device
        /// </summary>
        protected void UpdateControllerData(XRNodeState state)
        {
            Profiler.BeginSample("[MRTK] GenericOpenVRController.UpdateControllerData");

            var lastState = TrackingState;

            LastControllerPose = CurrentControllerPose;

            if (nodeType == XRNode.LeftHand || nodeType == XRNode.RightHand)
            {
                // The source is either a hand or a controller that supports pointing.
                // We can now check for position and rotation.
                IsPositionAvailable   = state.TryGetPosition(out CurrentControllerPosition);
                IsPositionApproximate = false;

                IsRotationAvailable = state.TryGetRotation(out CurrentControllerRotation);

                // Devices are considered tracked if we receive position OR rotation data from the sensors.
                TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;

                CurrentControllerPosition = MixedRealityPlayspace.TransformPoint(CurrentControllerPosition);
                CurrentControllerRotation = MixedRealityPlayspace.Rotation * CurrentControllerRotation;
            }
            else
            {
                // The input source does not support tracking.
                TrackingState = TrackingState.NotApplicable;
            }

            CurrentControllerPose.Position = CurrentControllerPosition;
            CurrentControllerPose.Rotation = CurrentControllerRotation;

            // Raise input system events if it is enabled.
            if (lastState != TrackingState)
            {
                CoreServices.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
            }

            if (TrackingState == TrackingState.Tracked && LastControllerPose != CurrentControllerPose)
            {
                if (IsPositionAvailable && IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, CurrentControllerPose);
                }
                else if (IsPositionAvailable && !IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourcePositionChanged(InputSource, this, CurrentControllerPosition);
                }
                else if (!IsPositionAvailable && IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourceRotationChanged(InputSource, this, CurrentControllerRotation);
                }
            }

            Profiler.EndSample(); // UpdateControllerData
        }
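A hedged sketch of how UpdateControllerData might be fed each frame, assuming the legacy XRNodeState API (the wrapping method and nodeStates list are assumptions):

        // Hypothetical per-frame driver (illustrative only): enumerate current node states and
        // forward the one matching this controller's node type.
        private static readonly List<XRNodeState> nodeStates = new List<XRNodeState>();

        public void UpdateController()
        {
            InputTracking.GetNodeStates(nodeStates);

            for (int i = 0; i < nodeStates.Count; i++)
            {
                if (nodeStates[i].nodeType == nodeType)
                {
                    UpdateControllerData(nodeStates[i]);
                    break;
                }
            }
        }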
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InteractionSourceState interactionSourceState)
        {
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
            // Articulated hand support is only present in the 18362 version and beyond Windows
            // SDK (which contains the V8 drop of the Universal API Contract). In particular,
            // the HandPose related APIs are only present on this version and above.
            if (!articulatedHandApiAvailable)
            {
                return;
            }

            Profiler.BeginSample("[MRTK] WindowsMixedRealityArticulatedHand.UpdateHandData");

            PerceptionTimestamp perceptionTimestamp = PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now);
            IReadOnlyList <SpatialInteractionSourceState> sources = SpatialInteractionManager?.GetDetectedSourcesAtTimestamp(perceptionTimestamp);
            foreach (SpatialInteractionSourceState sourceState in sources)
            {
                if (sourceState.Source.Id.Equals(interactionSourceState.source.id))
                {
#if WINDOWS_UWP
                    handDefinition?.UpdateHandMesh(sourceState);
#endif // WINDOWS_UWP

                    HandPose handPose = sourceState.TryGetHandPose();

                    if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
                    {
                        for (int i = 0; i < jointPoses.Length; i++)
                        {
                            Vector3    jointPosition    = jointPoses[i].Position.ToUnityVector3();
                            Quaternion jointOrientation = jointPoses[i].Orientation.ToUnityQuaternion();

                            // We want the joints to follow the playspace, so fold in the playspace transform here to
                            // put the joint pose into world space.
                            jointPosition    = MixedRealityPlayspace.TransformPoint(jointPosition);
                            jointOrientation = MixedRealityPlayspace.Rotation * jointOrientation;

                            TrackedHandJoint handJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                            if (handJoint == TrackedHandJoint.IndexTip)
                            {
                                lastIndexTipRadius = jointPoses[i].Radius;
                            }

                            unityJointPoses[handJoint] = new MixedRealityPose(jointPosition, jointOrientation);
                        }

                        handDefinition?.UpdateHandJoints(unityJointPoses);
                    }
                    break;
                }
            }

            Profiler.EndSample(); // UpdateHandData
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
        }
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InputDevice inputDevice)
        {
            Profiler.BeginSample("[MRTK] WindowsMixedRealityXRSDKArticulatdHand.UpdateHandData");

#if WINDOWS_UWP && WMR_ENABLED
            XRSDKSubsystemHelpers.InputSubsystem?.GetCurrentSourceStates(states);

            foreach (SpatialInteractionSourceState sourceState in states)
            {
                if (sourceState.Source.Handedness.ToMRTKHandedness() == ControllerHandedness)
                {
                    handDefinition?.UpdateHandMesh(sourceState);
                    break;
                }
            }
#endif // WINDOWS_UWP && WMR_ENABLED

            Hand hand;
            if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out hand))
            {
                foreach (HandFinger finger in handFingers)
                {
                    if (hand.TryGetFingerBones(finger, fingerBones))
                    {
                        for (int i = 0; i < fingerBones.Count; i++)
                        {
                            TrackedHandJoint trackedHandJoint = ConvertToTrackedHandJoint(finger, i);
                            Bone             bone             = fingerBones[i];

                            Vector3    position = Vector3.zero;
                            Quaternion rotation = Quaternion.identity;

                            bool positionAvailable = bone.TryGetPosition(out position);
                            bool rotationAvailable = bone.TryGetRotation(out rotation);

                            // Evaluate both reads up front so a successful position read does not
                            // short-circuit the rotation read; use whatever data is available.
                            if (positionAvailable || rotationAvailable)
                            {
                                // We want input sources to follow the playspace, so fold in the playspace transform here to
                                // put the controller pose into world space.
                                position = MixedRealityPlayspace.TransformPoint(position);
                                rotation = MixedRealityPlayspace.Rotation * rotation;

                                unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                            }
                        }

                        // Unity doesn't provide a palm joint, so we synthesize one here
                        MixedRealityPose palmPose = CurrentControllerPose;
                        palmPose.Rotation *= (ControllerHandedness == Handedness.Left ? leftPalmOffset : rightPalmOffset);
                        unityJointPoses[TrackedHandJoint.Palm] = palmPose;
                    }
                }

                handDefinition?.UpdateHandJoints(unityJointPoses);
            }

            Profiler.EndSample(); // UpdateHandData
        }
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InteractionSourceState interactionSourceState)
        {
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
            using (UpdateHandDataPerfMarker.Auto())
            {
                // Articulated hand support is only present in the 18362 version and beyond Windows
                // SDK (which contains the V8 drop of the Universal API Contract). In particular,
                // the HandPose related APIs are only present on this version and above.
                if (!articulatedHandApiAvailable)
                {
                    return;
                }

                SpatialInteractionSourceState sourceState = interactionSourceState.source.GetSpatialInteractionSourceState();

                if (sourceState == null)
                {
                    return;
                }

#if WINDOWS_UWP
                handMeshProvider?.UpdateHandMesh(sourceState);
#endif // WINDOWS_UWP

                HandPose handPose = sourceState.TryGetHandPose();

                if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
                {
                    for (int i = 0; i < jointPoses.Length; i++)
                    {
                        Vector3    position = jointPoses[i].Position.ToUnityVector3();
                        Quaternion rotation = jointPoses[i].Orientation.ToUnityQuaternion();

                        // We want the joints to follow the playspace, so fold in the playspace transform here to
                        // put the joint pose into world space.
                        position = MixedRealityPlayspace.TransformPoint(position);
                        rotation = MixedRealityPlayspace.Rotation * rotation;

                        TrackedHandJoint trackedHandJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                        if (trackedHandJoint == TrackedHandJoint.IndexTip)
                        {
                            lastIndexTipRadius = jointPoses[i].Radius;
                        }

                        unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
                    }

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
        }
        protected virtual void UpdateSixDofData(InputDevice inputDevice)
        {
            var lastState = TrackingState;

            LastControllerPose = CurrentControllerPose;

            // Check for position and rotation.
            IsPositionAvailable   = inputDevice.TryGetFeatureValue(CommonUsages.devicePosition, out CurrentControllerPosition);
            IsPositionApproximate = false;

            IsRotationAvailable = inputDevice.TryGetFeatureValue(CommonUsages.deviceRotation, out CurrentControllerRotation);

            // Devices are considered tracked if we receive position OR rotation data from the sensors.
            TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;

            CurrentControllerPosition = MixedRealityPlayspace.TransformPoint(CurrentControllerPosition);
            CurrentControllerRotation = MixedRealityPlayspace.Rotation * CurrentControllerRotation;

            CurrentControllerPose.Position = CurrentControllerPosition;
            CurrentControllerPose.Rotation = CurrentControllerRotation;

            // Raise input system events if it is enabled.
            if (lastState != TrackingState)
            {
                CoreServices.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
            }

            if (TrackingState == TrackingState.Tracked && LastControllerPose != CurrentControllerPose)
            {
                if (IsPositionAvailable && IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, CurrentControllerPose);
                }
                else if (IsPositionAvailable && !IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourcePositionChanged(InputSource, this, CurrentControllerPosition);
                }
                else if (!IsPositionAvailable && IsRotationAvailable)
                {
                    CoreServices.InputSystem?.RaiseSourceRotationChanged(InputSource, this, CurrentControllerRotation);
                }
            }

            for (int i = 0; i < Interactions?.Length; i++)
            {
                switch (Interactions[i].AxisType)
                {
                case AxisType.SixDof:
                    UpdatePoseData(Interactions[i], inputDevice);
                    break;
                }
            }
        }
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InputDevice inputDevice)
        {
            using (UpdateHandDataPerfMarker.Auto())
            {
                handMeshProvider?.UpdateHandMesh();

#if MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)
                if (handTracker != null && handTracker.TryLocateHandJoints(FrameTime.OnUpdate, locations))
                {
                    foreach (HandJoint handJoint in HandJoints)
                    {
                        HandJointLocation handJointLocation = locations[(int)handJoint];

                        // We want input sources to follow the playspace, so fold in the playspace transform here to
                        // put the pose into world space.
#if MSFT_OPENXR_0_2_0_OR_NEWER
                        Vector3    position = MixedRealityPlayspace.TransformPoint(handJointLocation.Pose.position);
                        Quaternion rotation = MixedRealityPlayspace.Rotation * handJointLocation.Pose.rotation;
#else
                        Vector3    position = MixedRealityPlayspace.TransformPoint(handJointLocation.Position);
                        Quaternion rotation = MixedRealityPlayspace.Rotation * handJointLocation.Rotation;
#endif // MSFT_OPENXR_0_2_0_OR_NEWER

                        unityJointPoses[ConvertToTrackedHandJoint(handJoint)] = new MixedRealityPose(position, rotation);
                    }
#else
                if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out Hand hand))
                {
                    foreach (HandFinger finger in handFingers)
                    {
                        if (hand.TryGetRootBone(out Bone rootBone))
                        {
                            ReadHandJoint(TrackedHandJoint.Wrist, rootBone);
                        }

                        if (hand.TryGetFingerBones(finger, fingerBones))
                        {
                            for (int i = 0; i < fingerBones.Count; i++)
                            {
                                ReadHandJoint(ConvertToTrackedHandJoint(finger, i), fingerBones[i]);
                            }
                        }
                    }
#endif // MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)

                    handDefinition?.UpdateHandJoints(unityJointPoses);
                }
            }
        }
        private void ReadHandJoint(TrackedHandJoint trackedHandJoint, Bone bone)
        {
            bool positionAvailable = bone.TryGetPosition(out Vector3 position);
            bool rotationAvailable = bone.TryGetRotation(out Quaternion rotation);

            if (positionAvailable && rotationAvailable)
            {
                // We want input sources to follow the playspace, so fold in the playspace transform here to
                // put the pose into world space.
                position = MixedRealityPlayspace.TransformPoint(position);
                rotation = MixedRealityPlayspace.Rotation * rotation;

                unityJointPoses[trackedHandJoint] = new MixedRealityPose(position, rotation);
            }
        }
        /// <inheritdoc />
        public override void Update()
        {
            using (UpdatePerfMarker.Auto())
            {
                if (!eyeTrackingDevice.isValid)
                {
                    InputDevices.GetDevicesWithCharacteristics(InputDeviceCharacteristics.EyeTracking, InputDeviceList);
                    if (InputDeviceList.Count > 0)
                    {
                        eyeTrackingDevice = InputDeviceList[0];
                    }

                    if (!eyeTrackingDevice.isValid)
                    {
                        Service?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, false);
                        return;
                    }
                }

#if UNITY_OPENXR
                if (eyeTrackingDevice.TryGetFeatureValue(CommonUsages.isTracked, out bool gazeAvailable))
                {
                    Service?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, gazeAvailable);

                    if (gazeAvailable &&
                        eyeTrackingDevice.TryGetFeatureValue(EyeTrackingUsages.gazePosition, out Vector3 eyeGazePosition) &&
                        eyeTrackingDevice.TryGetFeatureValue(EyeTrackingUsages.gazeRotation, out Quaternion eyeGazeRotation))
                    {
                        Vector3 worldPosition = MixedRealityPlayspace.TransformPoint(eyeGazePosition);
                        Vector3 worldRotation = MixedRealityPlayspace.TransformDirection(eyeGazeRotation * Vector3.forward);

                        Ray newGaze = new Ray(worldPosition, worldRotation);

                        if (SmoothEyeTracking)
                        {
                            newGaze = SmoothGaze(newGaze);
                        }

                        Service?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, DateTime.UtcNow);
                    }
                }
                else
                {
                    Service?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, false);
                }
#endif // UNITY_OPENXR
            }
        }
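EyeTrackingUsages is not defined in these snippets. If your project does not already supply it, equivalent custom usages could be declared like this (the usage strings below are assumptions and must match what the eye-tracking provider reports):

        // Hypothetical declaration of the gaze usages referenced above (string names are assumptions).
        internal static class EyeTrackingUsages
        {
            public static readonly InputFeatureUsage<Vector3> gazePosition =
                new InputFeatureUsage<Vector3>("gazePosition");

            public static readonly InputFeatureUsage<Quaternion> gazeRotation =
                new InputFeatureUsage<Quaternion>("gazeRotation");
        }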
        public void UpdateHandJoints(Hand hand, ref MixedRealityPose[] jointPoses)
        {
#if MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)
            if (handTracker != null && handTracker.TryLocateHandJoints(FrameTime.OnUpdate, locations))
            {
                if (jointPoses == null)
                {
                    jointPoses = new MixedRealityPose[ArticulatedHandPose.JointCount];
                }

                foreach (HandJoint handJoint in HandJoints)
                {
                    HandJointLocation handJointLocation = locations[(int)handJoint];

                    // We want input sources to follow the playspace, so fold in the playspace transform here to
                    // put the pose into world space.
                    Vector3    position = MixedRealityPlayspace.TransformPoint(handJointLocation.Pose.position);
                    Quaternion rotation = MixedRealityPlayspace.Rotation * handJointLocation.Pose.rotation;

                    jointPoses[ConvertToArrayIndex(handJoint)] = new MixedRealityPose(position, rotation);
                }
#else
            if (jointPoses == null)
            {
                jointPoses = new MixedRealityPose[ArticulatedHandPose.JointCount];
            }

            foreach (HandFinger finger in HandFingers)
            {
                if (hand.TryGetRootBone(out Bone rootBone) && TryReadHandJoint(rootBone, out MixedRealityPose rootPose))
                {
                    jointPoses[(int)TrackedHandJoint.Palm] = rootPose;
                }

                if (hand.TryGetFingerBones(finger, fingerBones))
                {
                    for (int i = 0; i < fingerBones.Count; i++)
                    {
                        if (TryReadHandJoint(fingerBones[i], out MixedRealityPose pose))
                        {
                            jointPoses[ConvertToArrayIndex(finger, i)] = pose;
                        }
                    }
                }
#endif // MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)
            }
        }
        internal void UpdateController(MLInputController controller)
        {
            if (controller.Type == MLInputControllerType.Control)
            {
                CurrentControllerPosition = controller.Position;
                CurrentControllerRotation = controller.Orientation;

                // Update the interaction data source
                CurrentControllerPosition = MixedRealityPlayspace.TransformPoint(CurrentControllerPosition);
                CurrentControllerRotation = MixedRealityPlayspace.Rotation * CurrentControllerRotation;

                CurrentControllerPose.Position = CurrentControllerPosition;
                CurrentControllerPose.Rotation = CurrentControllerRotation;

                InputSystem?.RaiseSourcePoseChanged(InputSource, this, CurrentControllerPose);

                var interactionMapping = Interactions[0];
                interactionMapping.PoseData = CurrentControllerPose;
                if (interactionMapping.Changed)
                {
                    // Raise input system event if it's enabled
                    InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction, interactionMapping.PoseData);
                }

                // Fire Select event
                var trigger = controller.TriggerValue;
                if (triggerDown)
                {
                    if (trigger < triggerUpThreshold)
                    {
                        triggerDown = false;
                        RaiseOnSelectUp();
                    }
                }
                else
                {
                    if (trigger > triggerDownThreshold)
                    {
                        triggerDown = true;
                        RaiseOnSelectDown();
                    }
                }
            }
        }
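The trigger handling above uses two different thresholds so the select state does not flicker around a single cut-off (hysteresis). The threshold fields are not shown; plausible declarations (the values are assumptions):

        // Hypothetical hysteresis thresholds assumed by the trigger handling above (values are assumptions).
        private bool triggerDown = false;
        private const float triggerDownThreshold = 0.8f; // press once the trigger rises above this
        private const float triggerUpThreshold = 0.2f;   // release only once it falls below this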
        private bool TryReadHandJoint(Bone bone, out MixedRealityPose pose)
        {
            bool positionAvailable = bone.TryGetPosition(out Vector3 position);
            bool rotationAvailable = bone.TryGetRotation(out Quaternion rotation);

            if (positionAvailable && rotationAvailable)
            {
                // We want input sources to follow the playspace, so fold in the playspace transform here to
                // put the pose into world space.
                position = MixedRealityPlayspace.TransformPoint(position);
                rotation = MixedRealityPlayspace.Rotation * rotation;

                pose = new MixedRealityPose(position, rotation);
                return true;
            }

            pose = MixedRealityPose.ZeroIdentity;
            return false;
        }
        /// <inheritdoc/>
        public bool TryGetRectangularBoundsParams(out Vector2 center, out float angle, out float width, out float height)
        {
            if (rectangularBounds == null || !rectangularBounds.IsValid)
            {
                center = EdgeUtilities.InvalidPoint;
                angle  = 0f;
                width  = 0f;
                height = 0f;
                return false;
            }

            // Handle the user teleporting (boundary moves with them).
            Vector3 transformedCenter = MixedRealityPlayspace.TransformPoint(
                new Vector3(rectangularBounds.Center.x, 0f, rectangularBounds.Center.y));

            center = new Vector2(transformedCenter.x, transformedCenter.z);
            angle  = rectangularBounds.Angle;
            width  = rectangularBounds.Width;
            height = rectangularBounds.Height;
            return true;
        }
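A hedged usage example of TryGetRectangularBoundsParams; boundarySystem is an assumed reference to the object implementing this method:

        // Hypothetical call site: log the inscribed rectangular play area if the boundary provides one.
        if (boundarySystem.TryGetRectangularBoundsParams(out Vector2 center, out float angle, out float width, out float height))
        {
            Debug.Log($"Inscribed play area: center={center}, angle={angle:F1} deg, size={width:F2}m x {height:F2}m");
        }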
        public void UpdateHandJoints(InputDevice inputDevice, Dictionary <TrackedHandJoint, MixedRealityPose> jointPoses)
        {
#if MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)
            if (handTracker != null && handTracker.TryLocateHandJoints(FrameTime.OnUpdate, locations))
            {
                foreach (HandJoint handJoint in HandJoints)
                {
                    HandJointLocation handJointLocation = locations[(int)handJoint];

                    // We want input sources to follow the playspace, so fold in the playspace transform here to
                    // put the pose into world space.
                    Vector3    position = MixedRealityPlayspace.TransformPoint(handJointLocation.Pose.position);
                    Quaternion rotation = MixedRealityPlayspace.Rotation * handJointLocation.Pose.rotation;

                    jointPoses[ConvertToTrackedHandJoint(handJoint)] = new MixedRealityPose(position, rotation);
                }
#else
            if (inputDevice.TryGetFeatureValue(CommonUsages.handData, out Hand hand))
            {
                foreach (HandFinger finger in HandFingers)
                {
                    if (hand.TryGetRootBone(out Bone rootBone) && TryReadHandJoint(rootBone, out MixedRealityPose rootPose))
                    {
                        jointPoses[TrackedHandJoint.Palm] = rootPose;
                    }

                    if (hand.TryGetFingerBones(finger, fingerBones))
                    {
                        for (int i = 0; i < fingerBones.Count; i++)
                        {
                            if (TryReadHandJoint(fingerBones[i], out MixedRealityPose pose))
                            {
                                jointPoses[ConvertToTrackedHandJoint(finger, i)] = pose;
                            }
                        }
                    }
                }
#endif // MSFT_OPENXR && (UNITY_STANDALONE_WIN || UNITY_WSA)
            }
        }
        public void UpdateHandMesh(SpatialInteractionSourceState sourceState)
        {
            using (UpdateHandMeshPerfMarker.Auto())
            {
                MixedRealityHandTrackingProfile handTrackingProfile = null;
                MixedRealityInputSystemProfile  inputSystemProfile  = CoreServices.InputSystem?.InputSystemProfile;
                if (inputSystemProfile != null)
                {
                    handTrackingProfile = inputSystemProfile.HandTrackingProfile;
                }

                if (handTrackingProfile == null || !handTrackingProfile.EnableHandMeshVisualization)
                {
                    // If hand mesh visualization is disabled make sure to destroy our hand mesh observer if it has already been created
                    if (handMeshObserver != null)
                    {
                        // Notify that hand mesh has been updated (cleared)
                        HandMeshInfo handMeshInfo = new HandMeshInfo();
                        CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, Handedness, handMeshInfo);
                        hasRequestedHandMeshObserver = false;
                        handMeshObserver             = null;
                    }
                    return;
                }

                HandPose handPose = sourceState.TryGetHandPose();

                // Accessing the hand mesh data involves copying quite a bit of data, so only do it if application requests it.
                if (handMeshObserver == null && !hasRequestedHandMeshObserver)
                {
                    SetHandMeshObserver(sourceState);
                    hasRequestedHandMeshObserver = true;
                }

                if (handMeshObserver != null && handPose != null)
                {
                    if (handMeshTriangleIndices == null)
                    {
                        handMeshTriangleIndices      = new ushort[handMeshObserver.TriangleIndexCount];
                        handMeshTriangleIndicesUnity = new int[handMeshObserver.TriangleIndexCount];
                        handMeshObserver.GetTriangleIndices(handMeshTriangleIndices);

                        Array.Copy(handMeshTriangleIndices, handMeshTriangleIndicesUnity, (int)handMeshObserver.TriangleIndexCount);

                        // Compute neutral pose
                        Vector3[]           neutralPoseVertices     = new Vector3[handMeshObserver.VertexCount];
                        HandPose            neutralPose             = handMeshObserver.NeutralPose;
                        var                 neutralVertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
                        HandMeshVertexState handMeshVertexState     = handMeshObserver.GetVertexStateForPose(neutralPose);
                        handMeshVertexState.GetVertices(neutralVertexAndNormals);

                        Parallel.For(0, handMeshObserver.VertexCount, i =>
                        {
                            neutralVertexAndNormals[i].Position.ConvertToUnityVector3(ref neutralPoseVertices[i]);
                        });

                        // Compute UV mapping
                        InitializeUVs(neutralPoseVertices);
                    }

                    if (vertexAndNormals == null)
                    {
                        vertexAndNormals      = new HandMeshVertex[handMeshObserver.VertexCount];
                        handMeshVerticesUnity = new Vector3[handMeshObserver.VertexCount];
                        handMeshNormalsUnity  = new Vector3[handMeshObserver.VertexCount];
                    }

                    if (vertexAndNormals != null && handMeshTriangleIndices != null)
                    {
                        var handMeshVertexState = handMeshObserver.GetVertexStateForPose(handPose);
                        handMeshVertexState.GetVertices(vertexAndNormals);

                        var meshTransform = handMeshVertexState.CoordinateSystem.TryGetTransformTo(WindowsMixedRealityUtilities.SpatialCoordinateSystem);
                        if (meshTransform.HasValue)
                        {
                            System.Numerics.Matrix4x4.Decompose(meshTransform.Value,
                                                                out System.Numerics.Vector3 scale,
                                                                out System.Numerics.Quaternion rotation,
                                                                out System.Numerics.Vector3 translation);

                            Parallel.For(0, handMeshObserver.VertexCount, i =>
                            {
                                vertexAndNormals[i].Position.ConvertToUnityVector3(ref handMeshVerticesUnity[i]);
                                vertexAndNormals[i].Normal.ConvertToUnityVector3(ref handMeshNormalsUnity[i]);
                            });

                            // Hands should follow the Playspace to accommodate teleporting, so fold in the Playspace transform.
                            Vector3    positionUnity = MixedRealityPlayspace.TransformPoint(translation.ToUnityVector3());
                            Quaternion rotationUnity = MixedRealityPlayspace.Rotation * rotation.ToUnityQuaternion();

                            HandMeshInfo handMeshInfo = new HandMeshInfo
                            {
                                vertices  = handMeshVerticesUnity,
                                normals   = handMeshNormalsUnity,
                                triangles = handMeshTriangleIndicesUnity,
                                uvs       = handMeshUVsUnity,
                                position  = positionUnity,
                                rotation  = rotationUnity
                            };

                            CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, Handedness, handMeshInfo);
                        }
                    }
                }
            }
        }
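InitializeUVs is not shown. A minimal sketch, assuming a simple planar projection of the neutral-pose vertices normalized into [0, 1] and writing into the handMeshUVsUnity field used above (the real implementation may differ):

        // Hypothetical UV initialization (not the original implementation): planar-project the
        // neutral-pose vertices onto the XY plane and normalize the result into [0, 1].
        private void InitializeUVs(Vector3[] neutralPoseVertices)
        {
            if (neutralPoseVertices == null || neutralPoseVertices.Length == 0)
            {
                return;
            }

            float minX = float.MaxValue, maxX = float.MinValue;
            float minY = float.MaxValue, maxY = float.MinValue;

            for (int i = 0; i < neutralPoseVertices.Length; i++)
            {
                Vector3 v = neutralPoseVertices[i];
                minX = Mathf.Min(minX, v.x); maxX = Mathf.Max(maxX, v.x);
                minY = Mathf.Min(minY, v.y); maxY = Mathf.Max(maxY, v.y);
            }

            float scaleX = Mathf.Approximately(maxX, minX) ? 1f : 1f / (maxX - minX);
            float scaleY = Mathf.Approximately(maxY, minY) ? 1f : 1f / (maxY - minY);

            handMeshUVsUnity = new Vector2[neutralPoseVertices.Length];
            for (int i = 0; i < neutralPoseVertices.Length; i++)
            {
                handMeshUVsUnity[i] = new Vector2(
                    (neutralPoseVertices[i].x - minX) * scaleX,
                    (neutralPoseVertices[i].y - minY) * scaleY);
            }
        }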
        public void UpdateController()
        {
            if (!Enabled)
            {
                return;
            }

            // hand pose
            var lastState = TrackingState;

            TrackingState = (hand.IsTracked) ? TrackingState.Tracked : TrackingState.NotTracked;
            if (lastState != TrackingState)
            {
                CoreServices.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
            }
            if (TrackingState == TrackingState.Tracked)
            {
                var pose = new MixedRealityPose();
                pose.Position = MixedRealityPlayspace.TransformPoint(hand.transform.position);
                pose.Rotation = MixedRealityPlayspace.Rotation * hand.transform.rotation;
                CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, pose);
            }

            // hand interaction
            if (Interactions == null)
            {
                Debug.LogError($"No interaction configuration for Oculus Quest Hand {ControllerHandedness} Source");
                Enabled = false;
            }
            if (TrackingState == TrackingState.Tracked)
            {
                for (int i = 0; i < Interactions?.Length; i++)
                {
                    var interaction = Interactions[i];
                    switch (interaction.InputType)
                    {
                    case DeviceInputType.None:
                        break;

                    case DeviceInputType.SpatialPointer:
                        // hand pointer
                        var pointer = new MixedRealityPose();
                        pointer.Position     = MixedRealityPlayspace.TransformPoint(hand.PointerPose.position);
                        pointer.Rotation     = MixedRealityPlayspace.Rotation * hand.PointerPose.rotation;
                        interaction.PoseData = pointer;
                        if (interaction.Changed)
                        {
                            CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interaction.MixedRealityInputAction, pointer);
                        }
                        break;

                    case DeviceInputType.SpatialGrip:
                        if (interaction.AxisType == AxisType.SixDof)
                        {
                            var grip = new MixedRealityPose();
                            grip.Position        = MixedRealityPlayspace.TransformPoint(hand.transform.position);
                            grip.Rotation        = MixedRealityPlayspace.Rotation * hand.transform.rotation;
                            interaction.PoseData = grip;
                            if (interaction.Changed)
                            {
                                CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interaction.MixedRealityInputAction, grip);
                            }
                        }
                        break;

                    case DeviceInputType.Select:
                    case DeviceInputType.TriggerPress:
                        interaction.BoolData = hand.GetFingerIsPinching(OVRHand.HandFinger.Index);
                        if (interaction.Changed)
                        {
                            if (interaction.BoolData)
                            {
                                CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, interaction.MixedRealityInputAction);
                            }
                            else
                            {
                                CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, interaction.MixedRealityInputAction);
                            }
                        }
                        break;

                    case DeviceInputType.IndexFinger:
                        if (jointPose.ContainsKey(TrackedHandJoint.IndexTip))
                        {
                            var indexFinger = jointPose[TrackedHandJoint.IndexTip];
                            interaction.PoseData = indexFinger;
                            if (interaction.Changed)
                            {
                                CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interaction.MixedRealityInputAction, indexFinger);
                            }
                        }
                        break;
                    }
                }
            }

            // hand joint
            if (TrackingState == TrackingState.Tracked)
            {
                for (int i = 0; i < skeleton.Bones.Count; i++)
                {
                    var bones     = skeleton.Bones[i];
                    var handJoint = convertBoneIdToTrackedHandJoint(bones.Id);
                    var position  = MixedRealityPlayspace.TransformPoint(bones.Transform.position);
                    var rotation  = MixedRealityPlayspace.Rotation * bones.Transform.rotation;
                    if (jointPose.ContainsKey(handJoint))
                    {
                        jointPose[handJoint] = new MixedRealityPose(position, rotation);
                    }
                    else
                    {
                        jointPose.Add(handJoint, new MixedRealityPose(position, rotation));
                    }
                }
                CoreServices.InputSystem?.RaiseHandJointsUpdated(InputSource, ControllerHandedness, jointPose);
            }
        }
        public void UpdateController(WebXRInputSource controller)
        {
            if (!Enabled)
            {
                return;
            }

            IsPositionAvailable = IsRotationAvailable = controller.Hand.Available;

            jointPoses[TrackedHandJoint.Wrist] = GetJointMixedRealityPose(controller.Hand.Joints[WebXRHand.WRIST]);

            for (int i = WebXRHand.THUMB_METACARPAL; i < WebXRHand.JOINT_COUNT; i++)
            {
                var joint = controller.Hand.Joints[i];

                jointPoses[(TrackedHandJoint)(i + 2)] = GetJointMixedRealityPose(joint);
            }

            // Synthesize a palm pose halfway between the wrist and the middle metacarpal.
            jointPoses[TrackedHandJoint.Palm] = new MixedRealityPose((jointPoses[TrackedHandJoint.Wrist].Position + jointPoses[TrackedHandJoint.MiddleMetacarpal].Position) / 2, jointPoses[TrackedHandJoint.MiddleMetacarpal].Rotation);

            var indexPose = jointPoses[TrackedHandJoint.IndexTip];

            bool             isSelecting;
            MixedRealityPose spatialPointerPose;

            if (controller.IsPositionTracked)
            {
                isSelecting        = controller.Selected;
                spatialPointerPose = new MixedRealityPose(MixedRealityPlayspace.TransformPoint(controller.Position), MixedRealityPlayspace.Rotation * controller.Rotation);
            }
            else
            {
                // Is selecting if thumb tip and index tip are close
                isSelecting = Vector3.Distance(controller.Hand.Joints[WebXRHand.THUMB_PHALANX_TIP].Position, controller.Hand.Joints[WebXRHand.INDEX_PHALANX_TIP].Position) < 0.04;

                // The hand ray starts from the middle of thumb tip and index tip
                HandRay.Update((controller.Hand.Joints[WebXRHand.THUMB_PHALANX_TIP].Position + controller.Hand.Joints[WebXRHand.INDEX_PHALANX_TIP].Position) / 2, new Vector3(0.3f, -0.4f, 0.9f), CameraCache.Main.transform, ControllerHandedness);

                Ray ray = HandRay.Ray;

                spatialPointerPose = new MixedRealityPose(ray.origin, Quaternion.LookRotation(ray.direction));
            }

            CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, spatialPointerPose);

            CoreServices.InputSystem?.RaiseHandJointsUpdated(InputSource, ControllerHandedness, jointPoses);

            UpdateVelocity();

            for (int i = 0; i < Interactions?.Length; i++)
            {
                switch (Interactions[i].InputType)
                {
                case DeviceInputType.SpatialPointer:
                    Interactions[i].PoseData = spatialPointerPose;
                    if (Interactions[i].Changed)
                    {
                        CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction, Interactions[i].PoseData);
                    }
                    break;

                case DeviceInputType.SpatialGrip:
                    Interactions[i].PoseData = indexPose;
                    if (Interactions[i].Changed)
                    {
                        CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction, Interactions[i].PoseData);
                    }
                    break;

                case DeviceInputType.Select:
                    Interactions[i].BoolData = isSelecting || controller.TargetRayMode == WebXRTargetRayModes.Screen;

                    if (Interactions[i].Changed)
                    {
                        if (Interactions[i].BoolData)
                        {
                            CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                        }
                        else
                        {
                            CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                        }
                    }
                    break;

                case DeviceInputType.TriggerPress:
                    Interactions[i].BoolData = isSelecting;

                    if (Interactions[i].Changed)
                    {
                        if (Interactions[i].BoolData)
                        {
                            CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                        }
                        else
                        {
                            CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                        }
                    }
                    break;

                case DeviceInputType.IndexFinger:
                    Interactions[i].PoseData = indexPose;
                    if (Interactions[i].Changed)
                    {
                        CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction, Interactions[i].PoseData);
                    }
                    break;

                case DeviceInputType.ThumbStick:
                    HandDefinition?.UpdateCurrentTeleportPose(Interactions[i]);
                    break;
                }
            }
        }
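        // Illustrative sketch (not part of the original source): one way a pinch-style
        // "select" signal could be derived from the same WebXR joint data used above to
        // anchor the hand ray. The helper name and the 0.025 m threshold are assumptions,
        // not MRTK or WebXR API; the joint accessors mirror the expressions used above.
        private static bool IsPinching(WebXRInputSource controller, float pinchThreshold = 0.025f)
        {
            Vector3 thumbTip = controller.Hand.Joints[WebXRHand.THUMB_PHALANX_TIP].Position;
            Vector3 indexTip = controller.Hand.Joints[WebXRHand.INDEX_PHALANX_TIP].Position;

            // Treat the hand as pinching when the thumb and index tips are closer than the threshold (meters).
            return Vector3.Distance(thumbTip, indexTip) < pinchThreshold;
        }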
        /// <summary>
        /// Compute the world position corresponding to the input local position in playspace.
        /// </summary>
        /// <param name="localPosition">The local position.</param>
        /// <returns>The world position.</returns>
        public static Vector3 PositionRelativeToPlayspace(Vector3 localPosition)
        {
            return MixedRealityPlayspace.TransformPoint(localPosition);
        }
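        // Minimal usage sketch (not from the original source): convert a playspace-local
        // offset into world space before placing content there. The method name, variable
        // names, and the 1.5 m offset are illustrative assumptions.
        public static Vector3 GetSpawnPointInFrontOfPlayspace()
        {
            // 1.5 m forward of the playspace origin, expressed in playspace-local coordinates.
            Vector3 localSpawnOffset = new Vector3(0.0f, 0.0f, 1.5f);

            // The result follows any translation/rotation applied to the playspace (e.g. by teleporting).
            return PositionRelativeToPlayspace(localSpawnOffset);
        }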
        public void UpdateController(WebXRInputSource controller)
        {
            if (!Enabled)
            {
                return;
            }

            var position = MixedRealityPlayspace.TransformPoint(controller.Position);
            var rotation = MixedRealityPlayspace.Rotation * controller.Rotation;

            var pose = new MixedRealityPose(position, rotation);

            for (int i = 0; i < Interactions?.Length; i++)
            {
                switch (Interactions[i].InputType)
                {
                case DeviceInputType.SpatialPointer:
                    Interactions[i].PoseData = pose;
                    if (Interactions[i].Changed)
                    {
                        CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction, pose);
                    }
                    break;

                case DeviceInputType.SpatialGrip:
                    Interactions[i].PoseData = pose;
                    if (Interactions[i].Changed)
                    {
                        CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction, pose);
                    }
                    break;

                case DeviceInputType.Select:
                    Interactions[i].BoolData = controller.Selected;

                    if (Interactions[i].Changed)
                    {
                        if (Interactions[i].BoolData)
                        {
                            CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                        }
                        else
                        {
                            CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                        }
                    }
                    break;

                case DeviceInputType.TriggerPress:
                    Interactions[i].BoolData = controller.Squeezed;

                    if (Interactions[i].Changed)
                    {
                        if (Interactions[i].BoolData)
                        {
                            CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                        }
                        else
                        {
                            CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                        }
                    }
                    break;
                }
            }
        }
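        // Illustrative sketch (assumption, not the original device manager): a WebXR input
        // provider would typically call UpdateController once per frame for each detected
        // input source. "WebXRMRTKController", "trackedControllers", and this method name are
        // hypothetical; requires System.Collections.Generic.
        private readonly Dictionary<WebXRInputSource, WebXRMRTKController> trackedControllers =
            new Dictionary<WebXRInputSource, WebXRMRTKController>();

        private void UpdateTrackedControllers()
        {
            foreach (var entry in trackedControllers)
            {
                // Push the latest WebXR state into the MRTK-side controller each frame.
                entry.Value.UpdateController(entry.Key);
            }
        }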
Example #27
        /// <summary>
        /// Update the controller data from XR SDK.
        /// </summary>
        public virtual void UpdateController(InputDevice inputDevice)
        {
            using (UpdateControllerPerfMarker.Auto())
            {
                if (!Enabled)
                {
                    return;
                }

                if (Interactions == null)
                {
                    Debug.LogError($"No interaction configuration for {GetType().Name}");
                    Enabled = false;
                }

                var lastState = TrackingState;
                LastControllerPose = CurrentControllerPose;

                // Check for position and rotation.
                IsPositionAvailable   = inputDevice.TryGetFeatureValue(CommonUsages.devicePosition, out CurrentControllerPosition);
                IsPositionApproximate = false;

                IsRotationAvailable = inputDevice.TryGetFeatureValue(CommonUsages.deviceRotation, out CurrentControllerRotation);

                // Devices are considered tracked if we receive position OR rotation data from the sensors.
                TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;

                CurrentControllerPosition = MixedRealityPlayspace.TransformPoint(CurrentControllerPosition);
                CurrentControllerRotation = MixedRealityPlayspace.Rotation * CurrentControllerRotation;

                CurrentControllerPose.Position = CurrentControllerPosition;
                CurrentControllerPose.Rotation = CurrentControllerRotation;

                // Raise input system events if it is enabled.
                if (lastState != TrackingState)
                {
                    CoreServices.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
                }

                if (TrackingState == TrackingState.Tracked && LastControllerPose != CurrentControllerPose)
                {
                    if (IsPositionAvailable && IsRotationAvailable)
                    {
                        CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, CurrentControllerPose);
                    }
                    else if (IsPositionAvailable && !IsRotationAvailable)
                    {
                        CoreServices.InputSystem?.RaiseSourcePositionChanged(InputSource, this, CurrentControllerPosition);
                    }
                    else if (!IsPositionAvailable && IsRotationAvailable)
                    {
                        CoreServices.InputSystem?.RaiseSourceRotationChanged(InputSource, this, CurrentControllerRotation);
                    }
                }

                for (int i = 0; i < Interactions?.Length; i++)
                {
                    switch (Interactions[i].AxisType)
                    {
                    case AxisType.None:
                        break;

                    case AxisType.Digital:
                        UpdateButtonData(Interactions[i], inputDevice);
                        break;

                    case AxisType.SingleAxis:
                        UpdateSingleAxisData(Interactions[i], inputDevice);
                        break;

                    case AxisType.DualAxis:
                        UpdateDualAxisData(Interactions[i], inputDevice);
                        break;

                    case AxisType.SixDof:
                        UpdatePoseData(Interactions[i], inputDevice);
                        break;
                    }
                }
            }
        }
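        // Hedged sketch of what the AxisType.Digital branch above could dispatch to; the real
        // UpdateButtonData implementation may differ. The choice of CommonUsages.primaryButton
        // and the "Example" method name are illustrative assumptions.
        protected virtual void UpdateButtonDataExample(MixedRealityInteractionMapping interactionMapping, InputDevice inputDevice)
        {
            if (inputDevice.TryGetFeatureValue(CommonUsages.primaryButton, out bool isPressed))
            {
                interactionMapping.BoolData = isPressed;

                if (interactionMapping.Changed)
                {
                    // Mirror the down/up event pattern used elsewhere in this file.
                    if (isPressed)
                    {
                        CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction);
                    }
                    else
                    {
                        CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction);
                    }
                }
            }
        }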
        /// <summary>
        /// Update the source input from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateSourceData(InteractionSourceState interactionSourceState)
        {
            var lastState  = TrackingState;
            var sourceKind = interactionSourceState.source.kind;

            lastSourcePose = currentSourcePose;

            if (sourceKind == InteractionSourceKind.Hand ||
                (sourceKind == InteractionSourceKind.Controller && interactionSourceState.source.supportsPointing))
            {
                // The source is either a hand or a controller that supports pointing.
                // We can now check for position and rotation.
                IsPositionAvailable = interactionSourceState.sourcePose.TryGetPosition(out currentSourcePosition);

                if (IsPositionAvailable)
                {
                    IsPositionApproximate = (interactionSourceState.sourcePose.positionAccuracy == InteractionSourcePositionAccuracy.Approximate);
                }
                else
                {
                    IsPositionApproximate = false;
                }

                IsRotationAvailable = interactionSourceState.sourcePose.TryGetRotation(out currentSourceRotation);

                // We want the source to follow the Playspace, so fold in the playspace transform here to
                // put the source pose into world space.
                currentSourcePosition = MixedRealityPlayspace.TransformPoint(currentSourcePosition);
                currentSourceRotation = MixedRealityPlayspace.Rotation * currentSourceRotation;

                // Devices are considered tracked if we receive position OR rotation data from the sensors.
                TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;
            }
            else
            {
                // The input source does not support tracking.
                TrackingState = TrackingState.NotApplicable;
            }

            currentSourcePose.Position = currentSourcePosition;
            currentSourcePose.Rotation = currentSourceRotation;

            // Raise input system events if it is enabled.
            if (lastState != TrackingState)
            {
                InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
            }

            if (TrackingState == TrackingState.Tracked && lastSourcePose != currentSourcePose)
            {
                if (IsPositionAvailable && IsRotationAvailable)
                {
                    InputSystem?.RaiseSourcePoseChanged(InputSource, this, currentSourcePose);
                }
                else if (IsPositionAvailable && !IsRotationAvailable)
                {
                    InputSystem?.RaiseSourcePositionChanged(InputSource, this, currentSourcePosition);
                }
                else if (!IsPositionAvailable && IsRotationAvailable)
                {
                    InputSystem?.RaiseSourceRotationChanged(InputSource, this, currentSourceRotation);
                }
            }
        }
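        // Illustrative consumer sketch (not part of the original source): a component that
        // listens for the source pose and tracking state events raised above. Registration via
        // RegisterHandler and the SourcePoseEventData members are used as understood from MRTK;
        // treat exact member names as assumptions if your MRTK version differs.
        public class SourcePoseLogger : MonoBehaviour, IMixedRealitySourcePoseHandler
        {
            private void OnEnable() => CoreServices.InputSystem?.RegisterHandler<IMixedRealitySourcePoseHandler>(this);

            private void OnDisable() => CoreServices.InputSystem?.UnregisterHandler<IMixedRealitySourcePoseHandler>(this);

            public void OnSourcePoseChanged(SourcePoseEventData<MixedRealityPose> eventData)
            {
                // Full 6-DoF updates arrive here when both position and rotation were available.
                Debug.Log($"Source pose changed: {eventData.SourceData.Position}");
            }

            // Partial updates (position-only / rotation-only) and tracking state changes.
            public void OnSourcePoseChanged(SourcePoseEventData<Vector3> eventData) { }
            public void OnSourcePoseChanged(SourcePoseEventData<Quaternion> eventData) { }
            public void OnSourcePoseChanged(SourcePoseEventData<Vector2> eventData) { }
            public void OnSourcePoseChanged(SourcePoseEventData<TrackingState> eventData) { }
            public void OnSourceDetected(SourceStateEventData eventData) { }
            public void OnSourceLost(SourceStateEventData eventData) { }
        }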
Example #29
        /// <summary>
        /// Update the hand data from the device.
        /// </summary>
        /// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
        private void UpdateHandData(InteractionSourceState interactionSourceState)
        {
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
            // Articulated hand support is only present in the 18362 version and beyond Windows
            // SDK (which contains the V8 drop of the Universal API Contract). In particular,
            // the HandPose related APIs are only present on this version and above.
            if (!articulatedHandApiAvailable)
            {
                return;
            }

            PerceptionTimestamp perceptionTimestamp = PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now);
            IReadOnlyList<SpatialInteractionSourceState> sources = SpatialInteractionManager?.GetDetectedSourcesAtTimestamp(perceptionTimestamp);
            foreach (SpatialInteractionSourceState sourceState in sources)
            {
                if (sourceState.Source.Id.Equals(interactionSourceState.source.id))
                {
                    HandPose handPose = sourceState.TryGetHandPose();

#if WINDOWS_UWP
                    if (CoreServices.InputSystem.InputSystemProfile.HandTrackingProfile.EnableHandMeshVisualization)
                    {
                        // Accessing the hand mesh data involves copying quite a bit of data, so only do it if application requests it.
                        if (handMeshObserver == null && !hasRequestedHandMeshObserver)
                        {
                            SetHandMeshObserver(sourceState);
                            hasRequestedHandMeshObserver = true;
                        }

                        if (handMeshObserver != null && handMeshTriangleIndices == null)
                        {
                            uint     indexCount = handMeshObserver.TriangleIndexCount;
                            ushort[] indices    = new ushort[indexCount];
                            handMeshObserver.GetTriangleIndices(indices);
                            handMeshTriangleIndices = new int[indexCount];
                            Array.Copy(indices, handMeshTriangleIndices, (int)handMeshObserver.TriangleIndexCount);

                            // Compute neutral pose
                            Vector3[]           neutralPoseVertices = new Vector3[handMeshObserver.VertexCount];
                            HandPose            neutralPose         = handMeshObserver.NeutralPose;
                            var                 vertexAndNormals    = new HandMeshVertex[handMeshObserver.VertexCount];
                            HandMeshVertexState handMeshVertexState = handMeshObserver.GetVertexStateForPose(neutralPose);
                            handMeshVertexState.GetVertices(vertexAndNormals);

                            for (int i = 0; i < handMeshObserver.VertexCount; i++)
                            {
                                neutralPoseVertices[i] = WindowsMixedRealityUtilities.SystemVector3ToUnity(vertexAndNormals[i].Position);
                            }

                            // Compute UV mapping
                            InitializeUVs(neutralPoseVertices);
                        }

                        if (handPose != null && handMeshObserver != null && handMeshTriangleIndices != null)
                        {
                            var vertexAndNormals    = new HandMeshVertex[handMeshObserver.VertexCount];
                            var handMeshVertexState = handMeshObserver.GetVertexStateForPose(handPose);
                            handMeshVertexState.GetVertices(vertexAndNormals);

                            var meshTransform = handMeshVertexState.CoordinateSystem.TryGetTransformTo(WindowsMixedRealityUtilities.SpatialCoordinateSystem);
                            if (meshTransform.HasValue)
                            {
                                System.Numerics.Vector3    scale;
                                System.Numerics.Quaternion rotation;
                                System.Numerics.Vector3    translation;
                                System.Numerics.Matrix4x4.Decompose(meshTransform.Value, out scale, out rotation, out translation);

                                var handMeshVertices = new Vector3[handMeshObserver.VertexCount];
                                var handMeshNormals  = new Vector3[handMeshObserver.VertexCount];

                                for (int i = 0; i < handMeshObserver.VertexCount; i++)
                                {
                                    handMeshVertices[i] = WindowsMixedRealityUtilities.SystemVector3ToUnity(vertexAndNormals[i].Position);
                                    handMeshNormals[i]  = WindowsMixedRealityUtilities.SystemVector3ToUnity(vertexAndNormals[i].Normal);
                                }

                                // Hands should follow the Playspace to accommodate teleporting, so fold in the Playspace transform.
                                Vector3 unityPosition = WindowsMixedRealityUtilities.SystemVector3ToUnity(translation);
                                unityPosition = MixedRealityPlayspace.TransformPoint(unityPosition);
                                Quaternion unityRotation = WindowsMixedRealityUtilities.SystemQuaternionToUnity(rotation);
                                unityRotation = MixedRealityPlayspace.Rotation * unityRotation;

                                HandMeshInfo handMeshInfo = new HandMeshInfo
                                {
                                    vertices  = handMeshVertices,
                                    normals   = handMeshNormals,
                                    triangles = handMeshTriangleIndices,
                                    uvs       = handMeshUVs,
                                    position  = unityPosition,
                                    rotation  = unityRotation
                                };

                                CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, ControllerHandedness, handMeshInfo);
                            }
                        }
                    }
                    else
                    {
                        // if hand mesh visualization is disabled make sure to destroy our hand mesh observer if it has already been created
                        if (handMeshObserver != null)
                        {
                            // notify that hand mesh has been updated (cleared)
                            HandMeshInfo handMeshInfo = new HandMeshInfo();
                            CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, ControllerHandedness, handMeshInfo);
                            hasRequestedHandMeshObserver = false;
                            handMeshObserver             = null;
                        }
                    }
#endif // WINDOWS_UWP

                    if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
                    {
                        for (int i = 0; i < jointPoses.Length; i++)
                        {
                            unityJointOrientations[i] = WindowsMixedRealityUtilities.SystemQuaternionToUnity(jointPoses[i].Orientation);
                            unityJointPositions[i]    = WindowsMixedRealityUtilities.SystemVector3ToUnity(jointPoses[i].Position);

                            // We want the controller to follow the Playspace, so fold in the playspace transform here to
                            // put the controller pose into world space.
                            unityJointPositions[i]    = MixedRealityPlayspace.TransformPoint(unityJointPositions[i]);
                            unityJointOrientations[i] = MixedRealityPlayspace.Rotation * unityJointOrientations[i];

                            if (jointIndices[i] == HandJointKind.IndexTip)
                            {
                                lastIndexTipRadius = jointPoses[i].Radius;
                            }

                            TrackedHandJoint handJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                            if (!unityJointPoses.ContainsKey(handJoint))
                            {
                                unityJointPoses.Add(handJoint, new MixedRealityPose(unityJointPositions[i], unityJointOrientations[i]));
                            }
                            else
                            {
                                unityJointPoses[handJoint] = new MixedRealityPose(unityJointPositions[i], unityJointOrientations[i]);
                            }
                        }
                        CoreServices.InputSystem?.RaiseHandJointsUpdated(InputSource, ControllerHandedness, unityJointPoses);
                    }
                }
            }
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
        }
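        // Illustrative consumer sketch (assumption, not part of the original source): once
        // RaiseHandJointsUpdated has been called, individual joint poses can be queried through
        // MRTK's HandJointUtils. The method name and the choice of IndexTip / Handedness.Right
        // are examples only.
        private void LogRightIndexTip()
        {
            if (HandJointUtils.TryGetJointPose(TrackedHandJoint.IndexTip, Handedness.Right, out MixedRealityPose indexTipPose))
            {
                // The returned pose is already in world space (the playspace transform was folded in above).
                Debug.Log($"Right index tip at {indexTipPose.Position}");
            }
        }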