/// <summary>
/// Polls the current pose from the configured device and pose source, and
/// applies it to the local transform when any pose data is available.
/// </summary>
protected virtual void PerformUpdate()
{
    if (!enabled)
    {
        return;
    }

    // GetPoseData fully assigns currentPose through the out parameter, so no
    // pre-initialization is needed (the original redundantly constructed a
    // Pose and then assigned Pose.identity before the call).
    Pose currentPose;
    PoseDataFlags poseFlags = GetPoseData(m_Device, m_PoseSource, out currentPose);
    if (poseFlags != PoseDataFlags.NoData)
    {
        Pose localPose = TransformPoseByOriginIfNeeded(currentPose);
        SetLocalTransform(localPose.position, localPose.rotation, poseFlags);
    }
}
/// <summary>
/// Retrieves the pose for a specific XRNode from the XR tracking system.
/// </summary>
/// <param name="node">The XRNode to query.</param>
/// <param name="resultPose">The retrieved pose; set to <c>Pose.identity</c>
/// when the node is not present in the current node states.</param>
/// <returns>Flags indicating which pose components (position and/or rotation)
/// were successfully retrieved; <c>PoseDataFlags.NoData</c> otherwise.</returns>
internal static PoseDataFlags GetNodePoseData(XR.XRNode node, out Pose resultPose)
{
    PoseDataFlags retData = PoseDataFlags.NoData;

    // nodeStates is a shared scratch list (declared elsewhere in this file)
    // that is refilled on every call.
    XR.InputTracking.GetNodeStates(nodeStates);
    foreach (XR.XRNodeState nodeState in nodeStates)
    {
        if (nodeState.nodeType == node)
        {
            // TryGetPosition/TryGetRotation assign their out arguments even on
            // failure, so resultPose is definitely assigned before returning.
            if (nodeState.TryGetPosition(out resultPose.position))
            {
                retData |= PoseDataFlags.Position;
            }
            if (nodeState.TryGetRotation(out resultPose.rotation))
            {
                retData |= PoseDataFlags.Rotation;
            }
            return retData;
        }
    }

    resultPose = Pose.identity;
    return retData;
}
/// <summary>
/// Queries pose data from the XRNode subsystem based on the provided pose source.
/// </summary>
/// <param name="poseSource">The pose source to request data for.</param>
/// <param name="resultPose">The resulting pose data. Falls back to the Center Eye
/// pose when the Color Camera pose is unavailable, and to the Left Hand pose when
/// the remote (Right Hand) pose is unavailable.</param>
/// <returns>A bit flag representing which data has been retrieved from the
/// provided pose source; <c>PoseDataFlags.NoData</c> when nothing was retrieved
/// or VR support is disabled.</returns>
public static PoseDataFlags GetDataFromSource(TrackedPoseDriver.TrackedPose poseSource, out Pose resultPose)
{
#if ENABLE_VR
    switch (poseSource)
    {
        case TrackedPoseDriver.TrackedPose.RemotePose:
        {
            // Prefer the right hand; fall back to the left hand if no data.
            PoseDataFlags retFlags = GetNodePoseData(XR.XRNode.RightHand, out resultPose);
            if (retFlags == PoseDataFlags.NoData)
            {
                return GetNodePoseData(XR.XRNode.LeftHand, out resultPose);
            }
            return retFlags;
        }
        case TrackedPoseDriver.TrackedPose.LeftEye:
            return GetNodePoseData(XR.XRNode.LeftEye, out resultPose);
        case TrackedPoseDriver.TrackedPose.RightEye:
            return GetNodePoseData(XR.XRNode.RightEye, out resultPose);
        case TrackedPoseDriver.TrackedPose.Head:
            return GetNodePoseData(XR.XRNode.Head, out resultPose);
        case TrackedPoseDriver.TrackedPose.Center:
            return GetNodePoseData(XR.XRNode.CenterEye, out resultPose);
        case TrackedPoseDriver.TrackedPose.LeftPose:
            return GetNodePoseData(XR.XRNode.LeftHand, out resultPose);
        case TrackedPoseDriver.TrackedPose.RightPose:
            return GetNodePoseData(XR.XRNode.RightHand, out resultPose);
        case TrackedPoseDriver.TrackedPose.ColorCamera:
        {
            PoseDataFlags retFlags = TryGetTangoPose(out resultPose);
            if (retFlags == PoseDataFlags.NoData)
            {
                // We fall back to CenterEye because we can't currently extend
                // the XRNode structure, nor are we ready to replace it.
                return GetNodePoseData(XR.XRNode.CenterEye, out resultPose);
            }
            return retFlags;
        }
        default:
            Debug.LogWarningFormat("Unable to retrieve pose data for poseSource: {0}", poseSource.ToString());
            break;
    }
#endif
    resultPose = Pose.identity;
    return PoseDataFlags.NoData;
}
/// <summary>
/// Applies the retrieved pose to the local transform, honoring the configured
/// tracking type and applying only the components reported present in poseFlags.
/// </summary>
/// <param name="newPosition">Position to assign to <c>transform.localPosition</c>.</param>
/// <param name="newRotation">Rotation to assign to <c>transform.localRotation</c>.</param>
/// <param name="poseFlags">Flags indicating which pose components are valid.</param>
protected virtual void SetLocalTransform(Vector3 newPosition, Quaternion newRotation, PoseDataFlags poseFlags)
{
    // Use "!= 0" for flag tests, matching the prediction override's style.
    bool rotationAllowed = m_TrackingType == TrackingType.RotationAndPosition
        || m_TrackingType == TrackingType.RotationOnly;
    if (rotationAllowed && (poseFlags & PoseDataFlags.Rotation) != 0)
    {
        transform.localRotation = newRotation;
    }

    bool positionAllowed = m_TrackingType == TrackingType.RotationAndPosition
        || m_TrackingType == TrackingType.PositionOnly;
    if (positionAllowed && (poseFlags & PoseDataFlags.Position) != 0)
    {
        transform.localPosition = newPosition;
    }
}
/// <summary>
/// Applies the pose with simple velocity-based prediction. While updates are
/// allowed (canUpdate), captures the linear/angular velocity relative to the
/// previous pose stored in prevTransform and extrapolates the new pose by a
/// scaled velocity offset whose blend weight grows with speed and saturates
/// at 1. When updates are not allowed and mode is PredictionMode.OnUpdate,
/// the last predicted pose is reused instead. The (possibly modified) pose is
/// then forwarded to the base implementation.
/// Mutates prevTransform (pos/rot and posPredicted/rotPredicted) as a side effect.
/// </summary>
protected override void SetLocalTransform(Vector3 newPosition, Quaternion newRotation, PoseDataFlags poseFlags)
{
    // calculate predictions
    if (enablePrediction)
    {
        if (canUpdate) // only update before render
        {
            if ((poseFlags & PoseDataFlags.Position) != 0)
            {
                // capture vel & store this pos
                var linearVel = newPosition - prevTransform.pos;
                prevTransform.pos = newPosition;
                // scale vel
                linearVel *= linearVelMul;
                // apply vel offset stronger the faster we're moving
                // (weight saturates at 1 once speed exceeds 1/linearVelClampMul)
                newPosition = Vector3.Lerp(newPosition, newPosition + linearVel, Mathf.Min(linearVel.magnitude * linearVelClampMul, 1.0f));
                prevTransform.posPredicted = newPosition; // capture predicted pos
            }
            if ((poseFlags & PoseDataFlags.Rotation) != 0)
            {
                // capture vel & store this rot
                // NOTE(review): Inverse(new) * prev is the rotation taking the
                // new pose back toward the previous one; the forward delta would
                // be new * Inverse(prev). Verify the intended prediction sign.
                var angularVel = Quaternion.Inverse(newRotation) * prevTransform.rot;
                prevTransform.rot = newRotation;
                // scale vel
                angularVel.ToAngleAxis(out float angle, out var axis);
                angularVel = Quaternion.AngleAxis(angle * angularVelMul, axis);
                // apply vel offset stronger the faster we're moving
                // (weight saturates at 1 once angle exceeds 1/angularVelClampMul)
                newRotation = Quaternion.Lerp(newRotation, angularVel * newRotation, Mathf.Min(angle * angularVelClampMul, 1.0f));
                prevTransform.rotPredicted = newRotation; // capture predicted rot
            }
        }
        else if (mode == PredictionMode.OnUpdate)
        {
            // apply predicted transform to object before drawing
            newPosition = prevTransform.posPredicted;
            newRotation = prevTransform.rotPredicted;
        }
    }
    // set modified transform
    base.SetLocalTransform(newPosition, newRotation, poseFlags);
}