HMDUtils.FusionService.Vec Adapt(Vector3 In)
{
    HMDUtils.FusionService.Vec Output = new HMDUtils.FusionService.Vec(In.x, In.y, In.z);
    return Output;
}
void LateUpdate()
{
    try
    {
        double Time = HMDUtils.FusionService.GetTime();
        uint ViconFrameNumber = Client.GetFrameNumber();

        // Position
        Output_GetSubjectRootSegmentName RootName = Client.GetSubjectRootSegmentName(HmdName);
        Output_GetSegmentLocalTranslation Translation = Client.GetSegmentTranslation(HmdName, RootName.SegmentName);

        // Raw Vicon position, scale is in mm. The data here is in the datastream default;
        // x-forward, y-left, z-up for the global coordinate system.
        HMDUtils.FusionService.Vec ViconPosition = new HMDUtils.FusionService.Vec(Translation.Translation[0], Translation.Translation[1], Translation.Translation[2]);

        // Orientation. The local coordinate system of the HMD object is x-right, y-up, z-back.
        Output_GetSegmentLocalRotationQuaternion Rot = Client.GetSegmentRotation(HmdName, RootName.SegmentName);

        // Raw Vicon orientation
        HMDUtils.FusionService.Quat ViconOrientation = new HMDUtils.FusionService.Quat(Rot.Rotation[0], Rot.Rotation[1], Rot.Rotation[2], Rot.Rotation[3]);

        // If we don't get a result, or the pose returned from the datastream is occluded,
        // then we will use the last known good position that we received.
        bool bViconPoseValid = true;
        if (Rot.Result != ViconDataStreamSDK.CSharp.Result.Success || Rot.Occluded || Translation.Occluded)
        {
            // We use this flag to determine whether to initialize the fusion algorithm;
            // we don't want to initialize it on occluded frames.
            bViconPoseValid = false;
            if (m_LastGoodPose != null)
            {
                ViconPosition = m_LastGoodPose.Position;
                ViconOrientation = m_LastGoodPose.Rotation;
            }
            else
            {
                // If all else fails, we will return the origin :(
                ViconOrientation = new HMDUtils.FusionService.Quat(0, 0, 0, 1);
            }
        }
        else
        {
            if (m_LastGoodPose == null)
            {
                m_LastGoodPose = new HMDUtils.FusionService.Pose(ViconPosition, ViconOrientation);
            }
            else
            {
                m_LastGoodPose.Position = ViconPosition;
                m_LastGoodPose.Rotation = ViconOrientation;
            }
        }

        // Oculus space. We need to translate to the Oculus coordinate system here so that the fusion
        // algorithm can work with all data in the same coordinate system.
        // The Vicon data comes in as z-up, x-forward RHS. We convert to y-up, z-back RHS.
        // The local coordinate system of the tracked Oculus object in the Vicon data is already y-up, z-back.
        // The conversion also scales from mm to m.
        //
        //              Vicon   Oculus   Unity
        //   forward      x       -z       z
        //   up           z        y       y
        //   right       -y        x       x
        //
        // https://gamedev.stackexchange.com/questions/157946/converting-a-quaternion-in-a-right-to-left-handed-coordinate-system
        //HMDUtils.FusionService.Quat ViconOrientationInOculus = new HMDUtils.FusionService.Quat(-ViconOrientation.Y, ViconOrientation.Z, -ViconOrientation.X, ViconOrientation.W);
        //HMDUtils.FusionService.Vec ViconPositionInOculus = new HMDUtils.FusionService.Vec(-ViconPosition.Y * 0.001, ViconPosition.Z * 0.001, -ViconPosition.X * 0.001);
        //HMDUtils.FusionService.Pose ViconInOculus = new HMDUtils.FusionService.Pose(ViconPositionInOculus, ViconOrientationInOculus);

        // For XR, convert the Vicon data to the Unity coordinate system
        //HMDUtils.FusionService.Quat Rotation = new HMDUtils.FusionService.Quat(ViconOrientation.Y, -ViconOrientation.Z, -ViconOrientation.X, ViconOrientation.W);
        //HMDUtils.FusionService.Vec Position = new HMDUtils.FusionService.Vec(-ViconPosition.Y * 0.001, ViconPosition.Z * 0.001, ViconPosition.X * 0.001);
        //HMDUtils.FusionService.Pose ViconInUnity = new HMDUtils.FusionService.Pose(Position, Rotation);

        // Otherwise convert to Oculus
        HMDUtils.FusionService.Pose ViconInOculus = HMDUtils.FusionService.GetMappedVicon(ViconOrientation, ViconPosition);

        bool bOK = false;
        HMDUtils.FusionService.Quat HmdOrtOculus = new HMDUtils.FusionService.Quat(0, 0, 0, 0);
        HMDUtils.FusionService.Vec HmdOrtVOculus = new HMDUtils.FusionService.Vec(0, 0, 0);
        HMDUtils.FusionService.Vec HmdOrtAOculus = new HMDUtils.FusionService.Vec(0, 0, 0);
        HMDUtils.FusionService.Vec HmdPosOculus = new HMDUtils.FusionService.Vec(0, 0, 0);
        HMDUtils.FusionService.Vec HmdPosVOculus = new HMDUtils.FusionService.Vec(0, 0, 0);
        HMDUtils.FusionService.Vec HmdPosAOculus = new HMDUtils.FusionService.Vec(0, 0, 0);

        // Not sure whether we actually require this, plus XR doesn't give it.
        double HmdTime = 0;

        if (!XRCalls)
        {
            // We have to assume this
            bOK = true;

            // The pose from the Oculus; this is already in the Oculus coordinate system - y-up, z-back, RHS
            HMDUtils.OVRPluginServices.PoseStatef HMDState = HMDUtils.OVRPluginServices.ovrp_GetNodePoseState(HMDUtils.OVRPluginServices.Step.Render, HMDUtils.OVRPluginServices.Node.EyeCenter);
            HmdOrtOculus = Adapt(HMDState.Pose.Orientation);
            HmdOrtVOculus = Adapt(HMDState.AngularVelocity);
            HmdOrtAOculus = Adapt(HMDState.AngularAcceleration);
            HmdPosOculus = Adapt(HMDState.Pose.Position);
            HmdPosVOculus = Adapt(HMDState.Velocity);
            HmdPosAOculus = Adapt(HMDState.Acceleration);
            HmdTime = HMDState.Time;
        }
        else
        {
            List<UnityEngine.XR.XRNodeState> XRNodeStates = new List<UnityEngine.XR.XRNodeState>();
            UnityEngine.XR.InputTracking.GetNodeStates(XRNodeStates);
            foreach (var State in XRNodeStates)
            {
                if (State.nodeType == UnityEngine.XR.XRNode.CenterEye)
                {
                    Quaternion Ort = new Quaternion();
                    Vector3 OrtV = new Vector3();
                    Vector3 OrtA = new Vector3();
                    Vector3 Pos = new Vector3();
                    Vector3 PosV = new Vector3();
                    Vector3 PosA = new Vector3();

                    bOK = State.TryGetRotation(out Ort);

                    // If this is present, we will use it. If not, we will calculate it.
                    if (!State.TryGetAngularVelocity(out OrtV))
                    {
                        OrtV = Vector3.zero;
                    }

                    // We don't use these, but continue to obtain them for logging purposes
                    State.TryGetAngularAcceleration(out OrtA);
                    State.TryGetPosition(out Pos);
                    State.TryGetVelocity(out PosV);
                    State.TryGetAcceleration(out PosA);

                    if (bOK)
                    {
                        HmdOrtOculus = Adapt(Ort);
                        HmdOrtVOculus = Adapt(OrtV);
                        HmdOrtAOculus = Adapt(OrtA);
                        HmdPosOculus = Adapt(Pos);
                        HmdPosVOculus = Adapt(PosV);
                        HmdPosAOculus = Adapt(PosA);
                    }
                }
            }
        }

        if (bOK && m_Service != null)
        {
            HMDUtils.FusionService.Quat Output;
            bool bOutputOk = m_Service.GetUpdatedOrientation(Time, HmdOrtOculus, HmdOrtVOculus, ViconInOculus.Rotation, ViconInOculus.Position, bViconPoseValid, (float)1, (float)0.0137, (float)0.00175, out Output);
            if (bOutputOk)
            {
                if (Log)
                {
                    m_Log.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35}",
                        Time, HmdTime,
                        HmdOrtOculus.X, HmdOrtOculus.Y, HmdOrtOculus.Z, HmdOrtOculus.W,
                        HmdOrtVOculus.X, HmdOrtVOculus.Y, HmdOrtVOculus.Z,
                        HmdOrtAOculus.X, HmdOrtAOculus.Y, HmdOrtAOculus.Z,
                        HmdPosOculus.X, HmdPosOculus.Y, HmdPosOculus.Z,
                        HmdPosVOculus.X, HmdPosVOculus.Y, HmdPosVOculus.Z,
                        HmdPosAOculus.X, HmdPosAOculus.Y, HmdPosAOculus.Z,
                        ViconFrameNumber,
                        ViconInOculus.Rotation.X, ViconInOculus.Rotation.Y, ViconInOculus.Rotation.Z, ViconInOculus.Rotation.W,
                        ViconInOculus.Position.X, ViconInOculus.Position.Y, ViconInOculus.Position.Z,
                        ViconOrientation.X, ViconOrientation.Y, ViconOrientation.Z, ViconOrientation.W,
                        ViconPosition.X, ViconPosition.Y, ViconPosition.Z);
                }

                Quaternion OutputOrt = new Quaternion((float)-Output.X, (float)-Output.Y, (float)Output.Z, (float)Output.W);
                Quaternion OculusOrt = new Quaternion((float)-HmdOrtOculus.X, (float)-HmdOrtOculus.Y, (float)HmdOrtOculus.Z, (float)HmdOrtOculus.W);
                transform.localPosition = new Vector3((float)ViconInOculus.Position.X, (float)ViconInOculus.Position.Y, (float)-ViconInOculus.Position.Z);
                transform.localRotation = OutputOrt * Quaternion.Inverse(OculusOrt);
            }
        }
    }
    catch (DllNotFoundException ex)
    {
        Debug.LogError(string.Format("XR must be enabled for this project to use the HMD fusion script: Error {0}", ex.Message));
    }
}
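// A minimal sketch of the Vicon-to-Oculus mapping that the commented-out conversion above
// performs, and that GetMappedVicon is assumed to encapsulate: x-forward, y-left, z-up RHS
// in mm becomes y-up, z-back RHS in m. MapViconToOculus is a hypothetical helper for
// illustration only; the actual GetMappedVicon implementation is not shown in this listing.
static HMDUtils.FusionService.Pose MapViconToOculus(HMDUtils.FusionService.Quat ViconOrientation, HMDUtils.FusionService.Vec ViconPosition)
{
    // Axis mapping: Oculus x = -Vicon y, Oculus y = Vicon z, Oculus z = -Vicon x.
    HMDUtils.FusionService.Quat Orientation = new HMDUtils.FusionService.Quat(-ViconOrientation.Y, ViconOrientation.Z, -ViconOrientation.X, ViconOrientation.W);

    // The same axis mapping applies to the translation, plus a mm-to-m scale.
    HMDUtils.FusionService.Vec Position = new HMDUtils.FusionService.Vec(-ViconPosition.Y * 0.001, ViconPosition.Z * 0.001, -ViconPosition.X * 0.001);

    return new HMDUtils.FusionService.Pose(Position, Orientation);
}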
HMDUtils.FusionService.Vec Adapt(HMDUtils.OVRPluginServices.Vector3f In)
{
    HMDUtils.FusionService.Vec Output = new HMDUtils.FusionService.Vec(In.x, In.y, In.z);
    return Output;
}
void LateUpdate()
{
    try
    {
        double Time = HMDUtils.FusionService.GetTime();
        uint ViconFrameNumber = Client.GetFrameNumber();

        // Position
        Output_GetSubjectRootSegmentName RootName = Client.GetSubjectRootSegmentName(HmdName);
        Output_GetSegmentLocalTranslation Translation = Client.GetSegmentTranslation(HmdName, RootName.SegmentName);

        // Raw Vicon position, scale is in mm. The data here is in the datastream default;
        // x-forward, y-left, z-up for the global coordinate system.
        HMDUtils.FusionService.Vec ViconPosition = new HMDUtils.FusionService.Vec(Translation.Translation[0], Translation.Translation[1], Translation.Translation[2]);

        // Orientation. The local coordinate system of the HMD object is x-right, y-up, z-back.
        Output_GetSegmentLocalRotationQuaternion Rot = Client.GetSegmentRotation(HmdName, RootName.SegmentName);

        // Raw Vicon orientation
        HMDUtils.FusionService.Quat ViconOrientation = new HMDUtils.FusionService.Quat(Rot.Rotation[0], Rot.Rotation[1], Rot.Rotation[2], Rot.Rotation[3]);

        // If we don't get a result, or the pose returned from the datastream is occluded,
        // then we will use the last known good position that we received.
        bool bViconPoseValid = true;
        if (Rot.Result != ViconDataStreamSDK.CSharp.Result.Success || Rot.Occluded || Translation.Occluded)
        {
            // We use this flag to determine whether to initialize the fusion algorithm;
            // we don't want to initialize it on occluded frames.
            bViconPoseValid = false;
            if (m_LastGoodPose != null)
            {
                ViconPosition = m_LastGoodPose.Position;
                ViconOrientation = m_LastGoodPose.Rotation;
            }
            else
            {
                // If all else fails, we will return the origin :(
                ViconOrientation = new HMDUtils.FusionService.Quat(0, 0, 0, 1);
            }
        }
        else
        {
            if (m_LastGoodPose == null)
            {
                m_LastGoodPose = new HMDUtils.FusionService.Pose(ViconPosition, ViconOrientation);
            }
            else
            {
                m_LastGoodPose.Position = ViconPosition;
                m_LastGoodPose.Rotation = ViconOrientation;
            }
        }

        // Oculus space. We need to translate to the Oculus coordinate system here so that the fusion
        // algorithm can work with all data in the same coordinate system.
        // The Vicon data comes in as z-up, x-forward RHS. We convert to y-up, z-back RHS.
        // The local coordinate system of the tracked Oculus object in the Vicon data is already y-up, z-back.
        // The conversion also scales from mm to m.
        //
        //              Vicon   Oculus   Unity
        //   forward      x       -z       z
        //   up           z        y       y
        //   right       -y        x       x
        //
        //HMDUtils.FusionService.Quat ViconOrientationInOculus = new HMDUtils.FusionService.Quat(-ViconOrientation.Y, ViconOrientation.Z, -ViconOrientation.X, ViconOrientation.W);
        //HMDUtils.FusionService.Vec ViconPositionInOculus = new HMDUtils.FusionService.Vec(-ViconPosition.Y * 0.001, ViconPosition.Z * 0.001, -ViconPosition.X * 0.001);
        //HMDUtils.FusionService.Pose ViconInOculus = new HMDUtils.FusionService.Pose(ViconPositionInOculus, ViconOrientationInOculus);
        HMDUtils.FusionService.Pose ViconInOculus = HMDUtils.FusionService.GetMappedVicon(ViconOrientation, ViconPosition);

        // The pose from the Oculus; this is already in the Oculus coordinate system - y-up, z-back, RHS
        HMDUtils.OVRPluginServices.PoseStatef HMDState = HMDUtils.OVRPluginServices.ovrp_GetNodePoseState(HMDUtils.OVRPluginServices.Step.Render, HMDUtils.OVRPluginServices.Node.EyeCenter);
        HMDUtils.OVRPluginServices.Quatf HmdOrt = HMDState.Pose.Orientation;
        HMDUtils.OVRPluginServices.Vector3f HmdOrtV = HMDState.AngularVelocity;
        HMDUtils.OVRPluginServices.Vector3f HmdOrtA = HMDState.AngularAcceleration;
        HMDUtils.OVRPluginServices.Vector3f HmdPos = HMDState.Pose.Position;
        HMDUtils.OVRPluginServices.Vector3f HmdPosV = HMDState.Velocity;
        HMDUtils.OVRPluginServices.Vector3f HmdPosA = HMDState.Acceleration;

        if (m_Service != null)
        {
            HMDUtils.FusionService.Quat Output;
            if (m_Service.GetUpdatedOrientation(Time, Adapt(HmdOrt), Adapt(HmdOrtV), Adapt(HmdOrtA), ViconInOculus.Rotation, ViconInOculus.Position, bViconPoseValid, (float)1, (float)0.0137, (float)0.00175, out Output))
            {
                if (Log)
                {
                    m_Log.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35}",
                        Time, HMDState.Time,
                        HmdOrt.x, HmdOrt.y, HmdOrt.z, HmdOrt.w,
                        HmdOrtV.x, HmdOrtV.y, HmdOrtV.z,
                        HmdOrtA.x, HmdOrtA.y, HmdOrtA.z,
                        HmdPos.x, HmdPos.y, HmdPos.z,
                        HmdPosV.x, HmdPosV.y, HmdPosV.z,
                        HmdPosA.x, HmdPosA.y, HmdPosA.z,
                        ViconFrameNumber,
                        ViconInOculus.Rotation.X, ViconInOculus.Rotation.Y, ViconInOculus.Rotation.Z, ViconInOculus.Rotation.W,
                        ViconInOculus.Position.X, ViconInOculus.Position.Y, ViconInOculus.Position.Z,
                        ViconOrientation.X, ViconOrientation.Y, ViconOrientation.Z, ViconOrientation.W,
                        ViconPosition.X, ViconPosition.Y, ViconPosition.Z);
                }

                // We are in Oculus coordinate space: y-up, z-backward RHS. We need to convert to Unity,
                // which is y-up, z-forward LHS - i.e. a reflection in the xy plane.
                Quaternion OutputOrt = new Quaternion((float)-Output.X, (float)-Output.Y, (float)Output.Z, (float)Output.W);
                Quaternion OculusOrt = new Quaternion((float)-HmdOrt.x, (float)-HmdOrt.y, (float)HmdOrt.z, (float)HmdOrt.w);
                transform.localPosition = new Vector3((float)ViconInOculus.Position.X, (float)ViconInOculus.Position.Y, (float)-ViconInOculus.Position.Z);
                transform.localRotation = OutputOrt * Quaternion.Inverse(OculusOrt);
            }
        }
    }
    catch (DllNotFoundException ex)
    {
        Debug.LogError(string.Format("XR must be enabled for this project to use the HMD fusion script: Error {0}", ex.Message));
    }
}
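// A minimal sketch of the Oculus-to-Unity conversion applied inline above when the fused pose
// is written to the transform: both spaces are y-up, but Oculus is z-back RHS and Unity is
// z-forward LHS, i.e. a reflection in the xy plane. OculusToUnity is a hypothetical helper for
// illustration; the listing performs the same component swaps directly.
static Quaternion OculusToUnity(HMDUtils.FusionService.Quat In)
{
    // Reflecting a rotation in the xy plane negates the x and y quaternion components.
    return new Quaternion((float)-In.X, (float)-In.Y, (float)In.Z, (float)In.W);
}

static Vector3 OculusToUnity(HMDUtils.FusionService.Vec In)
{
    // Positions only need the z axis flipped.
    return new Vector3((float)In.X, (float)In.Y, (float)-In.Z);
}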
Vector3 Adapt(HMDUtils.FusionService.Vec In)
{
    return new Vector3((float)In.X, (float)In.Y, (float)In.Z);
}
public override PoseDataFlags GetPoseFromProvider(out Pose output)
{
    try
    {
        double Time = HMDUtils.FusionService.GetTime();
        uint ViconFrameNumber = Client.GetFrameNumber();

        // Position
        Output_GetSubjectRootSegmentName RootName = Client.GetSubjectRootSegmentName(HmdName);
        Output_GetSegmentLocalTranslation Translation = Client.GetSegmentTranslation(HmdName, RootName.SegmentName);

        // Raw Vicon position, scale is in mm. The data here is in the datastream default;
        // x-forward, y-left, z-up for the global coordinate system.
        HMDUtils.FusionService.Vec ViconPosition = new HMDUtils.FusionService.Vec(Translation.Translation[0], Translation.Translation[1], Translation.Translation[2]);

        // Orientation. The local coordinate system of the HMD object is x-right, y-up, z-back.
        Output_GetSegmentLocalRotationQuaternion Rot = Client.GetSegmentRotation(HmdName, RootName.SegmentName);

        // Raw Vicon orientation
        HMDUtils.FusionService.Quat ViconOrientation = new HMDUtils.FusionService.Quat(Rot.Rotation[0], Rot.Rotation[1], Rot.Rotation[2], Rot.Rotation[3]);

        // If we don't get a result, or the pose returned from the datastream is occluded,
        // then we will use the last known good position that we received.
        bool bViconPoseValid = true;
        if (Rot.Result != ViconDataStreamSDK.CSharp.Result.Success || Rot.Occluded || Translation.Occluded)
        {
            // We use this flag to determine whether to initialize the fusion algorithm;
            // we don't want to initialize it on occluded frames.
            bViconPoseValid = false;
            if (m_LastGoodPose != null)
            {
                ViconPosition = m_LastGoodPose.Position;
                ViconOrientation = m_LastGoodPose.Rotation;
            }
            else
            {
                // If all else fails, we will return the origin :(
                ViconOrientation = new HMDUtils.FusionService.Quat(0, 0, 0, 1);
            }
        }
        else
        {
            if (m_LastGoodPose == null)
            {
                m_LastGoodPose = new HMDUtils.FusionService.Pose(ViconPosition, ViconOrientation);
            }
            else
            {
                m_LastGoodPose.Position = ViconPosition;
                m_LastGoodPose.Rotation = ViconOrientation;
            }
        }

        // To headset space
        HMDUtils.FusionService.Pose ViconInHMD = HMDUtils.FusionService.GetMappedVicon(ViconOrientation, ViconPosition);

        // Headset space to Unity
        ViconOrientation = ViconInHMD.Rotation;
        ViconPosition = ViconInHMD.Position;
        HMDUtils.FusionService.Quat Rotation = new HMDUtils.FusionService.Quat(-ViconOrientation.X, -ViconOrientation.Y, ViconOrientation.Z, ViconOrientation.W);
        HMDUtils.FusionService.Vec Position = new HMDUtils.FusionService.Vec(ViconPosition.X, ViconPosition.Y, -ViconPosition.Z);
        HMDUtils.FusionService.Pose ViconInUnity = new HMDUtils.FusionService.Pose(Position, Rotation);

        HMDUtils.FusionService.Quat HmdOrtUnity = new HMDUtils.FusionService.Quat(0, 0, 0, 1);
        HMDUtils.FusionService.Vec HmdOrtVUnity = new HMDUtils.FusionService.Vec(0, 0, 0);
        HMDUtils.FusionService.Vec HmdOrtAUnity = new HMDUtils.FusionService.Vec(0, 0, 0);
        HMDUtils.FusionService.Vec HmdPosUnity = new HMDUtils.FusionService.Vec(0, 0, 0);
        HMDUtils.FusionService.Vec HmdPosVUnity = new HMDUtils.FusionService.Vec(0, 0, 0);
        HMDUtils.FusionService.Vec HmdPosAUnity = new HMDUtils.FusionService.Vec(0, 0, 0);

        List<UnityEngine.XR.XRNodeState> XRNodeStates = new List<UnityEngine.XR.XRNodeState>();
        UnityEngine.XR.InputTracking.GetNodeStates(XRNodeStates);
        bool bOK = false;
        foreach (var State in XRNodeStates)
        {
            if (State.nodeType == UnityEngine.XR.XRNode.CenterEye)
            {
                Quaternion Ort = new Quaternion();
                Vector3 OrtV = new Vector3();
                Vector3 OrtA = new Vector3();
                Vector3 Pos = new Vector3();
                Vector3 PosV = new Vector3();
                Vector3 PosA = new Vector3();

                bOK = State.TryGetRotation(out Ort);

                // If this is present, we will use it. If not, we will calculate it.
                if (!State.TryGetAngularVelocity(out OrtV))
                {
                    OrtV = Vector3.zero;
                }

                // We don't use these, but continue to obtain them for logging purposes
                State.TryGetAngularAcceleration(out OrtA);
                State.TryGetPosition(out Pos);
                State.TryGetVelocity(out PosV);
                State.TryGetAcceleration(out PosA);

                if (bOK)
                {
                    HmdOrtUnity = Adapt(Ort);
                    HmdOrtVUnity = Adapt(OrtV);
                    HmdOrtAUnity = Adapt(OrtA);
                    HmdPosUnity = Adapt(Pos);
                    HmdPosVUnity = Adapt(PosV);
                    HmdPosAUnity = Adapt(PosA);
                }
            }
        }

        // Not sure whether we actually require this, plus XR doesn't give it.
        double HmdTime = 0;

        if (m_Service != null)
        {
            HMDUtils.FusionService.MathUtilsError FusionState = HMDUtils.FusionService.MathUtilsError.ENoVelocity;
            double V = HMDUtils.FusionService.ScalarVelocity(HmdOrtVUnity);
            if (V == 0 || bAlwaysCalculateVelocity)
            {
                bRequiredVelocityCalculation = true;
                FusionState = m_Service.GetVelocity(HmdOrtUnity, Time, out V);
            }
            else
            {
                bRequiredVelocityCalculation = false;
                FusionState = HMDUtils.FusionService.MathUtilsError.ESuccess;
            }

            HMDUtils.FusionService.Quat Output = new HMDUtils.FusionService.Quat(0, 0, 0, 1);
            if (FusionState == HMDUtils.FusionService.MathUtilsError.ESuccess)
            {
                if (bAltAlgo)
                {
                    FusionState = m_Service.GetUpdatedOrientationWindowed(Time, HmdOrtUnity, true, ViconInUnity.Rotation, 0, true, out Output);
                }
                else
                {
                    FusionState = m_Service.GetUpdatedOrientation(Time, HmdOrtUnity, V, ViconInUnity.Rotation, bViconPoseValid, out Output);
                }
            }

            if (FusionState != HMDUtils.FusionService.MathUtilsError.ESuccess)
            {
                // Fall back to the Vicon rotation
                Output = ViconInUnity.Rotation;
            }

            if (Log)
            {
                m_Log.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},{36}",
                    Time, HmdTime,
                    HmdOrtUnity.X, HmdOrtUnity.Y, HmdOrtUnity.Z, HmdOrtUnity.W,
                    HmdOrtVUnity.X, HmdOrtVUnity.Y, HmdOrtVUnity.Z,
                    HmdOrtAUnity.X, HmdOrtAUnity.Y, HmdOrtAUnity.Z,
                    HmdPosUnity.X, HmdPosUnity.Y, HmdPosUnity.Z,
                    HmdPosVUnity.X, HmdPosVUnity.Y, HmdPosVUnity.Z,
                    HmdPosAUnity.X, HmdPosAUnity.Y, HmdPosAUnity.Z,
                    ViconFrameNumber,
                    ViconInUnity.Rotation.X, ViconInUnity.Rotation.Y, ViconInUnity.Rotation.Z, ViconInUnity.Rotation.W,
                    ViconInUnity.Position.X, ViconInUnity.Position.Y, ViconInUnity.Position.Z,
                    ViconOrientation.X, ViconOrientation.Y, ViconOrientation.Z, ViconOrientation.W,
                    ViconPosition.X, ViconPosition.Y, ViconPosition.Z,
                    HMDUtils.FusionService.StateInString(FusionState, bRequiredVelocityCalculation));
            }

            if (FusionState == HMDUtils.FusionService.MathUtilsError.ESuccess)
            {
                output = new Pose(Adapt(ViconInUnity.Position), Adapt(Output));
                return PoseDataFlags.Position | PoseDataFlags.Rotation;
            }
        }

        if (m_LastGoodPose == null)
        {
            output = new Pose(new Vector3(0, 0, 0), new Quaternion(0, 0, 0, 1));
            return PoseDataFlags.NoData;
        }
        else
        {
            output = new Pose(Adapt(m_LastGoodPose.Position), Adapt(m_LastGoodPose.Rotation));
            Debug.LogWarning("using last position");
            return PoseDataFlags.Position | PoseDataFlags.Rotation;
        }
    }
    catch (DllNotFoundException ex)
    {
        Debug.LogError(string.Format("XR must be enabled for this project to use the HMD fusion script: Error {0}", ex.Message));

        // Guard against dereferencing m_LastGoodPose before any valid pose has been received.
        if (m_LastGoodPose == null)
        {
            output = new Pose(new Vector3(0, 0, 0), new Quaternion(0, 0, 0, 1));
        }
        else
        {
            output = new Pose(Adapt(m_LastGoodPose.Position), Adapt(m_LastGoodPose.Rotation));
        }
        return PoseDataFlags.NoData;
    }
}
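// A minimal usage sketch: GetPoseFromProvider matches the BasePoseProvider contract from
// UnityEngine.SpatialTracking (recent XR Legacy Input Helpers versions), so the fused pose can
// drive the camera through a TrackedPoseDriver. The component name ViconHmdPoseProvider is
// hypothetical; substitute the class that actually contains the override above.
using UnityEngine;
using UnityEngine.SpatialTracking;

public class FusionPoseDriverSetup : MonoBehaviour
{
    void Start()
    {
        // Assumes both components live on the camera object.
        TrackedPoseDriver Driver = GetComponent<TrackedPoseDriver>();
        BasePoseProvider Provider = GetComponent<ViconHmdPoseProvider>();
        if (Driver != null && Provider != null)
        {
            // Route head tracking through the fusion provider instead of the default XR node.
            Driver.poseProviderComponent = Provider;
        }
    }
}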