/// <summary>
/// Determines whether a Rift HMD is currently connected.
/// </summary>
/// <returns><c>true</c> if the HMD reports the connected status bit; otherwise, <c>false</c>.</returns>
private bool isRiftConnected()
{
    Hmd hmd = OVR.Hmd.GetHmd();
    // Guard: GetHmd() returns null when no device is available; the original
    // dereferenced it unchecked, unlike the sibling methods that guard HMD == null.
    if (hmd == null)
    {
        return false;
    }
    ovrTrackingState ss = hmd.GetTrackingState();
    return (ss.StatusFlags & (uint)ovrStatusBits.ovrStatus_HmdConnected) != 0;
}
/// <summary>
/// Gets the predicted head pose (position and orientation) from the tracker,
/// converting from the Rift's right-handed coordinate system to a left-handed one.
/// </summary>
/// <returns><c>true</c> if a pose was read; <c>false</c> when no HMD is present or the platform is unsupported.</returns>
/// <param name="p">Receives the head position; z is negated for the left-hand CS.</param>
/// <param name="o">Receives the head orientation; passed through OrientSensor for the CS conversion.</param>
/// <param name="predictionTime">Seconds ahead of the current sensor time to predict the pose; 0 = latest reading.</param>
public static bool GetCameraPositionOrientation(ref Vector3 p, ref Quaternion o, double predictionTime = 0.0)
{
    if (HMD == null || !SupportedPlatform)
    {
        return false;
    }

    // Query the tracking state at "now + prediction" in the sensor's time base.
    double absTimePlusPred = Hmd.GetTimeInSeconds() + predictionTime;
    ovrTrackingState ss = HMD.GetTrackingState(absTimePlusPred);

    p.x = ss.HeadPose.ThePose.Position.x;
    p.y = ss.HeadPose.ThePose.Position.y;
    p.z = -ss.HeadPose.ThePose.Position.z; // Convert to Left hand CS

    o.w = ss.HeadPose.ThePose.Orientation.w;
    o.x = ss.HeadPose.ThePose.Orientation.x;
    o.y = ss.HeadPose.ThePose.Orientation.y;
    o.z = ss.HeadPose.ThePose.Orientation.z;
    OrientSensor(ref o);
    return true;
}
/// <summary>
/// Determines if is camera tracking.
/// </summary>
/// <returns><c>true</c> if is camera tracking; otherwise, <c>false</c>.</returns>
public static bool IsCameraTracking()
{
    if (HMD == null || !SupportedPlatform)
    {
        return false;
    }

    // Positional tracking is reported through the status flags of the latest tracking state.
    uint statusFlags = HMD.GetTrackingState().StatusFlags;
    return (statusFlags & (uint)ovrStatusBits.ovrStatus_PositionTracked) != 0;
}
// * * * * * * * * * * * *
// PUBLIC FUNCTIONS
// * * * * * * * * * * * *

/// <summary>
/// Determines if is HMD present.
/// </summary>
/// <returns><c>true</c> if is HMD present; otherwise, <c>false</c>.</returns>
public static bool IsHMDPresent()
{
    if (HMD == null || !SupportedPlatform)
    {
        return false;
    }

    // The HMD-connected bit in the status flags indicates physical presence.
    uint statusFlags = HMD.GetTrackingState().StatusFlags;
    return (statusFlags & (uint)ovrStatusBits.ovrStatus_HmdConnected) != 0;
}
/// <summary>
/// Queries the Oculus runtime for the per-eye render poses and projection
/// matrices for the current frame and returns them as an <c>HmdPoseState</c>.
/// Also recenters the tracking origin when the runtime requests it, and
/// caches the sensor sample time and timewarp projection for later layer submission.
/// </summary>
/// <returns>The pose state (projection, position, rotation) for both eyes.</returns>
/// <exception cref="VeldridException">Thrown when the Oculus session status cannot be retrieved.</exception>
public unsafe override HmdPoseState WaitForPoses()
{
    ovrSessionStatus sessionStatus;
    var result = ovr_GetSessionStatus(_session, &sessionStatus);
    if (result != ovrResult.Success)
    {
        throw new VeldridException($"Failed to retrieve Oculus session status: {result}");
    }

    // Honor the runtime's request (e.g. the user pressed the recenter button).
    if (sessionStatus.ShouldRecenter)
    {
        ovr_RecenterTrackingOrigin(_session);
    }

    // Call ovr_GetRenderDesc each frame to get the ovrEyeRenderDesc, as the returned values (e.g. HmdToEyePose) may change at runtime.
    ovrEyeRenderDesc *eyeRenderDescs = stackalloc ovrEyeRenderDesc[2];
    eyeRenderDescs[0] = ovr_GetRenderDesc2(_session, ovrEyeType.Left, _hmdDesc.DefaultEyeFov[0]);
    eyeRenderDescs[1] = ovr_GetRenderDesc2(_session, ovrEyeType.Right, _hmdDesc.DefaultEyeFov[1]);

    // Get both eye poses simultaneously, with IPD offset already included.
    EyePair_ovrPosef hmdToEyePoses = new EyePair_ovrPosef(
        eyeRenderDescs[0].HmdToEyePose,
        eyeRenderDescs[1].HmdToEyePose);

    // Predicted display time for this frame index drives the pose prediction.
    double predictedTime = ovr_GetPredictedDisplayTime(_session, _frameIndex);
    // NOTE(review): trackingState is queried but never read below — ovr_GetEyePoses
    // supplies the poses directly. Possibly vestigial; confirm before removing.
    ovrTrackingState trackingState = ovr_GetTrackingState(_session, predictedTime, true);

    double sensorSampleTime; // sensorSampleTime is fed into the layer later
    // Only the positional component of the eye poses is passed as the per-eye offset.
    EyePair_Vector3 hmdToEyeOffset = new EyePair_Vector3(
        hmdToEyePoses.Left.Position,
        hmdToEyePoses.Right.Position);
    ovr_GetEyePoses(_session, _frameIndex, true, &hmdToEyeOffset, out _eyeRenderPoses, &sensorSampleTime);
    _sensorSampleTime = sensorSampleTime;

    // Render Scene to Eye Buffers
    for (int eye = 0; eye < 2; ++eye)
    {
        _rotations[eye] = _eyeRenderPoses[eye].Orientation;
        _positions[eye] = _eyeRenderPoses[eye].Position;
        // Near/far planes are fixed at 0.2 / 1000; the projection is transposed
        // before being stored (cached matrices are kept row-major for the renderer).
        Matrix4x4 proj = ovrMatrix4f_Projection(eyeRenderDescs[eye].Fov, 0.2f, 1000f, ovrProjectionModifier.None);
        _posTimewarpProjectionDesc = ovrTimewarpProjectionDesc_FromProjection(proj, ovrProjectionModifier.None);
        _projections[eye] = Matrix4x4.Transpose(proj);
    }

    return (new HmdPoseState(
        _projections[0], _projections[1],
        _positions[0], _positions[1],
        _rotations[0], _rotations[1]));
}
/// <summary>
/// Gets the angular velocity.
/// </summary>
/// <returns><c>true</c>, if angular velocity was gotten, <c>false</c> otherwise.</returns>
/// <param name="x">The x coordinate.</param>
/// <param name="y">The y coordinate.</param>
/// <param name="z">The z coordinate.</param>
public static bool GetAngularVelocity(ref float x, ref float y, ref float z)
{
    if (HMD == null || !SupportedPlatform)
    {
        return false;
    }

    // Copy the latest head-pose angular velocity (right-hand co-ordinates) into the out params.
    var velocity = HMD.GetTrackingState().HeadPose.AngularVelocity;
    x = velocity.x;
    y = velocity.y;
    z = velocity.z;
    return true;
}
// Latest absolute sensor readings (note: in right-hand co-ordinates)

/// <summary>
/// Gets the acceleration.
/// </summary>
/// <returns><c>true</c>, if acceleration was gotten, <c>false</c> otherwise.</returns>
/// <param name="x">The x coordinate.</param>
/// <param name="y">The y coordinate.</param>
/// <param name="z">The z coordinate.</param>
public static bool GetAcceleration(ref float x, ref float y, ref float z)
{
    if (HMD == null || !SupportedPlatform)
    {
        return false;
    }

    // Copy the latest head-pose linear acceleration into the out params.
    var acceleration = HMD.GetTrackingState().HeadPose.LinearAcceleration;
    x = acceleration.x;
    y = acceleration.y;
    z = acceleration.z;
    return true;
}