/// <summary>
/// Extracts the per-axis scale from an OpenVR 3x4 pose matrix as the Euclidean
/// lengths of its three basis-vector rows.
/// </summary>
/// <param name="pose">Row-major OpenVR pose matrix (rotation*scale in m0..m10).</param>
/// <returns>Scale along x, y and z.</returns>
public static Vector3 GetScale(this HmdMatrix34_t pose)
{
    // The original negated the z components (a leftover from RH->LH conversion);
    // that is a no-op under squaring, so the raw components are used directly.
    float x = (float)Math.Sqrt(pose.m0 * pose.m0 + pose.m1 * pose.m1 + pose.m2 * pose.m2);
    float y = (float)Math.Sqrt(pose.m4 * pose.m4 + pose.m5 * pose.m5 + pose.m6 * pose.m6);
    float z = (float)Math.Sqrt(pose.m8 * pose.m8 + pose.m9 * pose.m9 + pose.m10 * pose.m10);
    return new Vector3(x, y, z);
}
/// <summary>
/// Converts the rotation part of an OpenVR 3x4 pose matrix into a Unity quaternion,
/// recovering component signs from the antisymmetric off-diagonal terms.
/// Returns identity when the matrix does not hold a valid rotation.
/// </summary>
private Quaternion GetRotation(HmdMatrix34_t rawMatrix)
{
    if (!IsRotationValid(rawMatrix))
    {
        return Quaternion.identity;
    }

    float d0 = rawMatrix.m0;
    float d1 = rawMatrix.m5;
    float d2 = rawMatrix.m10;

    // Magnitudes of the quaternion components from the diagonal (clamped at 0
    // to guard against negative values from floating-point noise).
    float qw = Mathf.Sqrt(Mathf.Max(0, 1 + d0 + d1 + d2)) / 2;
    float qx = Mathf.Sqrt(Mathf.Max(0, 1 + d0 - d1 - d2)) / 2;
    float qy = Mathf.Sqrt(Mathf.Max(0, 1 - d0 + d1 - d2)) / 2;
    float qz = Mathf.Sqrt(Mathf.Max(0, 1 - d0 - d1 + d2)) / 2;

    // Signs come from the antisymmetric part of the matrix.
    CopySign(ref qx, rawMatrix.m6 - rawMatrix.m9);
    CopySign(ref qy, rawMatrix.m8 - rawMatrix.m2);
    CopySign(ref qz, rawMatrix.m4 - rawMatrix.m1);

    return new Quaternion(qx, qy, qz, qw);
}
/// <summary>
/// Converts a column-major OpenTK <c>Matrix4</c> into an OpenVR row-major 3x4 matrix
/// by transposing the relevant 3x4 portion (the source's fourth row is dropped).
/// </summary>
private static HmdMatrix34_t OpenTKMatrixToOpenVRMatrix(Matrix4 matrix)
{
    return new HmdMatrix34_t
    {
        m0 = matrix.M11, m1 = matrix.M21, m2 = matrix.M31, m3 = matrix.M41,
        m4 = matrix.M12, m5 = matrix.M22, m6 = matrix.M32, m7 = matrix.M42,
        m8 = matrix.M13, m9 = matrix.M23, m10 = matrix.M33, m11 = matrix.M43,
    };
}
/// <summary>
/// Fills <paramref name="pose"/>, <paramref name="velocity"/> and <paramref name="angVelocity"/>
/// from the cached OpenVR pose array for the given tracker index and classifies the device state.
/// The row-major 3x4 OpenVR matrix is written transposed into the 4x4: rotation into the upper
/// 3x3 columns, translation (m3, m7, m11) into M41..M43.
/// </summary>
public static DeviceState GetTrackerPose(int trackerIndex, ref Matrix pose, ref Vector3 velocity, ref Vector3 angVelocity)
{
    var index = trackerIndex;
    HmdMatrix34_t openVRPose = DevicePoses[index].mDeviceToAbsoluteTracking;
    pose.M11 = openVRPose.m0; pose.M21 = openVRPose.m1; pose.M31 = openVRPose.m2; pose.M41 = openVRPose.m3;
    pose.M12 = openVRPose.m4; pose.M22 = openVRPose.m5; pose.M32 = openVRPose.m6; pose.M42 = openVRPose.m7;
    pose.M13 = openVRPose.m8; pose.M23 = openVRPose.m9; pose.M33 = openVRPose.m10; pose.M43 = openVRPose.m11;
    // Linear velocity reported by the runtime for this device.
    HmdVector3_t vel = DevicePoses[index].vVelocity;
    velocity.X = vel.v0; velocity.Y = vel.v1; velocity.Z = vel.v2;
    // Angular velocity reported by the runtime for this device.
    HmdVector3_t avel = DevicePoses[index].vAngularVelocity;
    angVelocity.X = avel.v0; angVelocity.Y = avel.v1; angVelocity.Z = avel.v2;
    // Connected + valid pose => Valid; connected but pose invalid because the device
    // ran out of tracking range => OutOfRange; everything else => Invalid.
    var state = DeviceState.Invalid;
    if (DevicePoses[index].bDeviceIsConnected && DevicePoses[index].bPoseIsValid)
    {
        state = DeviceState.Valid;
    }
    else if (DevicePoses[index].bDeviceIsConnected && !DevicePoses[index].bPoseIsValid && DevicePoses[index].eTrackingResult == ETrackingResult.Running_OutOfRange)
    {
        state = DeviceState.OutOfRange;
    }
    return(state);
}
/// <summary>
/// Copies an OpenVR 3x4 pose into a 4x4 matrix, transposed: rotation into the upper 3x3
/// columns, translation (m3, m7, m11) into M41..M43. The fourth column (M14/M24/M34/M44)
/// is left untouched — the commented-out lines suggest the caller is expected to have
/// initialized the homogeneous column already.
/// </summary>
private static void Convert(ref HmdMatrix34_t source, ref Matrix destination)
{
    destination.M11 = source.m0; destination.M21 = source.m1; destination.M31 = source.m2; destination.M41 = source.m3;
    destination.M12 = source.m4; destination.M22 = source.m5; destination.M32 = source.m6; destination.M42 = source.m7;
    destination.M13 = source.m8; destination.M23 = source.m9; destination.M33 = source.m10; destination.M43 = source.m11;
    //destination.M14 = 0.0f;
    //destination.M24 = 0.0f;
    //destination.M34 = 0.0f;
    //destination.M44 = 1.0f;
}
// Token: 0x06005FD4 RID: 24532 RVA: 0x0021B510 File Offset: 0x00219910
/// <summary>
/// Builds a rigid transform (position + rotation) from an OpenVR 3x4 pose matrix,
/// negating the z-related terms to convert right-handed OpenVR space into Unity space.
/// </summary>
public RigidTransform(HmdMatrix34_t pose)
{
    var m = Matrix4x4.identity;
    m[0, 0] = pose.m0;  m[0, 1] = pose.m1;  m[0, 2] = -pose.m2;  m[0, 3] = pose.m3;
    m[1, 0] = pose.m4;  m[1, 1] = pose.m5;  m[1, 2] = -pose.m6;  m[1, 3] = pose.m7;
    m[2, 0] = -pose.m8; m[2, 1] = -pose.m9; m[2, 2] = pose.m10;  m[2, 3] = -pose.m11;
    this.pos = m.GetPosition();
    this.rot = m.GetRotation();
}
/// <summary>
/// Builds a view-style 4x4 matrix from an OpenVR 3x4 pose (fixed-array layout):
/// rotation rows are copied straight across (M11..M33 = m[0..10]) and M44 is set to 1.
/// </summary>
// NOTE(review): the translation mapping is unusual — m[3] (x) goes to M43, m[7] (y) to M42,
// and m[11] (z) to M41, the reverse of the M41=x convention used by other converters in this
// codebase. Confirm against the engine's matrix convention before reusing this elsewhere.
private static void PoseToViewMatrix(ref Matrix mat, ref HmdMatrix34_t pose)
{
    mat.M11 = pose.m[0]; mat.M12 = pose.m[1]; mat.M13 = pose.m[2]; mat.M43 = pose.m[3];
    mat.M21 = pose.m[4]; mat.M22 = pose.m[5]; mat.M23 = pose.m[6]; mat.M42 = pose.m[7];
    mat.M31 = pose.m[8]; mat.M32 = pose.m[9]; mat.M33 = pose.m[10]; mat.M41 = pose.m[11];
    mat.M44 = 1;
}
/// <summary>
/// Attaches this in-game overlay to a tracked device with the given local offset.
/// </summary>
/// <param name="index">OpenVR tracked-device index to attach the overlay to.</param>
/// <param name="position">Position offset relative to the device.</param>
/// <param name="rotation">Rotation offset relative to the device.</param>
/// <exception cref="InvalidOperationException">Thrown when this is a dashboard overlay (not in-game).</exception>
public void SetDeviceAttachment(uint index, Vector3 position, Vector3 rotation)
{
    if (!_ingame)
    {
        // More specific than the bare Exception thrown originally; still caught
        // by any existing catch (Exception) handlers.
        throw new InvalidOperationException("Cannot set attachment for dashboard overlay");
    }

    _position = position;
    _rotation = rotation;

    HmdMatrix34_t matrix = GetMatrixFromPositionAndRotation(position, rotation);
    EVROverlayError err = NexHudEngine.OverlayManager.SetOverlayTransformTrackedDeviceRelative(_handle, index, ref matrix);
    if (err != EVROverlayError.None)
    {
        // Interpolation produces the same text as the original concatenation.
        NexHudEngine.Log($"Failed to attach {Key} to Device {index} failed: {err}");
    }
}
/// <summary>
/// Applies a surface-size scaling to the given transform and pushes it to the overlay,
/// either relative to the HMD (device index 0) or as an absolute seated-space pose.
/// </summary>
public static unsafe void SetOverlayParams(ulong overlayId, Matrix transform, bool followsHead, Vector2 surfaceSize)
{
    Valve.VR.OpenVR.Overlay.SetOverlayWidthInMeters(overlayId, 1.0f);
    transform = Matrix.Scaling(new Vector3(surfaceSize.X, surfaceSize.Y, 1.0f)) * transform;

    // Both branches need the identical raw copy of the transform, so build it once.
    // Only SizeOf<HmdMatrix34_t> bytes are copied from the source matrix.
    HmdMatrix34_t pose = new HmdMatrix34_t();
    Utilities.CopyMemory((IntPtr)Interop.Fixed(ref pose), (IntPtr)Interop.Fixed(ref transform), Utilities.SizeOf <HmdMatrix34_t>());

    if (followsHead)
    {
        Valve.VR.OpenVR.Overlay.SetOverlayTransformTrackedDeviceRelative(overlayId, 0, ref pose);
    }
    else
    {
        Valve.VR.OpenVR.Overlay.SetOverlayTransformAbsolute(overlayId, ETrackingUniverseOrigin.TrackingUniverseSeated, ref pose);
    }
}
/// <summary>
/// Builds a rigid transform (position + rotation) from an OpenVR 3x4 pose matrix,
/// negating the z-related terms to convert right-handed OpenVR space into Unity space.
/// </summary>
public RigidTransform(HmdMatrix34_t pose)
{
    var m = Matrix4x4.identity;
    m.m00 = pose.m0;  m.m01 = pose.m1;  m.m02 = -pose.m2;  m.m03 = pose.m3;
    m.m10 = pose.m4;  m.m11 = pose.m5;  m.m12 = -pose.m6;  m.m13 = pose.m7;
    m.m20 = -pose.m8; m.m21 = -pose.m9; m.m22 = pose.m10;  m.m23 = -pose.m11;
    this.pos = m.GetPosition();
    this.rot = m.GetRotation();
}
/// <summary>
/// Set the overlay transform in SteamVR: updates the cached position/rotation, converts
/// them to an HmdMatrix34_t through the shared matrix converter, and pushes the transform
/// to OpenVR as either an absolute pose or a tracked-device-relative pose depending on
/// the configured transform type.
/// </summary>
/// <param name="newPos">The new position</param>
/// <param name="newRot">The new rotation (Euler angles, passed to Quaternion.Euler)</param>
/// <param name="setTarget">Should this be the new target position, or is this temporary?</param>
public void SetTransform(Vector3 newPos, Vector3 newRot, bool setTarget = true)
{
    pos = newPos;
    rot = newRot;
    if (setTarget)
    {
        targetPos = newPos;
        targetRot = newRot;
    }
    matrixConverter.pos = newPos;
    matrixConverter.rot = Quaternion.Euler(newRot);
    _overlayTransform = matrixConverter.ToHmdMatrix34();
    // Tracking origin is refreshed from the manager on every call.
    _overlayTransformAbsoluteTrackingOrigin = SteamVRManager.trackingSpace;
    if (!CheckValid())
    {
        return;
    }
    switch (_overlayTransformType)
    {
    // Unknown transform types fall through to the absolute case.
    default:
    case VROverlayTransformType.VROverlayTransform_Absolute:
        error = OpenVR.Overlay.SetOverlayTransformAbsolute(handle, _overlayTransformAbsoluteTrackingOrigin, ref _overlayTransform);
        break;
    case VROverlayTransformType.VROverlayTransform_TrackedDeviceRelative:
        error = OpenVR.Overlay.SetOverlayTransformTrackedDeviceRelative(handle, _overlayTransformTrackedDeviceRelativeIndex, ref _overlayTransform);
        break;
    }
}
// Calculate the current HMD-Translations Value
/// <summary>
/// Extracts Euler angles (in degrees) from the rotation part of an OpenVR 3x4 matrix.
/// </summary>
// NOTE(review): the gimbal-lock branch compares floats with == (fragile for anything that
// went through arithmetic), and reads matrix.m11 — a *translation* component of the 3x4
// layout. m10 looks like the intended operand; confirm against the matrix layout before
// relying on this branch.
public HmdVector3_t GetRotationEuler(HmdMatrix34_t matrix)
{
    HmdVector3_t v = new HmdVector3_t();
    float r2d = 180 / (float)Math.PI; // radians -> degrees
    if (matrix.m0 == 1 || matrix.m0 == -1)
    {
        // Degenerate (gimbal-lock) case: only one angle is recoverable.
        v.v0 = -((-((float)Math.Atan2(matrix.m2, matrix.m11)) * r2d));
        v.v1 = 0;
        v.v2 = 0;
    }
    else
    {
        v.v0 = -(((-(float)Math.Atan2(matrix.m8, matrix.m0)) * r2d));
        v.v1 = -(((float)Math.Atan2(matrix.m6, matrix.m5)) * r2d);
        v.v2 = (((float)Math.Asin(matrix.m4)) * r2d);
    }
    return(v);
}
/// <summary>
/// Converts an OpenVR 3x4 pose (fixed-array layout) into a double-precision 4x4 transform:
/// rotation transposed into the upper 3x3, translation (m[3], m[7], m[11]) into M41..M43,
/// with the configured FloorOffset subtracted from the Y translation.
/// </summary>
private static void PoseToTransMatrixD(ref MatrixD mat, ref HmdMatrix34_t pose)
{
    mat.M11 = pose.m[0]; mat.M12 = pose.m[4]; mat.M13 = pose.m[8];
    mat.M21 = pose.m[1]; mat.M22 = pose.m[5]; mat.M23 = pose.m[9];
    mat.M31 = pose.m[2]; mat.M32 = pose.m[6]; mat.M33 = pose.m[10];
    // m[7] is the Y translation; shift it down by the floor offset.
    mat.M41 = pose.m[3]; mat.M42 = pose.m[7] - FloorOffset; mat.M43 = pose.m[11];
    mat.M44 = 1;
}
/// <summary>
/// On enable: hooks the scriptable-render-pipeline camera callback and, when the HMD has
/// canted displays (non-parallel eye projections, detected via the eye-to-head transform),
/// builds a widened projection matrix so culling matches the rotated eye frusta.
/// </summary>
void OnEnable()
{
#if UNITY_2018
    RenderPipeline.beginCameraRendering += RenderPipeline_beginCameraRendering;
#else
    RenderPipelineManager.beginCameraRendering += RenderPipelineManager_beginCameraRendering;
#endif
    bool isMultipass = (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.MultiPass);
    m_Camera = GetComponent <Camera>();
    HmdMatrix34_t eyeToHeadL = SteamVR.instance.hmd.GetEyeToHeadTransform(EVREye.Eye_Left);
    if (eyeToHeadL.m0 < 1) //m0 = 1 for parallel projections
    {
        isCantedFov = true;
        float l_left = 0.0f, l_right = 0.0f, l_top = 0.0f, l_bottom = 0.0f;
        SteamVR.instance.hmd.GetProjectionRaw(EVREye.Eye_Left, ref l_left, ref l_right, ref l_top, ref l_bottom);
        // m0 is cos(yaw) of the eye cant, so acos recovers the cant angle.
        float eyeYawAngle = Mathf.Acos(eyeToHeadL.m0); //since there are no x or z rotations, this is y only. 10 deg on Pimax
        if (isMultipass)
        {
            eyeYawAngle *= 2; //for multipass left eye frustum is used twice? causing right eye to end up 20 deg short
        }
        float eyeHalfFov = Mathf.Atan(SteamVR.instance.tanHalfFov.x);
        float tanCorrectedEyeHalfFovH = Mathf.Tan(eyeYawAngle + eyeHalfFov); //increase horizontal fov by the eye rotation angles
        projectionMatrix.m00 = 1 / tanCorrectedEyeHalfFovH; //m00 = 0.1737 for Pimax
        //because of canting, vertical fov increases towards the corners. calculate the new maximum fov otherwise culling happens too early at corners
        float eyeFovLeft = Mathf.Atan(-l_left);
        float tanCorrectedEyeHalfFovV = SteamVR.instance.tanHalfFov.y * Mathf.Cos(eyeFovLeft) / Mathf.Cos(eyeFovLeft + eyeYawAngle);
        projectionMatrix.m11 = 1 / tanCorrectedEyeHalfFovV; //m11 = 0.3969 for Pimax
        //set the near and far clip planes (standard OpenGL-style perspective depth terms)
        projectionMatrix.m22 = -(m_Camera.farClipPlane + m_Camera.nearClipPlane) / (m_Camera.farClipPlane - m_Camera.nearClipPlane);
        projectionMatrix.m23 = -2 * m_Camera.farClipPlane * m_Camera.nearClipPlane / (m_Camera.farClipPlane - m_Camera.nearClipPlane);
        projectionMatrix.m32 = -1;
    }
    else
    {
        isCantedFov = false;
    }
}
/// <summary>
/// Converts an OpenTK 3x4 matrix into an OpenVR 3x4 matrix with a direct,
/// element-for-element (non-transposing) copy.
/// </summary>
public static HmdMatrix34_t OpenTKMatrixToOpenVRMatrix(Matrix3x4 matrix)
{
    return new HmdMatrix34_t
    {
        m0 = matrix.M11, m1 = matrix.M12, m2 = matrix.M13, m3 = matrix.M14,
        m4 = matrix.M21, m5 = matrix.M22, m6 = matrix.M23, m7 = matrix.M24,
        m8 = matrix.M31, m9 = matrix.M32, m10 = matrix.M33, m11 = matrix.M34,
    };
}
/// <summary>
/// Reads the current working standing-zero pose from the SteamVR chaperone setup and sends
/// it to the VMT driver over OSC, using the temporary address when the checkbox is ticked.
/// </summary>
private void SetRoomMatrixButton(object sender, RoutedEventArgs e)
{
    HmdMatrix34_t m = new HmdMatrix34_t();
    OpenVR.ChaperoneSetup.GetWorkingStandingZeroPoseToRawTrackingPose(ref m);

    // Same 12-float payload either way; only the OSC address differs.
    string address = SetRoomMatrixTemporaryCheckBox.IsChecked.Value
        ? "/VMT/SetRoomMatrix/Temporary"
        : "/VMT/SetRoomMatrix";
    osc.Send(new OscMessage(address, m.m0, m.m1, m.m2, m.m3, m.m4, m.m5, m.m6, m.m7, m.m8, m.m9, m.m10, m.m11));
}
/// <summary>
/// Converts an OpenVR row-major 3x4 pose into an OpenTK 4x4 matrix, transposing it
/// (translation ends up in row 3) and negating z-related terms to flip handedness.
/// </summary>
public static Matrix4 OpenVRMatrixToOpenTKMatrix4(HmdMatrix34_t matrix)
{
    var newmatrix = new Matrix4();
    newmatrix[0, 0] = matrix.m0;  newmatrix[1, 0] = matrix.m1;  newmatrix[2, 0] = -matrix.m2;  newmatrix[3, 0] = matrix.m3;
    newmatrix[0, 1] = matrix.m4;  newmatrix[1, 1] = matrix.m5;  newmatrix[2, 1] = -matrix.m6;  newmatrix[3, 1] = matrix.m7;
    newmatrix[0, 2] = -matrix.m8; newmatrix[1, 2] = -matrix.m9; newmatrix[2, 2] = matrix.m10;  newmatrix[3, 2] = -matrix.m11;
    // Bug fix: new Matrix4() is all zeros, and the original never set the homogeneous
    // [3,3] element, leaving a degenerate (non-invertible) affine matrix.
    newmatrix[3, 3] = 1;
    return(newmatrix);
}
// Borrowed from https://github.com/Marlamin/VROverlayTest/blob/master/VROverlayTest/Program.cs
/// <summary>
/// Converts an OpenVR 3x4 matrix into an OpenTK 3x4 matrix with a direct,
/// element-for-element (non-transposing) copy.
/// </summary>
public static Matrix3x4 OpenVRMatrixToOpenTKMatrix(HmdMatrix34_t matrix)
{
    var result = new Matrix3x4();
    result.M11 = matrix.m0; result.M12 = matrix.m1; result.M13 = matrix.m2; result.M14 = matrix.m3;
    result.M21 = matrix.m4; result.M22 = matrix.m5; result.M23 = matrix.m6; result.M24 = matrix.m7;
    result.M31 = matrix.m8; result.M32 = matrix.m9; result.M33 = matrix.m10; result.M34 = matrix.m11;
    return result;
}
/// <summary>
/// Initializes the HMD tracked camera: logs the camera firmware, queries the frame
/// dimensions/buffer size, allocates frame and flip buffers, creates a clamped,
/// linearly-filtered texture for the feed, and caches the camera/head transform.
/// </summary>
public TrackedCamera()
{
    Info.print("Tracked Camera Firmware: {0}", VR.getTrackedDeviceString(ETrackedDeviceProperty.Prop_CameraFirmwareDescription_String));
    if (OpenVR.TrackedCamera.GetCameraFrameSize(OpenVR.k_unTrackedDeviceIndex_Hmd, myFrameType, ref myFrameWidth, ref myFrameHeight, ref myFrameBufferSize) != EVRTrackedCameraError.None)
    {
        // Non-fatal: buffers below will be zero-length if the query failed.
        Warn.print("GetCameraFrameSize error");
    }
    myFrameBuffer = new byte[myFrameBufferSize];
    myFrameFlipBuffer = new byte[myFrameBufferSize];
    myTexture = new Texture((int)myFrameWidth, (int)myFrameHeight);
    myTexture.setWrapping(TextureWrapMode.ClampToBorder, TextureWrapMode.ClampToBorder);
    myTexture.setMinMagFilters(TextureMinFilter.Linear, TextureMagFilter.Linear);
    var err = ETrackedPropertyError.TrackedProp_Success;
    HmdMatrix34_t mat = VR.vrSystem.GetMatrix34TrackedDeviceProperty(OpenVR.k_unTrackedDeviceIndex_Hmd, ETrackedDeviceProperty.Prop_CameraToHeadTransform_Matrix34, ref err);
    // NOTE(review): the property fetched is CameraToHead, but the field is named
    // myHeadToCameraMatrix — confirm whether convertToMatrix4 inverts, or the name misleads.
    myHeadToCameraMatrix = VR.convertToMatrix4(mat);
}
/// <summary>
/// Expands an OpenVR row-major 3x4 pose into an OpenTK 4x4 matrix. The constructor is fed
/// the source column-by-column (i.e. the result is the transpose of the 3x4 block), so
/// (m3, m7, m11) become the translation row, with a homogeneous 1 in the final slot.
/// </summary>
public static void ToMatrix4(ref HmdMatrix34_t matrix, out Matrix4 result)
{
    result = new Matrix4(
        matrix.m0, matrix.m4, matrix.m8, 0,
        matrix.m1, matrix.m5, matrix.m9, 0,
        matrix.m2, matrix.m6, matrix.m10, 0,
        matrix.m3, matrix.m7, matrix.m11, 1
        );
}
/// <summary>
/// Converts <see cref="HmdMatrix34_t"/> to <see cref="Matrix"/>
/// </summary>
/// <param name="ovrMatrix34f">The input.</param>
/// <param name="matrix">The output.</param>
public static void ToMatrix(this HmdMatrix34_t ovrMatrix34f, out Matrix matrix)
{
    // NOTE: automatic conversion from RH to LH
    // The 3x4 is transposed into the 4x4 (translation in M41..M43) and every
    // z-related term is negated, which flips the handedness.
    matrix.M11 = ovrMatrix34f.m0;  matrix.M12 = ovrMatrix34f.m4;  matrix.M13 = -ovrMatrix34f.m8;  matrix.M14 = 0f;
    matrix.M21 = ovrMatrix34f.m1;  matrix.M22 = ovrMatrix34f.m5;  matrix.M23 = -ovrMatrix34f.m9;  matrix.M24 = 0f;
    matrix.M31 = -ovrMatrix34f.m2; matrix.M32 = -ovrMatrix34f.m6; matrix.M33 = ovrMatrix34f.m10;  matrix.M34 = 0f;
    matrix.M41 = ovrMatrix34f.m3;  matrix.M42 = ovrMatrix34f.m7;  matrix.M43 = -ovrMatrix34f.m11; matrix.M44 = 1f;
}
//Automatically set floor height based on controller coordinates
/// <summary>
/// Shifts the standing-universe center along its local up axis by <paramref name="offset"/>:
/// reverts to the committed chaperone working copy, offsets the translation column, commits
/// the change live, and refreshes the cached universe attributes.
/// </summary>
void AddOffsetToUniverseCenter(float offset, ref HmdMatrix34_t referenceMatrix)
{
    if (offset != 0f)
    {
        HmdMatrix34_t currentMatrix = new HmdMatrix34_t();
        CVRChaperoneSetup setup = OpenVR.ChaperoneSetup;
        // Start from the last committed state so offsets don't accumulate on a dirty copy.
        setup.RevertWorkingCopy();
        setup.GetWorkingStandingZeroPoseToRawTrackingPose(ref currentMatrix);
        // (m1, m5, m9) is the second rotation column (local up), (m3, m7, m11) the
        // translation — so this translates along the universe's own up axis.
        currentMatrix.m3 += currentMatrix.m1 * offset;
        currentMatrix.m7 += currentMatrix.m5 * offset;
        currentMatrix.m11 += currentMatrix.m9 * offset;
        setup.SetWorkingStandingZeroPoseToRawTrackingPose(ref currentMatrix);
        referenceMatrix = currentMatrix;
        setup.CommitWorkingCopy(EChaperoneConfigFile.Live);
        universePitchRollVariance = GetUniverseOffset();
        ResetAttributes();
        FloorFixed.Trigger();
    }
}
/// <summary>
/// Converts an OpenVR 3x4 pose into a Unity 4x4 matrix, negating z-related terms to
/// flip from right-handed OpenVR space into Unity's left-handed space.
/// </summary>
private static Matrix4x4 ToMatrix4x4(HmdMatrix34_t pose)
{
    var mat = Matrix4x4.identity;
    mat[0, 0] = pose.m0;  mat[0, 1] = pose.m1;  mat[0, 2] = -pose.m2;  mat[0, 3] = pose.m3;
    mat[1, 0] = pose.m4;  mat[1, 1] = pose.m5;  mat[1, 2] = -pose.m6;  mat[1, 3] = pose.m7;
    mat[2, 0] = -pose.m8; mat[2, 1] = -pose.m9; mat[2, 2] = pose.m10;  mat[2, 3] = -pose.m11;
    return mat;
}
/// <summary>
/// Copies the top three rows of a Unity 4x4 matrix straight into an OpenVR 3x4 matrix
/// (no transposition, no sign changes).
/// </summary>
public static HmdMatrix34_t ConvertMatrixUnityToOpenVR(Matrix4x4 m)
{
    return new HmdMatrix34_t
    {
        m0 = m[0, 0], m1 = m[0, 1], m2 = m[0, 2], m3 = m[0, 3],
        m4 = m[1, 0], m5 = m[1, 1], m6 = m[1, 2], m7 = m[1, 3],
        m8 = m[2, 0], m9 = m[2, 1], m10 = m[2, 2], m11 = m[2, 3],
    };
}
/// <summary>
/// Converts this transform (pos/rot, unit scale) into an OpenVR 3x4 pose matrix,
/// negating z-related terms to go from Unity's left-handed space back to OpenVR's
/// right-handed space.
/// </summary>
public HmdMatrix34_t ToHmdMatrix34()
{
    var unityMatrix = Matrix4x4.TRS(pos, rot, Vector3.one);
    return new HmdMatrix34_t
    {
        m0 = unityMatrix[0, 0],  m1 = unityMatrix[0, 1],  m2 = -unityMatrix[0, 2],  m3 = unityMatrix[0, 3],
        m4 = unityMatrix[1, 0],  m5 = unityMatrix[1, 1],  m6 = -unityMatrix[1, 2],  m7 = unityMatrix[1, 3],
        m8 = -unityMatrix[2, 0], m9 = -unityMatrix[2, 1], m10 = unityMatrix[2, 2],  m11 = -unityMatrix[2, 3],
    };
}
/// <summary>
/// Renders a text message into a GL texture and shows it on the notification overlay,
/// positioned relative to the HMD using the transform parsed from txtPos.
/// </summary>
void ShowNotification(string msg)
{
    Bitmap bitmap = new Bitmap(2160, 1440);
    // Graphics and Font are GDI+ resources; the original leaked both on every call.
    // The bitmap itself is kept alive because the Texture wrapper receives it.
    using (Graphics g = Graphics.FromImage(bitmap))
    using (Font textFont = new Font("Microsoft JhengHei", m_nFontSize))
    {
        SizeF textSize = g.MeasureString(msg, textFont);
        g.SmoothingMode = SmoothingMode.AntiAlias;
        g.InterpolationMode = InterpolationMode.HighQualityBicubic;
        g.PixelOffsetMode = PixelOffsetMode.HighQuality;
        // Background sized to the text plus a 10px padding on each side.
        Rectangle rect = new Rectangle(0, 0, (int)textSize.Width + 20, (int)textSize.Height + 20);
        g.FillRectangle(m_BackgroundBrush, rect);
        RectangleF rectf = new RectangleF(10, 10, textSize.Width, textSize.Height);
        g.DrawString(msg, textFont, m_TextBrush, rectf);
        g.Flush();
        Texture glTexture = new Texture(bitmap);
        Gl.BindTexture(glTexture);
        Texture_t texture = new Texture_t();
        texture.eType = EGraphicsAPIConvention.API_OpenGL;
        texture.eColorSpace = EColorSpace.Auto;
        texture.handle = (IntPtr)glTexture.TextureID;
        EVROverlayError error = OpenVR.Overlay.ClearOverlayTexture(m_ulOverlayHandle);
        HmdMatrix34_t matrix34 = CreateMatrix34(txtPos.Text);
        OpenVR.Overlay.SetOverlayTransformTrackedDeviceRelative(m_ulOverlayHandle, OpenVR.k_unTrackedDeviceIndex_Hmd, ref matrix34);
        error = OpenVR.Overlay.SetOverlayTexture(m_ulOverlayHandle, ref texture);
        OpenVR.Overlay.ShowOverlay(m_ulOverlayHandle);
    }
    //ClearAllTimeout();
    //SetTimeout(() => {
    //  OpenVR.Overlay.HideOverlay(m_ulOverlayHandle);
    //}, m_nNotifyTime);
}
/// <summary>
/// Decomposes an OpenVR 3x4 pose matrix into position and rotation (Unity convention:
/// z translation is negated; quaternion signs recovered from off-diagonal terms).
/// Rotation falls back to identity when the matrix holds no valid rotation.
/// </summary>
public void GetPositionAndRotation(HmdMatrix34_t rawMatrix, out Vector3 position, out Quaternion rotation)
{
    // (m3, m7, m11) is the translation column; z flips handedness.
    position = new Vector3(rawMatrix.m3, rawMatrix.m7, -rawMatrix.m11);

    if (!IsRotationValid(rawMatrix))
    {
        rotation = Quaternion.identity;
        return;
    }

    float d0 = rawMatrix.m0;
    float d1 = rawMatrix.m5;
    float d2 = rawMatrix.m10;

    float qw = Mathf.Sqrt(Mathf.Max(0, 1 + d0 + d1 + d2)) / 2;
    float qx = Mathf.Sqrt(Mathf.Max(0, 1 + d0 - d1 - d2)) / 2;
    float qy = Mathf.Sqrt(Mathf.Max(0, 1 - d0 + d1 - d2)) / 2;
    float qz = Mathf.Sqrt(Mathf.Max(0, 1 - d0 - d1 + d2)) / 2;

    // Signs come from the antisymmetric part of the matrix.
    CopySign(ref qx, rawMatrix.m6 - rawMatrix.m9);
    CopySign(ref qy, rawMatrix.m8 - rawMatrix.m2);
    CopySign(ref qz, rawMatrix.m4 - rawMatrix.m1);

    rotation = new Quaternion(qx, qy, qz, qw);
}
/// <summary>
/// Estimates the floor-height error from a reference controller's current height and
/// re-runs the floor fix with the corrected height.
/// </summary>
// NOTE(review): when alpha is exactly 0 neither branch runs and FixFloor(0) is called —
// confirm that is intended. The two branches compute error with different formulas
// (alpha - controllerPosOnFloor vs controllerPosOnFloor - alpha); presumably
// controllerPosOnFloor is the expected controller height above the floor — verify.
void RemoveErrorOffset(TrackedDevicePose_t[] poses)
{
    Debug.Log("Hold controllers in front of you. Removing error offset ....");
    HmdMatrix34_t controllerPos = poses [GetReferenceControllerIndex(poses)].mDeviceToAbsoluteTracking;
    // m7 is the Y (height) translation component of the 3x4 pose.
    float alpha = controllerPos.m7 - offsetToFloor;
    float error = 0f;
    float height = 0f;
    if (alpha > 0f)
    {
        error = alpha - controllerPosOnFloor;
        height = GetHeight() + error;
    }
    else if (alpha < 0f)
    {
        error = controllerPosOnFloor - alpha;
        height = GetHeight() - error;
    }
    FixFloor(height);
}
/// <summary>
/// Builds a rigid transform (position + rotation) from an OpenVR 3x4 pose matrix,
/// negating z-related terms to convert right-handed OpenVR space into Unity space.
/// </summary>
public RigidTransform(HmdMatrix34_t pose)
{
    var mat = Matrix4x4.identity;
    mat[0, 0] = pose.m0;  mat[0, 1] = pose.m1;  mat[0, 2] = -pose.m2;  mat[0, 3] = pose.m3;
    mat[1, 0] = pose.m4;  mat[1, 1] = pose.m5;  mat[1, 2] = -pose.m6;  mat[1, 3] = pose.m7;
    mat[2, 0] = -pose.m8; mat[2, 1] = -pose.m9; mat[2, 2] = pose.m10;  mat[2, 3] = -pose.m11;
    this.pos = mat.GetPosition();
    this.rot = mat.GetRotation();
}
// from SteamVr
/// <summary>
/// Decomposes an OpenVR 3x4 pose into a Unity position and rotation, converting from
/// right-handed OpenVR space to left-handed Unity space via the z-term sign flips.
/// </summary>
public static void RigidTransform(HmdMatrix34_t pose, ref Vector3 Position, ref Quaternion Rotation)
{
    var mat = Matrix4x4.identity;
    mat[0, 0] = pose.m0;  mat[0, 1] = pose.m1;  mat[0, 2] = -pose.m2;  mat[0, 3] = pose.m3;
    mat[1, 0] = pose.m4;  mat[1, 1] = pose.m5;  mat[1, 2] = -pose.m6;  mat[1, 3] = pose.m7;
    mat[2, 0] = -pose.m8; mat[2, 1] = -pose.m9; mat[2, 2] = pose.m10;  mat[2, 3] = -pose.m11;
    Position = GetPosition(mat);
    Rotation = GetRotation(mat);
}
/// <summary>Computes the tracking-space transform corresponding to a point on the overlay's surface.</summary>
public abstract EVROverlayError GetTransformForOverlayCoordinates(ulong ulOverlayHandle,ETrackingUniverseOrigin eTrackingOrigin,HmdVector2_t coordinatesInOverlay,ref HmdMatrix34_t pmatTransform);
/// <summary>
/// Thin checked wrapper over the native IVROverlay entry point: converts a point on the
/// overlay surface into a tracking-space transform written to pmatTransform.
/// </summary>
public override EVROverlayError GetTransformForOverlayCoordinates(ulong ulOverlayHandle,ETrackingUniverseOrigin eTrackingOrigin,HmdVector2_t coordinatesInOverlay,ref HmdMatrix34_t pmatTransform)
{
    CheckIfUsable();
    EVROverlayError result = VRNativeEntrypoints.VR_IVROverlay_GetTransformForOverlayCoordinates(m_pVROverlay,ulOverlayHandle,eTrackingOrigin,coordinatesInOverlay,ref pmatTransform);
    return result;
}
/// <summary>Sets the standing-zero pose in the chaperone working copy (not committed until CommitWorkingCopy).</summary>
public abstract void SetWorkingStandingZeroPoseToRawTrackingPose(ref HmdMatrix34_t pMatStandingZeroPoseToRawTrackingPose);
/// <summary>Positions an overlay relative to a tracked device via a device-to-overlay transform.</summary>
public abstract VROverlayError SetOverlayTransformTrackedDeviceRelative(ulong ulOverlayHandle,uint unTrackedDevice,ref HmdMatrix34_t pmatTrackedDeviceToOverlayTransform);
/// <summary>Positions an overlay at an absolute pose in the given tracking universe.</summary>
public abstract VROverlayError SetOverlayTransformAbsolute(ulong ulOverlayHandle,TrackingUniverseOrigin eTrackingOrigin,ref HmdMatrix34_t pmatTrackingOriginToOverlayTransform);
/// <summary>Fetches the live (committed) seated-zero pose; returns false if unavailable.</summary>
public abstract bool GetLiveSeatedZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatSeatedZeroPoseToRawTrackingPose);
/// <summary>Positions the system keyboard at an absolute pose in the given tracking universe.</summary>
public abstract void SetKeyboardTransformAbsolute(ETrackingUniverseOrigin eTrackingOrigin,ref HmdMatrix34_t pmatTrackingOriginToKeyboardTransform);
/** Passes back the pose matrix from the last successful call to GetTrackerFromHeadPose(). Returns true if that matrix is
 * valid (because there has been a previous successful pose.) */
// Thin wrapper over the native SteamVR entry point; no state is kept on the managed side.
public bool GetLastTrackerFromHeadPose(ref HmdMatrix34_t pmPose)
{
    return SteamVR.GetLastTrackerFromHeadPose(m_pNativeObject, ref pmPose);
}
// P/Invoke into the native SteamVR runtime; pmPose receives the last tracker-from-head pose.
public static extern bool GetLastTrackerFromHeadPose(System.IntPtr pHmd, ref HmdMatrix34_t pmPose);
/// <summary>
/// Fetches all working-copy tag poses. The first native call (null buffer) queries the
/// count, the second fills the freshly allocated buffer; the returned flag is from the
/// second call.
/// </summary>
public override bool GetWorkingTagPoses(out HmdMatrix34_t [] pTagPosesBuffer)
{
    CheckIfUsable();
    uint punTagPosesCount = 0;
    bool result = VRNativeEntrypoints.VR_IVRChaperoneSetup_GetWorkingTagPoses(m_pVRChaperoneSetup,null,ref punTagPosesCount);
    pTagPosesBuffer= new HmdMatrix34_t[punTagPosesCount];
    result = VRNativeEntrypoints.VR_IVRChaperoneSetup_GetWorkingTagPoses(m_pVRChaperoneSetup,pTagPosesBuffer,ref punTagPosesCount);
    return result;
}
/// <summary>
/// Looks up a working-copy tag pose by name. pflScale is zeroed before the native call
/// so stale caller values never leak through on failure.
/// </summary>
public override bool GetWorkingTagPoseByName(string pchTagName,ref HmdMatrix34_t pmatTagPose,ref float pflScale)
{
    CheckIfUsable();
    pflScale = 0;
    bool result = VRNativeEntrypoints.VR_IVRChaperoneSetup_GetWorkingTagPoseByName(m_pVRChaperoneSetup,pchTagName,ref pmatTagPose,ref pflScale);
    return result;
}
/// <summary>Thin checked wrapper: reads the standing-zero pose from the chaperone working copy.</summary>
public override bool GetWorkingStandingZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatStandingZeroPoseToRawTrackingPose)
{
    CheckIfUsable();
    bool result = VRNativeEntrypoints.VR_IVRChaperoneSetup_GetWorkingStandingZeroPoseToRawTrackingPose(m_pVRChaperoneSetup,ref pmatStandingZeroPoseToRawTrackingPose);
    return result;
}
/// <summary>Thin checked wrapper: writes the standing-zero pose into the chaperone working copy.</summary>
public override void SetWorkingStandingZeroPoseToRawTrackingPose(ref HmdMatrix34_t pMatStandingZeroPoseToRawTrackingPose)
{
    CheckIfUsable();
    VRNativeEntrypoints.VR_IVRChaperoneSetup_SetWorkingStandingZeroPoseToRawTrackingPose(m_pVRChaperoneSetup,ref pMatStandingZeroPoseToRawTrackingPose);
}
/// <summary>Applies a 3x4 transform to a tracked-device pose, writing the result to pOutputPose.</summary>
public abstract void ApplyTransform(ref TrackedDevicePose_t pOutputPose,ref TrackedDevicePose_t pTrackedDevicePose,ref HmdMatrix34_t pTransform);
/// <summary>Thin checked wrapper: positions the system keyboard at an absolute tracking-space pose.</summary>
public override void SetKeyboardTransformAbsolute(ETrackingUniverseOrigin eTrackingOrigin,ref HmdMatrix34_t pmatTrackingOriginToKeyboardTransform)
{
    CheckIfUsable();
    VRNativeEntrypoints.VR_IVROverlay_SetKeyboardTransformAbsolute(m_pVROverlay,eTrackingOrigin,ref pmatTrackingOriginToKeyboardTransform);
}
/// <summary>Looks up a working-copy tag pose (and its scale) by name; returns false if not found.</summary>
public abstract bool GetWorkingTagPoseByName(string pchTagName,ref HmdMatrix34_t pmatTagPose,ref float pflScale);
/// <summary>Fetches all tag poses from the chaperone working copy into a newly allocated buffer.</summary>
public abstract bool GetWorkingTagPoses(out HmdMatrix34_t [] pTagPosesBuffer);
/// <summary>Thin checked wrapper: positions an overlay at an absolute pose in the given tracking universe.</summary>
public override VROverlayError SetOverlayTransformAbsolute(ulong ulOverlayHandle,TrackingUniverseOrigin eTrackingOrigin,ref HmdMatrix34_t pmatTrackingOriginToOverlayTransform)
{
    CheckIfUsable();
    VROverlayError result = VRNativeEntrypoints.VR_IVROverlay_SetOverlayTransformAbsolute(m_pVROverlay,ulOverlayHandle,eTrackingOrigin,ref pmatTrackingOriginToOverlayTransform);
    return result;
}
/// <summary>Thin checked wrapper: applies a 3x4 transform to a tracked-device pose via the native IVRSystem.</summary>
public override void ApplyTransform(ref TrackedDevicePose_t pOutputPose,ref TrackedDevicePose_t pTrackedDevicePose,ref HmdMatrix34_t pTransform)
{
    CheckIfUsable();
    VRNativeEntrypoints.VR_IVRSystem_ApplyTransform(m_pVRSystem,ref pOutputPose,ref pTrackedDevicePose,ref pTransform);
}
// P/Invoke into the native SteamVR runtime: predicts the tracker-from-head pose
// fPredictedSecondsFromNow into the future; peResult receives tracking status details.
public static extern bool GetTrackerFromHeadPose(System.IntPtr pHmd, float fPredictedSecondsFromNow, ref HmdMatrix34_t pmPose, ref HmdTrackingResult peResult);
/// <summary>Thin checked wrapper: positions an overlay relative to a tracked device.</summary>
public override VROverlayError SetOverlayTransformTrackedDeviceRelative(ulong ulOverlayHandle,uint unTrackedDevice,ref HmdMatrix34_t pmatTrackedDeviceToOverlayTransform)
{
    CheckIfUsable();
    VROverlayError result = VRNativeEntrypoints.VR_IVROverlay_SetOverlayTransformTrackedDeviceRelative(m_pVROverlay,ulOverlayHandle,unTrackedDevice,ref pmatTrackedDeviceToOverlayTransform);
    return result;
}
// ------------------------------------
// Tracking Methods
// ------------------------------------
/** The pose that the tracker thinks that the HMD will be in at the specified
 * number of seconds into the future. Pass 0 to get the current state.
 *
 * This is roughly analogous to the inverse of the view matrix in most applications, though
 * many games will need to do some additional rotation or translation on top of the rotation
 * and translation provided by the head pose.
 *
 * If this function returns true the pose has been populated with a pose that can be used by the application.
 * Check peResult for details about the pose, including messages that should be displayed to the user. */
// Thin wrapper over the native SteamVR entry point; no state is kept on the managed side.
public bool GetTrackerFromHeadPose(float fPredictedSecondsFromNow, ref HmdMatrix34_t pmPose, ref HmdTrackingResult peResult)
{
    return SteamVR.GetTrackerFromHeadPose(m_pNativeObject, fPredictedSecondsFromNow, ref pmPose, ref peResult);
}
/// <summary>Reads the standing-zero pose from the chaperone working copy; returns false if unavailable.</summary>
public abstract bool GetWorkingStandingZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatStandingZeroPoseToRawTrackingPose);