// Prepares culling/command state for rendering the given eye. When alternate-eye
// rendering is enabled, only one eye is fully prepared per frame.
private void PrepareFrameForEye(EVREye eye)
{
    // Fix (idiom): the original used a verbose if/else to assign a boolean.
    // Even frames skip the left eye, odd frames skip the right eye.
    m_skipLeftEye = Time.frameCount % 2 == 0;

    DoUglyCameraHack();
    FixHeadAttachedFlashlightPos();

    m_fpsCamera.m_cullingCamera.RunVisibilityOnPreCull();
    m_fpsCamera.m_preRenderCmds.Clear();
    m_fpsCamera.m_beforeForwardAlpahCmds.Clear();

    if (VRConfig.configAlternateEyeRendering.Value)
    {
        // Bail out before PrepareFrame() for the eye being skipped this frame.
        if (m_skipLeftEye && eye == EVREye.Eye_Left)
        {
            return;
        }
        if (!m_skipLeftEye && eye == EVREye.Eye_Right)
        {
            return;
        }
    }

    PrepareFrame();
}
// Renders all registered SteamVR cameras for one eye, applying per-eye pose,
// scene render texture, and per-eye culling-mask layers.
private void RenderEye(SteamVR vr, EVREye eye)
{
    SteamVR_Render.eye = eye;
    if (this.cameraMask != null)
    {
        this.cameraMask.Set(vr, eye);
    }
    foreach (SteamVR_Camera steamVR_Camera in this.cameras)
    {
        // Per-eye pose relative to the head.
        steamVR_Camera.transform.localPosition = vr.eyes[(int)eye].pos;
        steamVR_Camera.transform.localRotation = vr.eyes[(int)eye].rot;
        // Bug fix: cameraMask was dereferenced unconditionally here even though
        // the Set() call above explicitly allows it to be null.
        if (this.cameraMask != null)
        {
            this.cameraMask.transform.position = steamVR_Camera.transform.position;
        }
        Camera camera = steamVR_Camera.camera;
        camera.targetTexture = SteamVR_Camera.GetSceneTexture(false);
        // Swap in the per-eye layer mask, render, then restore the original mask.
        int cullingMask = camera.cullingMask;
        if (eye == EVREye.Eye_Left)
        {
            camera.cullingMask &= ~this.rightMask;
            camera.cullingMask |= this.leftMask;
        }
        else
        {
            camera.cullingMask &= ~this.leftMask;
            camera.cullingMask |= this.rightMask;
        }
        camera.Render();
        camera.cullingMask = cullingMask;
    }
}
/// <summary>
/// Updates the game cameras to the correct position, according to the given HMD eye pose.
/// </summary>
/// <param name="eye">Which eye (left or right) is being updated</param>
/// <param name="hmdTransform">Pose of the HMD, in the device space coordinate system</param>
/// <param name="hmdEyeTransform">Pose of the eye relative to the HMD, in the device space coordinate system</param>
public void UpdateScene(
    EVREye eye,
    SteamVR_Utils.RigidTransform hmdTransform,
    SteamVR_Utils.RigidTransform hmdEyeTransform)
{
    switch (HighLogic.LoadedScene)
    {
        case GameScenes.FLIGHT:
            // In flight, IVA (cockpit interior) and EVA views use different camera rigs.
            // Note: neither branch runs for a non-IVA, non-EVA flight view.
            if (CameraManager.Instance.currentCameraMode == CameraManager.CameraMode.IVA)
            {
                UpdateFlightIvaScene(eye, hmdTransform, hmdEyeTransform);
            }
            else if (FlightGlobals.ActiveVessel.isEVA)
            {
                UpdateFlightEvaScene(eye, hmdTransform, hmdEyeTransform);
            }
            break;
        case GameScenes.EDITOR:
            UpdateEditorScene(eye, hmdTransform, hmdEyeTransform);
            break;
        default:
            throw new Exception("Cannot setup VR scene, current scene \"" + HighLogic.LoadedScene + "\" is invalid.");
    }
    // Cache the world-space HMD pose for other consumers.
    HmdPosition = CurrentPosition + CurrentRotation * hmdTransform.pos;
    HmdRotation = CurrentRotation * hmdTransform.rot;
}
// Updates the internal (IVA) and flight cameras from the HMD eye pose while in flight.
private void UpdateFlightScene(
    EVREye eye,
    SteamVR_Utils.RigidTransform hmdTransform,
    SteamVR_Utils.RigidTransform hmdEyeTransform)
{
    // in flight, don't allow movement of the origin point
    CurrentPosition = InitialPosition;
    CurrentRotation = InitialRotation;

    // Fix: removed the unused local `positionToHmd`.
    // Eye position = HMD position plus the eye's fixed offset rotated by the HMD orientation.
    Vector3 positionToEye = hmdTransform.pos + hmdTransform.rot * hmdEyeTransform.pos;

    // translate device space to Unity space, with world scaling
    Vector3 updatedPosition = DevicePoseToWorld(positionToEye);
    Quaternion updatedRotation = DevicePoseToWorld(hmdTransform.rot);

    // Update the internal camera directly, then mirror it onto the flight camera
    // by converting from internal space to world space.
    InternalCamera.Instance.transform.position = updatedPosition;
    InternalCamera.Instance.transform.rotation = updatedRotation;
    FlightCamera.fetch.transform.position = InternalSpace.InternalToWorld(InternalCamera.Instance.transform.position);
    FlightCamera.fetch.transform.rotation = InternalSpace.InternalToWorld(InternalCamera.Instance.transform.rotation);

    // store the eyeball position
    HmdEyePosition[(int)eye] = updatedPosition;
    HmdEyeRotation[(int)eye] = updatedRotation;
}
// Updates the main-menu cameras and UI screen from the HMD eye pose.
private void UpdateMainMenuScene(
    EVREye eye,
    SteamVR_Utils.RigidTransform hmdTransform,
    SteamVR_Utils.RigidTransform hmdEyeTransform)
{
    // lock in the initial rotation
    CurrentRotation = InitialRotation;

    // position should be based on where we need to look at the main menu. need
    // to keep track of when the stage position changes; ease toward the target
    // (0.1 units per call) instead of snapping.
    CurrentPosition = Vector3.MoveTowards(CurrentPosition, mainMenuLogic.camPivots[mainMenuLogic.currentStage].targetPoint.position, 0.1f);

    // Fix: removed commented-out dead code.
    // Eye position = HMD position plus the eye offset rotated by the HMD orientation.
    Vector3 positionToEye = hmdTransform.pos + hmdTransform.rot * hmdEyeTransform.pos;

    // translate device space to Unity space, with world scaling
    Vector3 updatedPosition = DevicePoseToWorld(positionToEye);
    Quaternion updatedRotation = DevicePoseToWorld(hmdTransform.rot);

    // update the menu scene camera
    landscapeCamera.transform.position = updatedPosition;
    landscapeCamera.transform.rotation = updatedRotation;

    // the sky camera only tracks rotation
    galaxyCamera.transform.rotation = updatedRotation;

    // store the eyeball position
    HmdEyePosition[(int)eye] = updatedPosition;
    HmdEyeRotation[(int)eye] = updatedRotation;

    // update the UI screen, offset from the current origin
    mainMenuUiScreen.transform.position = CurrentPosition + new Vector3(1f, 0f, 1f);
}
/// <summary>
/// Renders a set of cameras onto a RenderTexture, and submits the frame to the HMD.
/// </summary>
/// <exception cref="Exception">Thrown when the OpenVR compositor rejects the frame.</exception>
private void RenderHmdCameras(
    EVREye eye,
    SteamVR_Utils.RigidTransform hmdTransform,
    SteamVR_Utils.RigidTransform hmdEyeTransform,
    RenderTexture hmdEyeRenderTexture,
    Texture_t hmdEyeTexture)
{
    /**
     * hmdEyeTransform is in a coordinate system that follows the headset, where
     * the origin is the headset device position. Therefore the eyes are at a constant
     * offset from the device. hmdEyeTransform does not change (per eye).
     * hmdEyeTransform.x+ towards the right of the headset
     * hmdEyeTransform.y+ towards the top the headset
     * hmdEyeTransform.z+ towards the front of the headset
     *
     * hmdTransform is in a coordinate system set in physical space, where the
     * origin is the initial seated position. Or for room-scale, the physical origin of the room.
     * hmdTransform.x+ towards the right
     * hmdTransform.y+ upwards
     * hmdTransform.z+ towards the front
     *
     * Scene.InitialPosition and Scene.InitialRotation are the Unity world coordinates where
     * we initialize the VR scene, i.e. the origin of a coordinate system that maps
     * 1-to-1 with physical space.
     *
     * 1. Calculate the position of the eye in the physical coordinate system.
     * 2. Transform the calculated position into Unity world coordinates, offset from
     *    InitialPosition and InitialRotation.
     */

    // Fix: removed the unused local `positionToEye` — UpdateScene performs the
    // same eye-position computation internally.

    // update position of the cameras
    Scene.Instance.UpdateScene(eye, hmdTransform, hmdEyeTransform);

    // render the set of cameras
    for (int i = 0; i < Scene.Instance.NumVRCameras; i++)
    {
        Types.CameraData camData = Scene.Instance.VRCameras[i];

        // set the per-eye projection matrix
        camData.camera.projectionMatrix = (eye == EVREye.Eye_Left)
            ? camData.hmdProjectionMatrixL
            : camData.hmdProjectionMatrixR;

        // set texture to render to, then render
        camData.camera.targetTexture = hmdEyeRenderTexture;
        camData.camera.Render();
    }

    hmdEyeTexture.handle = hmdEyeRenderTexture.GetNativeTexturePtr();

    // Submit frames to HMD
    EVRCompositorError vrCompositorError = OpenVR.Compositor.Submit(eye, ref hmdEyeTexture, ref hmdTextureBounds, EVRSubmitFlags.Submit_Default);
    if (vrCompositorError != EVRCompositorError.None)
    {
        throw new Exception("Submit (" + eye + ") failed: (" + (int)vrCompositorError + ") " + vrCompositorError.ToString());
    }
}
// Smoke test: the compositor should hand back a non-empty mirror bitmap for the given eye.
public void ShouldGetMirrorTextureSuccessfully(EVREye eye)
{
    var mirror = compositor.GetMirrorImage(eye);

    mirror.Width.Should().BeGreaterThan(0);
    mirror.Height.Should().BeGreaterThan(0);
}
// Assigns the (lazily created, statically cached) hidden-area mesh for this eye
// to the mask's mesh filter.
public void Set(SteamVR vr, EVREye eye)
{
    int index = (int)eye;
    if (SteamVR_CameraMask.hiddenAreaMeshes[index] == null)
    {
        // Build the mesh once per eye from the HMD-reported geometry.
        SteamVR_CameraMask.hiddenAreaMeshes[index] = SteamVR_Utils.CreateHiddenAreaMesh(vr.hmd.GetHiddenAreaMesh(eye), vr.textureBounds[index]);
    }
    this.meshFilter.mesh = SteamVR_CameraMask.hiddenAreaMeshes[index];
}
// Token: 0x06001ED8 RID: 7896 RVA: 0x0009C883 File Offset: 0x0009AA83
// Zeroes the out-parameters, then lets the native function table fill them in.
public void GetProjectionRaw(EVREye eEye, ref float pfLeft, ref float pfRight, ref float pfTop, ref float pfBottom)
{
    pfLeft = pfRight = pfTop = pfBottom = 0f;
    this.FnTable.GetProjectionRaw(eEye, ref pfLeft, ref pfRight, ref pfTop, ref pfBottom);
}
// Assigns the (lazily created, statically cached) hidden-area mesh for this eye
// to the mask's mesh filter.
public void Set(SteamVR vr, EVREye eye)
{
    int index = (int)eye;
    if (SteamVR_CameraMask.hiddenAreaMeshes[index] == null)
    {
        // Build the standard hidden-area mesh once per eye from the HMD-reported geometry.
        SteamVR_CameraMask.hiddenAreaMeshes[index] = SteamVR_CameraMask.CreateHiddenAreaMesh(vr.hmd.GetHiddenAreaMesh(eye, EHiddenAreaMeshType.k_eHiddenAreaMesh_Standard), vr.textureBounds[index]);
    }
    this.meshFilter.mesh = SteamVR_CameraMask.hiddenAreaMeshes[index];
}
// Zeroes the viewport out-parameters, then lets the native function table fill them in.
public void GetEyeOutputViewport(EVREye eEye, ref uint pnX, ref uint pnY, ref uint pnWidth, ref uint pnHeight)
{
    pnX = pnY = pnWidth = pnHeight = 0u;
    this.FnTable.GetEyeOutputViewport(eEye, ref pnX, ref pnY, ref pnWidth, ref pnHeight);
}
// Position component of the eye-to-head transform for the given eye,
// or Vector3.zero when no VR system is available.
public Vector3 GetEyeTransform(EVREye eye)
{
    if (!SysExists)
    {
        return Vector3.zero;
    }
    var eyeToHead = new OVR_Utils.RigidTransform(VRSystem.GetEyeToHeadTransform(eye));
    return eyeToHead.pos;
}
/// <summary>
/// <inheritdoc/><br/><br/>
///
/// <strong>Warning:</strong> this is a pretty slow method.
/// It's fine to use this for one-off captures, but if you require
/// something like a constant stream of the headset view, I recommend
/// digging into a lower-level implementation.
/// </summary>
/// <inheritdoc/>
public Bitmap GetMirrorImage(EVREye eye = EVREye.Eye_Left)
{
    // Ask the compositor for a D3D11 shader-resource view of the mirror texture.
    var srvPtr = IntPtr.Zero;
    var result = OpenVR.Compositor.GetMirrorTextureD3D11(eye, device.NativePointer, ref srvPtr);
    if (result != EVRCompositorError.None)
    {
        throw new OpenVRSystemException<EVRCompositorError>("Failed to get mirror texture from OpenVR", result);
    }
    var srv = new ShaderResourceView(srvPtr);
    var tex = srv.Resource.QueryInterface<Texture2D>();
    var texDesc = tex.Description;
    var bitmap = new Bitmap(texDesc.Width, texDesc.Height);
    var boundsRect = new Rectangle(0, 0, texDesc.Width, texDesc.Height);
    // The GPU texture is not directly CPU-readable: stage a copy with
    // Staging usage + Read access, matching the source format/size.
    using (var cpuTex = new Texture2D(device, new Texture2DDescription
    {
        CpuAccessFlags = CpuAccessFlags.Read,
        BindFlags = BindFlags.None,
        Format = texDesc.Format,
        Width = texDesc.Width,
        Height = texDesc.Height,
        OptionFlags = ResourceOptionFlags.None,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1, Quality = 0 },
        Usage = ResourceUsage.Staging
    }))
    {
        // Copy texture to RAM so CPU can read from it
        device.ImmediateContext.CopyResource(tex, cpuTex);
        // The mirror texture is released as soon as the copy is queued.
        OpenVR.Compositor.ReleaseMirrorTextureD3D11(srvPtr);
        var mapSource = device.ImmediateContext.MapSubresource(cpuTex, 0, MapMode.Read, MapFlags.None);
        var mapDest = bitmap.LockBits(boundsRect, ImageLockMode.WriteOnly, bitmap.PixelFormat);
        var sourcePtr = mapSource.DataPointer;
        var destPtr = mapDest.Scan0;
        // Copy row by row: the mapped GPU row pitch and the bitmap stride may
        // both be wider than width*4 bytes, so a single bulk copy would skew rows.
        for (int y = 0; y < texDesc.Height; y++)
        {
            Utilities.CopyMemory(destPtr, sourcePtr, texDesc.Width * 4);
            sourcePtr = IntPtr.Add(sourcePtr, mapSource.RowPitch);
            destPtr = IntPtr.Add(destPtr, mapDest.Stride);
        }
        bitmap.UnlockBits(mapDest);
        device.ImmediateContext.UnmapSubresource(cpuTex, 0);
    }
    // NOTE(review): FlipChannels presumably corrects the channel order
    // (e.g. BGRA vs RGBA) — confirm against its implementation.
    FlipChannels(ref bitmap);
    return(bitmap);
}
// Stores the transform used to draw the hidden-area mesh for the given eye.
public void SetHiddenAreaMeshXform(Transform hidden_area_mesh_xform, EVREye eye)
{
    if (eye != EVREye.Eye_Left)
    {
        m_hidden_area_mesh_right_xform = hidden_area_mesh_xform;
    }
    else
    {
        m_hidden_area_mesh_left_xform = hidden_area_mesh_xform;
    }
}
// Composes the view-projection matrix for the requested eye; any other value
// is a programming error.
private Matrix4 CurrentViewProjMatrix(EVREye eye)
{
    if (eye == EVREye.Eye_Left)
    {
        return hmdViewMatrix * leftEyeView * leftEyeProj;
    }
    if (eye == EVREye.Eye_Right)
    {
        return hmdViewMatrix * rightEyeView * rightEyeProj;
    }
    throw new ArgumentOutOfRangeException(nameof(eye));
}
// Builds an off-center projection for the given eye from the raw frustum tangents.
private Matrix GetProjection(EVREye eye)
{
    // Dunno why _vrSystem.GetProjectionMatrix doesn't work. Maybe different semantics of the matrix?
    const float near = 0.5f;
    const float far = 20.0f;
    float left = 0f, right = 0f, top = 0f, bottom = 0f;
    _vrSystem.GetProjectionRaw(eye, ref left, ref right, ref top, ref bottom);
    // Scale the raw values by the near-plane distance to get frustum extents
    // at the near clip plane. Note the bottom/top argument order.
    return Matrix.CreatePerspectiveOffCenter(left * near, right * near, bottom * near, top * near, near, far);
}
// View matrix for the given eye: inverse HMD pose followed by the inverse
// eye-to-head transform.
private Matrix GetViewMatrix(EVREye eye)
{
    var headPose = poses[OpenVR.k_unTrackedDeviceIndex_Hmd];
    var headTransform = headPose.mDeviceToAbsoluteTracking.Convert();
    headTransform.Invert();
    Matrix eyeToHead = OpenVR.System.GetEyeToHeadTransform(eye).Convert();
    eyeToHead.Invert();
    return headTransform * eyeToHead;
}
// Submits a color texture for one eye to the OpenVR compositor.
// Only the D3D10/10.1/11 backends are wired up; DirectX12 and Vulkan would need
// their own ETextureType handling here.
private void SubmitTexture(CVRCompositor compositor, GPUTexture colorTex, EVREye eye)
{
    var renderer = GPUDevice.RendererType;
    bool isSupportedDx = renderer == RendererType.DirectX10
                      || renderer == RendererType.DirectX10_1
                      || renderer == RendererType.DirectX11;
    if (!isSupportedDx)
    {
        throw new Exception($"Renderer '{renderer}' is not yet supported");
    }

    Texture_t texT;
    texT.handle = colorTex.NativePtr;
    texT.eColorSpace = EColorSpace.Gamma;
    texT.eType = ETextureType.DirectX;

    // Submit the full texture (uv 0..1).
    VRTextureBounds_t boundsT;
    boundsT.uMin = 0;
    boundsT.vMin = 0;
    boundsT.uMax = 1;
    boundsT.vMax = 1;

    EVRCompositorError compositorError = compositor.Submit(eye, ref texT, ref boundsT, EVRSubmitFlags.Submit_Default);
    if (compositorError != EVRCompositorError.None)
    {
        throw new Exception($"Failed to submit to the OpenVR Compositor: {compositorError}");
    }
}
// Stores the hidden-area mesh for the given eye; drawing is enabled only once
// both eyes have a mesh assigned.
public void SetHiddenAreaMesh(Mesh hidden_area_mesh, EVREye eye)
{
    if (eye != EVREye.Eye_Left)
    {
        m_hidden_area_mesh_right = hidden_area_mesh;
    }
    else
    {
        m_hidden_area_mesh_left = hidden_area_mesh;
    }
    bool bothEyesReady = m_hidden_area_mesh_left != null && m_hidden_area_mesh_right != null;
    if (bothEyesReady)
    {
        m_draw_hidden_area_mesh = true;
    }
}
// Renders all registered cameras for one eye, invoking pre/post callbacks and
// optionally dumping the render target for debugging.
void RenderEye(SteamVR vr, EVREye eye)
{
    eyePreRenderCallback?.Invoke(eye);
    int i = (int)eye;
    SteamVR_Render.eye = eye;
    if (cameraMask != null)
    {
        cameraMask.Set(vr, eye);
    }
    foreach (var c in cameras)
    {
        c.transform.localPosition = vr.eyes[i].pos;
        c.transform.localRotation = vr.eyes[i].rot;
        // Update position to keep from getting culled.
        // Bug fix: guard the dereference — cameraMask is null-checked above,
        // so it can legitimately be null here.
        if (cameraMask != null)
        {
            cameraMask.transform.position = c.transform.position;
        }
        var camera = c.camera;
        camera.targetTexture = SteamVR_Camera.GetSceneTexture(camera.allowHDR);
        // Swap in the per-eye layer mask, render, then restore.
        int cullingMask = camera.cullingMask;
        if (eye == EVREye.Eye_Left)
        {
            camera.cullingMask &= ~rightMask;
            camera.cullingMask |= leftMask;
        }
        else
        {
            camera.cullingMask &= ~leftMask;
            camera.cullingMask |= rightMask;
        }
        camera.Render();
        if (SteamVR_Camera.doomp)
        {
            // Debug dump of this eye's render target.
            Debug.Log(Time.frameCount.ToString() + $"/Render{eye}_OnRenderImage_src.png");
            SteamVR_Camera.DumpRenderTexture(camera.targetTexture, Application.streamingAssetsPath + $"/Render{eye}_OnRenderImage_src.png");
        }
        camera.cullingMask = cullingMask;
    }
    eyePostRenderCallback?.Invoke(eye);
}
// Renders all registered cameras for one eye. Exceptions are caught and logged
// so a single bad frame does not kill the render loop.
void RenderEye(SteamVR vr, EVREye eye)
{
    try
    {
        int i = (int)eye;
        SteamVR_Render.eye = eye;
        if (cameraMask != null)
        {
            cameraMask.Set(vr, eye);
        }
        foreach (var c in cameras)
        {
            c.transform.localPosition = vr.eyes[i].pos;
            c.transform.localRotation = vr.eyes[i].rot;
            // Update position to keep from getting culled.
            // Bug fix: guard the dereference — cameraMask is null-checked above,
            // so it can legitimately be null here.
            if (cameraMask != null)
            {
                cameraMask.transform.position = c.transform.position;
            }
            var camera = c.GetComponent<Camera>();
            camera.targetTexture = SteamVR_Camera.GetSceneTexture(camera.hdr);
            // Swap in the per-eye layer mask, render, then restore.
            int cullingMask = camera.cullingMask;
            if (eye == EVREye.Eye_Left)
            {
                camera.cullingMask &= ~rightMask;
                camera.cullingMask |= leftMask;
            }
            else
            {
                camera.cullingMask &= ~leftMask;
                camera.cullingMask |= rightMask;
            }
            camera.Render();
            camera.cullingMask = cullingMask;
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
// Updates the editor camera from the HMD eye pose.
private void UpdateEditorScene(
    EVREye eye,
    SteamVR_Utils.RigidTransform hmdTransform,
    SteamVR_Utils.RigidTransform hmdEyeTransform)
{
    // Fix: removed the unused local `positionToHmd`.
    // Eye position = HMD position plus the eye offset rotated by the HMD orientation.
    Vector3 positionToEye = hmdTransform.pos + hmdTransform.rot * hmdEyeTransform.pos;

    // translate device space to Unity space, with world scaling
    Vector3 updatedPosition = DevicePoseToWorld(positionToEye);
    Quaternion updatedRotation = DevicePoseToWorld(hmdTransform.rot);

    // update the editor camera position
    EditorCamera.Instance.transform.position = updatedPosition;
    EditorCamera.Instance.transform.rotation = updatedRotation;

    // store the eyeball position
    HmdEyePosition[(int)eye] = updatedPosition;
    HmdEyeRotation[(int)eye] = updatedRotation;
}
// Renders the prepared frame for one eye and submits the result to the compositor.
private void SubmitEye(EVREye eye, IPreparedFrame preparedFrame)
{
    immediateContext.WithEvent($"VRApp::SubmitEye({eye})", () =>
    {
        HiddenAreaMesh hiddenAreaMesh = hiddenAreaMeshes.GetMesh(eye);
        Matrix viewMatrix = GetViewMatrix(eye);
        Matrix projectionMatrix = GetProjectionMatrix(eye);
        var resultTexture = RenderView(preparedFrame, hiddenAreaMesh, viewMatrix, projectionMatrix);

        // Submit the full texture (uv 0..1).
        VRTextureBounds_t bounds;
        bounds.uMin = 0;
        bounds.uMax = 1;
        bounds.vMin = 0;
        bounds.vMax = 1;

        Texture_t eyeTexture;
        eyeTexture.handle = resultTexture.NativePointer;
        eyeTexture.eType = ETextureType.DirectX;
        eyeTexture.eColorSpace = EColorSpace.Auto;

        // Bug fix: the compositor error code was silently discarded, hiding
        // submit failures (other submit paths in this codebase throw on error).
        EVRCompositorError submitError = OpenVR.Compositor.Submit(eye, ref eyeTexture, ref bounds, EVRSubmitFlags.Submit_Default);
        if (submitError != EVRCompositorError.None)
        {
            throw new InvalidOperationException($"OpenVR compositor Submit({eye}) failed: {submitError}");
        }
    });
}
// Captures the compositor's OpenGL mirror texture for one eye into a Bitmap.
// The lock/read/unlock/release ordering against the shared GL texture is
// significant — do not reorder.
public Bitmap GetMirrorImage(EVREye eye = EVREye.Eye_Left)
{
    uint textureId = 0;
    var handle = new IntPtr();
    var result = OpenVR.Compositor.GetMirrorTextureGL(eye, ref textureId, handle);
    if (result != EVRCompositorError.None)
    {
        throw new OpenVRSystemException<EVRCompositorError>("Failed to get mirror texture from OpenVR", result);
    }
    // Lock the shared texture before touching it from this GL context.
    OpenVR.Compositor.LockGLSharedTextureForAccess(handle);
    GL.BindTexture(TextureTarget.Texture2d, new TextureHandle((int)textureId));
    // Query the texture dimensions from GL rather than assuming a size.
    var height = 0;
    GL.GetTexParameteri(TextureTarget.Texture2d, GetTextureParameter.TextureHeight, ref height);
    var width = 0;
    GL.GetTexParameteri(TextureTarget.Texture2d, GetTextureParameter.TextureWidth, ref width);
    var bitmap = new Bitmap(width, height);
    var data = bitmap.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.WriteOnly, PixelFormat.Format24bppRgb);
    // Drain the GL pipeline so the texture contents are complete before reading.
    GL.Finish();
    GL.ReadPixels(0, 0, width, height, OpenTK.Graphics.OpenGLES3.PixelFormat.Rgb, PixelType.UnsignedByte, data.Scan0);
    bitmap.UnlockBits(data);
    // GL's origin is bottom-left; flip vertically for the bitmap's top-left origin.
    bitmap.RotateFlip(RotateFlipType.RotateNoneFlipY);
    OpenVR.Compositor.UnlockGLSharedTextureForAccess(handle);
    OpenVR.Compositor.ReleaseSharedGLTexture(textureId, handle);
    return(bitmap);
}
// Renders one eye's view into a (lazily created) render target via the supplied
// draw callback, then submits the target to the OpenVR compositor as an OpenGL texture.
// The effect and render target are created on first use and cached via the ref parameters.
private void Render(EVREye eye, ref BasicEffect effect, ref RenderTarget2D renderTarget, ref IntPtr renderTargetHandle, GraphicsDevice graphicsDevice, TrackedDevicePose_t device, Action<BasicEffect> render)
{
    if (effect == null)
    {
        effect = new BasicEffect(graphicsDevice);
    }
    if (renderTarget == null)
    {
        renderTarget = new RenderTarget2D(graphicsDevice, (int)_width, (int)_height, false, SurfaceFormat.Color, DepthFormat.Depth16, 1, RenderTargetUsage.DiscardContents, false);
        // Cache the GL handle so it can be handed to the compositor each frame.
        renderTargetHandle = (IntPtr)GetGlHandle(renderTarget);
    }
    // I have absolutely no idea why inverse+transpose is here.
    var viewMatrix = Matrix.Invert(Matrix.Transpose(ToMonogameMatrix(device.mDeviceToAbsoluteTracking)));
    var modelMatrix = _vrControls.GetMatrix();
    var matrix = modelMatrix * viewMatrix * GetEyeView(eye) * GetProjection(eye);
    ViewModel = modelMatrix * viewMatrix;
    // The full model-view-projection is baked into Projection; View/World stay identity.
    effect.Projection = matrix;
    effect.View = Matrix.Identity;
    effect.World = Matrix.Identity;
    effect.VertexColorEnabled = true;
    graphicsDevice.SetRenderTarget(renderTarget);
    render(effect);
    graphicsDevice.SetRenderTarget(null);
    var tex = new Texture_t
    {
        eColorSpace = EColorSpace.Auto,
        eType = ETextureType.OpenGL,
        handle = renderTargetHandle,
    };
    // IntPtr.Zero bounds means the compositor uses the full texture.
    Check(_vrCompositor.Submit(eye, ref tex, IntPtr.Zero, EVRSubmitFlags.Submit_Default));
    // TODO: GlRenderBuffer?
}
// Builds a GPU vertex buffer from the HMD's standard hidden-area mesh, or
// returns null when the headset reports no hidden area.
public static HiddenAreaMesh Make(Device device, EVREye eye)
{
    HiddenAreaMesh_t meshDef = OpenVR.System.GetHiddenAreaMesh(eye, EHiddenAreaMeshType.k_eHiddenAreaMesh_Standard);
    if (meshDef.unTriangleCount == 0)
    {
        return null;
    }
    int vertexCount = (int)meshDef.unTriangleCount * 3;
    var description = new BufferDescription
    {
        SizeInBytes = vertexCount * Vector2.SizeInBytes,
        BindFlags = BindFlags.VertexBuffer,
        Usage = ResourceUsage.Immutable
    };
    // OpenVR exposes the vertex data as a raw native pointer; the buffer copies from it.
    var vertexBuffer = new Buffer(device, meshDef.pVertexData, description);
    var binding = new VertexBufferBinding(vertexBuffer, Vector2.SizeInBytes, 0);
    return new HiddenAreaMesh(vertexCount, binding);
}
/// <summary>
/// Updates the game cameras to the correct position, according to the given HMD eye pose.
/// </summary>
/// <param name="eye">Which eye (left or right) is being updated</param>
/// <param name="hmdTransform">Pose of the HMD, in the device space coordinate system</param>
/// <param name="hmdEyeTransform">Pose of the eye relative to the HMD, in the device space coordinate system</param>
public void UpdateScene(
    EVREye eye,
    SteamVR_Utils.RigidTransform hmdTransform,
    SteamVR_Utils.RigidTransform hmdEyeTransform)
{
    switch (HighLogic.LoadedScene)
    {
        case GameScenes.FLIGHT:
            UpdateFlightScene(eye, hmdTransform, hmdEyeTransform);
            break;
        case GameScenes.EDITOR:
            UpdateEditorScene(eye, hmdTransform, hmdEyeTransform);
            break;
        default:
            throw new Exception("Cannot setup VR scene, current scene \"" + HighLogic.LoadedScene + "\" is invalid.");
    }
    // Cache the world-space HMD pose for other consumers.
    HmdPosition = CurrentPosition + CurrentRotation * hmdTransform.pos;
    HmdRotation = CurrentRotation * hmdTransform.rot;
}
/// <summary>P/Invoke entry point for IVRSystem::GetProjectionMatrix.</summary>
internal static extern HmdMatrix44_t VR_IVRSystem_GetProjectionMatrix(IntPtr instancePtr, EVREye eEye, float fNearZ, float fFarZ, EGraphicsAPIConvention eProjType);
// Fetches the projection matrix for the given eye and clip planes straight
// from the native function table.
public HmdMatrix44_t GetProjectionMatrix(EVREye eEye, float fNearZ, float fFarZ, EGraphicsAPIConvention eProjType)
{
    return FnTable.GetProjectionMatrix(eEye, fNearZ, fFarZ, eProjType);
}
// Zeroes the out-parameters, then lets the native function table fill them in.
public void GetProjectionRaw(EVREye eEye, ref float pfLeft, ref float pfRight, ref float pfTop, ref float pfBottom)
{
    pfLeft = pfRight = pfTop = pfBottom = 0;
    FnTable.GetProjectionRaw(eEye, ref pfLeft, ref pfRight, ref pfTop, ref pfBottom);
}
// Eye-to-head transform via the legacy native entry points; validates the
// wrapper state first.
public override HmdMatrix34_t GetEyeToHeadTransform(EVREye eEye)
{
    CheckIfUsable();
    return VRNativeEntrypoints.VR_IVRSystem_GetEyeToHeadTransform(m_pVRSystem, eEye);
}
// Zeroes the out-parameters, then lets the native entry point fill them in;
// validates the wrapper state first.
public override void GetProjectionRaw(EVREye eEye, ref float pfLeft, ref float pfRight, ref float pfTop, ref float pfBottom)
{
    CheckIfUsable();
    pfLeft = pfRight = pfTop = pfBottom = 0;
    VRNativeEntrypoints.VR_IVRSystem_GetProjectionRaw(m_pVRSystem, eEye, ref pfLeft, ref pfRight, ref pfTop, ref pfBottom);
}
// Zeroes the viewport out-parameters, then lets the native function table fill them in.
public void GetEyeOutputViewport(EVREye eEye, ref uint pnX, ref uint pnY, ref uint pnWidth, ref uint pnHeight)
{
    pnX = pnY = pnWidth = pnHeight = 0u;
    FnTable.GetEyeOutputViewport(eEye, ref pnX, ref pnY, ref pnWidth, ref pnHeight);
}
/// <summary>P/Invoke entry point for IVRSystem::GetEyeToHeadTransform.</summary>
internal static extern HmdMatrix34_t VR_IVRSystem_GetEyeToHeadTransform(IntPtr instancePtr, EVREye eEye);
/// <summary>Returns the transform from eye space to head space for the given eye.</summary>
public abstract HmdMatrix34_t GetEyeToHeadTransform(EVREye eEye);
/// <summary>Retrieves the raw per-eye projection values through the ref out-parameters.</summary>
public abstract void GetProjectionRaw(EVREye eEye,ref float pfLeft,ref float pfRight,ref float pfTop,ref float pfBottom);
// Distortion lookup for a single normalized (u, v) coordinate, delegated to
// the native function table.
public DistortionCoordinates_t ComputeDistortion(EVREye eEye, float fU, float fV)
{
    return FnTable.ComputeDistortion(eEye, fU, fV);
}
// Eye-to-head transform, delegated to the native function table.
public HmdMatrix34_t GetEyeToHeadTransform(EVREye eEye)
{
    return FnTable.GetEyeToHeadTransform(eEye);
}
// Renders all registered cameras for one eye with per-eye pose, scene texture,
// and culling-mask layers.
void RenderEye(SteamVR vr, EVREye eye)
{
    int i = (int)eye;
    SteamVR_Render.eye = eye;
    if (cameraMask != null)
        cameraMask.Set(vr, eye);
    foreach (var c in cameras)
    {
        c.transform.localPosition = vr.eyes[i].pos;
        c.transform.localRotation = vr.eyes[i].rot;
        // Update position to keep from getting culled.
        // Bug fix: guard the dereference — cameraMask is null-checked above,
        // so it can legitimately be null here.
        if (cameraMask != null)
            cameraMask.transform.position = c.transform.position;
        var camera = c.GetComponent<Camera>();
        camera.targetTexture = SteamVR_Camera.GetSceneTexture(camera.hdr);
        // Swap in the per-eye layer mask, render, then restore.
        int cullingMask = camera.cullingMask;
        if (eye == EVREye.Eye_Left)
        {
            camera.cullingMask &= ~rightMask;
            camera.cullingMask |= leftMask;
        }
        else
        {
            camera.cullingMask &= ~leftMask;
            camera.cullingMask |= rightMask;
        }
        camera.Render();
        camera.cullingMask = cullingMask;
    }
}
// Submits one eye's texture to the compositor via the native function table.
public EVRCompositorError Submit(EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags)
{
    return FnTable.Submit(eEye, ref pTexture, ref pBounds, nSubmitFlags);
}
/// <summary>P/Invoke entry point for IVRSystem::GetProjectionRaw.</summary>
internal static extern void VR_IVRSystem_GetProjectionRaw(IntPtr instancePtr, EVREye eEye, ref float pfLeft, ref float pfRight, ref float pfTop, ref float pfBottom);
/// <summary>Retrieves the output viewport for the given eye through the ref out-parameters.</summary>
public abstract void GetEyeOutputViewport(EVREye eEye,ref uint pnX,ref uint pnY,ref uint pnWidth,ref uint pnHeight);
/// <summary>P/Invoke entry point for IVRSystem::ComputeDistortion.</summary>
internal static extern DistortionCoordinates_t VR_IVRSystem_ComputeDistortion(IntPtr instancePtr, EVREye eEye, float fU, float fV);
/// <summary>P/Invoke entry point for IVRSystem::GetHiddenAreaMesh.</summary>
internal static extern HiddenAreaMesh_t VR_IVRSystem_GetHiddenAreaMesh(IntPtr instancePtr, EVREye eEye);
/// <summary>Returns the projection matrix for the given eye, clip planes, and graphics API convention.</summary>
public abstract HmdMatrix44_t GetProjectionMatrix(EVREye eEye,float fNearZ,float fFarZ,EGraphicsAPIConvention eProjType);
// Hidden-area mesh for the given eye, delegated to the native function table.
public HiddenAreaMesh_t GetHiddenAreaMesh(EVREye eEye)
{
    return FnTable.GetHiddenAreaMesh(eEye);
}
/// <summary>Computes distortion coordinates for a normalized (u, v) position on the given eye.</summary>
public abstract DistortionCoordinates_t ComputeDistortion(EVREye eEye,float fU,float fV);
// Token: 0x06001F74 RID: 8052 RVA: 0x0009D7DC File Offset: 0x0009B9DC
// Forwards the mirror-texture request to the native compositor function table.
public EVRCompositorError GetMirrorTextureD3D11(EVREye eEye, IntPtr pD3D11DeviceOrResource, ref IntPtr ppD3D11ShaderResourceView)
{
    EVRCompositorError error = this.FnTable.GetMirrorTextureD3D11(eEye, pD3D11DeviceOrResource, ref ppD3D11ShaderResourceView);
    return error;
}
/// <summary>Returns the hidden-area mesh for the given eye.</summary>
public abstract HiddenAreaMesh_t GetHiddenAreaMesh(EVREye eEye);
// Token: 0x06001F76 RID: 8054 RVA: 0x0009D804 File Offset: 0x0009BA04
// Zeroes the texture id out-parameter, then forwards to the native compositor
// function table.
public EVRCompositorError GetMirrorTextureGL(EVREye eEye, ref uint pglTextureId, IntPtr pglSharedTextureHandle)
{
    pglTextureId = 0u;
    EVRCompositorError error = this.FnTable.GetMirrorTextureGL(eEye, ref pglTextureId, pglSharedTextureHandle);
    return error;
}
/// <summary>Submits a texture for the given eye to the compositor and returns the resulting error code.</summary>
public abstract EVRCompositorError Submit(EVREye eEye,ref Texture_t pTexture,ref VRTextureBounds_t pBounds,EVRSubmitFlags nSubmitFlags);
// Token: 0x06001F58 RID: 8024 RVA: 0x0009D5C7 File Offset: 0x0009B7C7
// Forwards the frame submission to the native compositor function table.
public EVRCompositorError Submit(EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags)
{
    EVRCompositorError error = this.FnTable.Submit(eEye, ref pTexture, ref pBounds, nSubmitFlags);
    return error;
}
// Projection matrix via the legacy native entry points; validates the wrapper
// state first.
public override HmdMatrix44_t GetProjectionMatrix(EVREye eEye, float fNearZ, float fFarZ, EGraphicsAPIConvention eProjType)
{
    CheckIfUsable();
    return VRNativeEntrypoints.VR_IVRSystem_GetProjectionMatrix(m_pVRSystem, eEye, fNearZ, fFarZ, eProjType);
}
/// <summary>P/Invoke entry point for IVRExtendedDisplay::GetEyeOutputViewport.</summary>
internal static extern void VR_IVRExtendedDisplay_GetEyeOutputViewport(IntPtr instancePtr, EVREye eEye, ref uint pnX, ref uint pnY, ref uint pnWidth, ref uint pnHeight);
// Distortion lookup via the legacy native entry points; validates the wrapper
// state first.
public override DistortionCoordinates_t ComputeDistortion(EVREye eEye, float fU, float fV)
{
    CheckIfUsable();
    return VRNativeEntrypoints.VR_IVRSystem_ComputeDistortion(m_pVRSystem, eEye, fU, fV);
}
// Zeroes the viewport out-parameters, then lets the native entry point fill
// them in; validates the wrapper state first.
public override void GetEyeOutputViewport(EVREye eEye, ref uint pnX, ref uint pnY, ref uint pnWidth, ref uint pnHeight)
{
    CheckIfUsable();
    pnX = pnY = pnWidth = pnHeight = 0u;
    VRNativeEntrypoints.VR_IVRExtendedDisplay_GetEyeOutputViewport(m_pVRExtendedDisplay, eEye, ref pnX, ref pnY, ref pnWidth, ref pnHeight);
}
// Hidden-area mesh via the legacy native entry points; validates the wrapper
// state first.
public override HiddenAreaMesh_t GetHiddenAreaMesh(EVREye eEye)
{
    CheckIfUsable();
    return VRNativeEntrypoints.VR_IVRSystem_GetHiddenAreaMesh(m_pVRSystem, eEye);
}
// Frame submission via the legacy native entry points; validates the wrapper
// state first.
public override EVRCompositorError Submit(EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags)
{
    CheckIfUsable();
    return VRNativeEntrypoints.VR_IVRCompositor_Submit(m_pVRCompositor, eEye, ref pTexture, ref pBounds, nSubmitFlags);
}
// Returns the cached hidden-area mesh for the requested eye.
public HiddenAreaMesh GetMesh(EVREye eye)
{
    if (eye == EVREye.Eye_Left)
    {
        return leftEyeMesh;
    }
    return rightEyeMesh;
}
/// <summary>P/Invoke entry point for IVRCompositor::Submit.</summary>
internal static extern EVRCompositorError VR_IVRCompositor_Submit(IntPtr instancePtr, EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags);