/// <summary>
/// Computes the render-target resolution and the field of view (degrees) for
/// one eye and records them in eyeDescs.
/// </summary>
/// <param name="eye">Which eye to configure.</param>
private void ConfigureEyeDesc(OVREye eye)
{
    Vector2 texSize = Vector2.zero;
    Vector2 fovSize = Vector2.zero;
#if !UNITY_ANDROID || UNITY_EDITOR
    if (!OVRManager.instance.isVRPresent)
    {
        return;
    }

    // Symmetrize the HMD's default per-eye frustum by keeping the wider
    // half-angle tangent on each axis.
    FovPort fovPort = OVRManager.capiHmd.GetDesc().DefaultEyeFov[(int)eye];
    float horizontalTan = Mathf.Max(fovPort.LeftTan, fovPort.RightTan);
    float verticalTan = Mathf.Max(fovPort.UpTan, fovPort.DownTan);
    fovPort.LeftTan = horizontalTan;
    fovPort.RightTan = horizontalTan;
    fovPort.UpTan = verticalTan;
    fovPort.DownTan = verticalTan;

    texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fovPort, OVRManager.instance.nativeTextureScale).ToVector2();
    // Tangents describe half-angles, so the full FOV is twice the arctangent.
    fovSize = new Vector2(
        2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.LeftTan),
        2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.UpTan));
#else
    texSize = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
    fovSize = new Vector2(90, 90);
#endif
    eyeDescs[(int)eye] = new EyeRenderDesc() { resolution = texSize, fov = fovSize };
}
/// <summary>
/// Prepares the pipeline for rendering one eye: refreshes the HMD pose,
/// records it, builds the per-eye view/projection matrices and sets the eye's
/// viewport on the graphics device.
/// </summary>
/// <param name="eyeIndex">Index into the HMD's recommended eye render order.</param>
private void SETUP_eyeRender(int eyeIndex)
{
    GraphicsDevice.SetRasterizerState(GraphicsDevice.RasterizerStates.Default);

    EyeType eye = hmd.EyeRenderOrder[eyeIndex];
    EyeRenderDesc renderDesc = eyeRenderDesc[(int)eye];
    Rect renderViewport = eyeRenderViewport[(int)eye];

    UpdateFromHmd(eye);
    renderPose[(int)eye] = config.player.hmd.PoseF;

    // Calculate view matrix: shift the player's look-at matrix by this eye's
    // IPD offset reported by the SDK.
    // (Removed dead locals that derived up/forward vectors and an eye position
    // from config.player but were never used.)
    eyeView = Matrix.Translation(renderDesc.HmdToEyeViewOffset) * config.player.LookAtRH;

    // Calculate projection matrix.
    // NOTE(review): near = 0.001f and far = -1000.0f; the Oculus projection
    // helper normally expects positive plane distances — confirm the negative
    // far plane is intentional and not a typo.
    eyeProjection = OVR.MatrixProjection(renderDesc.Fov, 0.001f, -1000.0f, true);
    eyeProjection.Transpose();

    eyeWorld = Matrix.Identity;

    // Restrict rendering to this eye's region of the render target.
    GraphicsDevice.SetViewport(renderViewport.ToViewportF());
}
/// <summary>
/// Queries the plugin for one eye's texture size and frustum and stores them
/// in eyeDescs as a resolution plus a field of view in degrees.
/// </summary>
/// <param name="eye">Which eye to configure.</param>
private void ConfigureEyeDesc(OVREye eye)
{
    Vector2 texResolution = Vector2.zero;
    Vector2 fovDegrees = Vector2.zero;
#if !UNITY_ANDROID || UNITY_EDITOR
    if (!OVRManager.instance.isVRPresent)
    {
        return;
    }

    OVRPlugin.Sizei textureSize = OVRPlugin.GetEyeTextureSize((OVRPlugin.Eye)eye);
    OVRPlugin.Frustumf frustum = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);

    texResolution = new Vector2(textureSize.w, textureSize.h);
    // Frustum angles come back in radians; the descriptor stores degrees.
    fovDegrees = Mathf.Rad2Deg * new Vector2(frustum.fovX, frustum.fovY);
#else
    texResolution = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
    fovDegrees = new Vector2(90, 90);
#endif
    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = texResolution,
        fov = fovDegrees
    };
}
/// <summary>
/// Derives a symmetric per-eye field of view from the HMD defaults and fills
/// eyeDescs with the matching render-texture resolution and FOV angles.
/// </summary>
/// <param name="eye">Which eye to configure.</param>
private void ConfigureEyeDesc(OVREye eye)
{
#if !UNITY_ANDROID || UNITY_EDITOR
    HmdDesc desc = OVRManager.capiHmd.GetDesc();
    FovPort fov = desc.DefaultEyeFov[(int)eye];

    // Make the frustum symmetric on each axis by keeping the wider tangent.
    float tanX = Mathf.Max(fov.LeftTan, fov.RightTan);
    float tanY = Mathf.Max(fov.UpTan, fov.DownTan);
    fov.LeftTan = tanX;
    fov.RightTan = tanX;
    fov.UpTan = tanY;
    fov.DownTan = tanY;

    // Configure Stereo settings. Default pixel density is one texel per pixel.
    float desiredPixelDensity = 1f;
    Sizei texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fov, desiredPixelDensity);

    // Full angles in degrees; the tangents represent half-angles.
    Vector2 fovDegrees = new Vector2(
        2f * Mathf.Rad2Deg * Mathf.Atan(fov.LeftTan),
        2f * Mathf.Rad2Deg * Mathf.Atan(fov.UpTan));

    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = texSize.ToVector2(),
        fov = fovDegrees
    };
#else
    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = new Vector2(1024, 1024),
        fov = new Vector2(90, 90)
    };
#endif
}
/// <summary>
/// Attaches the HMD to the game window, hands the Oculus SDK our Direct3D 11
/// device/swap chain for distortion rendering, enables tracking, and moves the
/// swap chain fullscreen onto the Rift's display output.
/// </summary>
/// <param name="graphicsDevice">Device whose back buffer the SDK renders into.</param>
/// <param name="windowHandle">Native handle of the game window.</param>
protected void InitializeHMD(GraphicsDevice graphicsDevice, IntPtr windowHandle)
{
    // Attach the HMD to window to direct back buffer output from the window to the HMD
    HMD.AttachToWindow(windowHandle);

    // Configure DirectX 11: pass the SDK raw native pointers to our device,
    // immediate context, back-buffer render target and swap chain.
    var device = (SharpDX.Direct3D11.Device)graphicsDevice;
    var d3D11Cfg = new D3D11ConfigData
    {
        Header =
        {
            API = RenderAPIType.D3D11,
            BackBufferSize = HMD.Resolution,
            Multisample = 1
        },
        pDevice = device.NativePointer,
        pDeviceContext = device.ImmediateContext.NativePointer,
        pBackBufferRT = ((RenderTargetView)graphicsDevice.BackBuffer).NativePointer,
        pSwapChain = ((SwapChain)graphicsDevice.Presenter.NativePresenter).NativePointer
    };

    // Configure the HMD's rendering settings (chromatic aberration correction
    // and timewarp); this call fills EyeRenderDesc for both eyes.
    EyeRenderDesc = new EyeRenderDesc[2];
    if (!HMD.ConfigureRendering(d3D11Cfg, DistortionCapabilities.Chromatic | DistortionCapabilities.TimeWarp, HMD.DefaultEyeFov, EyeRenderDesc))
    {
        throw new Exception("Failed to configure HMD");
    }

    // IPD: cache the per-eye view offsets reported by the SDK.
    EyeOffset[0] = EyeRenderDesc[0].HmdToEyeViewOffset;
    EyeOffset[1] = EyeRenderDesc[1].HmdToEyeViewOffset;

    // Enable low persistence and dynamic prediction features
    HMD.EnabledCaps = HMDCapabilities.LowPersistence | HMDCapabilities.DynamicPrediction;

    // Enable all DK2 tracking features; no capabilities are marked required,
    // so tracking degrades gracefully if a sensor is missing.
    HMD.ConfigureTracking(TrackingCapabilities.Orientation | TrackingCapabilities.Position | TrackingCapabilities.MagYawCorrection, TrackingCapabilities.None);

    // Dismiss the Health and Safety Window
    HMD.DismissHSWDisplay();

    // Get HMD output display by matching the SDK-reported device name against
    // the adapter's outputs.
    var adapter = (Adapter)graphicsDevice.Adapter;
    var hmdOutput = adapter.Outputs.FirstOrDefault(o => HMD.DeviceName.StartsWith(o.Description.DeviceName, StringComparison.OrdinalIgnoreCase));
    if (hmdOutput != null)
    {
        // Set game to fullscreen on rift
        var swapChain = (SwapChain)graphicsDevice.Presenter.NativePresenter;
        var description = swapChain.Description.ModeDescription;
        swapChain.ResizeTarget(ref description);
        swapChain.SetFullscreenState(true, hmdOutput);
    }
}
/// <summary>
/// Allocates the render texture for one eye at the given resolution scale and
/// caches its native texture id.
/// </summary>
/// <param name="eyeBufferIndex">Base index of the current eye-buffer pair.</param>
/// <param name="eye">Which eye within the pair.</param>
/// <param name="scale">Multiplier applied to the descriptor's resolution.</param>
private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye, float scale)
{
    int eyeIndex = eyeBufferIndex + (int)eye;
    EyeRenderDesc eyeDesc = eyeDescs[(int)eye];

    int scaledWidth = (int)(eyeDesc.resolution.x * scale);
    int scaledHeight = (int)(eyeDesc.resolution.y * scale);

    RenderTexture texture = new RenderTexture(scaledWidth, scaledHeight, OVRManager.instance.eyeTextureDepth, OVRManager.instance.eyeTextureFormat);
    // QualitySettings reports 0 when MSAA is off, but RenderTexture needs >= 1.
    int samples = QualitySettings.antiAliasing;
    texture.antiAliasing = (samples == 0) ? 1 : samples;
    texture.Create();

    eyeTextures[eyeIndex] = texture;
    eyeTextureIds[eyeIndex] = texture.GetNativeTextureID();
}
/// <summary>
/// Allocates the render texture for one eye at the descriptor's resolution and
/// caches its native texture id.
/// </summary>
/// <param name="eyeBufferIndex">Base index of the current eye-buffer pair.</param>
/// <param name="eye">Which eye within the pair.</param>
private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye)
{
    int eyeIndex = eyeBufferIndex + (int)eye;
    EyeRenderDesc eyeDesc = eyeDescs[(int)eye];

    var texture = new RenderTexture(
        (int)eyeDesc.resolution.x,
        (int)eyeDesc.resolution.y,
        (int)OVRManager.instance.eyeTextureDepth,
        OVRManager.instance.eyeTextureFormat);
    texture.antiAliasing = (int)OVRManager.instance.eyeTextureAntiAliasing;
    texture.Create();

    eyeTextures[eyeIndex] = texture;
    eyeTextureIds[eyeIndex] = texture.GetNativeTextureID();
}
/// <summary>
/// (Re)creates the render textures for every eye buffer once VR is present,
/// the eye descriptors report a valid resolution, and reconfiguration has been
/// requested via needsConfigureTexture.
/// </summary>
private void ConfigureEyeTextures()
{
    if (!OVRManager.instance.isVRPresent)
    {
        return;
    }

    ConfigureEyeDesc(OVREye.Left);
    ConfigureEyeDesc(OVREye.Right);

    // A zero width means the descriptors are not populated yet.
    if (eyeDescs[0].resolution.x == 0)
    {
        return;
    }

    if (!needsConfigureTexture)
    {
        return;
    }

    // Each buffer holds a left/right pair, so advance two slots at a time.
    for (int bufferStart = 0; bufferStart < eyeTextureCount; bufferStart += 2)
    {
        for (int side = 0; side < 2; side++)
        {
            int eyeIndex = bufferStart + side;
            EyeRenderDesc eyeDesc = eyeDescs[side];

            var texture = new RenderTexture(
                (int)eyeDesc.resolution.x,
                (int)eyeDesc.resolution.y,
                (int)OVRManager.instance.eyeTextureDepth,
                OVRManager.instance.eyeTextureFormat);
            texture.antiAliasing = (int)OVRManager.instance.eyeTextureAntiAliasing;
            texture.Create();

            eyeTextures[eyeIndex] = texture;
            eyeTextureIds[eyeIndex] = texture.GetNativeTextureID();
        }
    }

    needsSetTexture = true;
    needsConfigureTexture = false;

#if !UNITY_ANDROID || UNITY_EDITOR
    timeWarpViewNumber = OVR_GetFirstFrameID();
#endif
}
/// <summary>
/// Fills eyeDescs for the given node with the plugin-reported texture size and
/// frustum, converted to degrees.
/// </summary>
/// <param name="eye">Eye node to configure.</param>
private void ConfigureEyeDesc(VR.VRNode eye)
{
    if (!OVRManager.isHmdPresent)
    {
        return;
    }

    OVRPlugin.Sizei textureSize = OVRPlugin.GetEyeTextureSize((OVRPlugin.Eye)eye);
    OVRPlugin.Frustumf frustum = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);

    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = new Vector2(textureSize.w, textureSize.h),
        // fovX/fovY are radians; the descriptor stores degrees.
        fov = Mathf.Rad2Deg * new Vector2(frustum.fovX, frustum.fovY),
    };
}
/// <summary>
/// Populates eyeDescs for the given node: resolution from settings, per-edge
/// FOV from the plugin (with a symmetric legacy fallback), plus the combined
/// symmetric FOV derived from the widest edges.
/// </summary>
/// <param name="eye">Eye node to configure.</param>
private void ConfigureEyeDesc(UnityEngine.VR.VRNode eye)
#endif
{
    if (!OVRManager.isHmdPresent)
        return;

    int idx = (int)eye;

    EyeRenderDesc desc = new EyeRenderDesc();
    desc.resolution = new Vector2(Settings.eyeTextureWidth, Settings.eyeTextureHeight);

    OVRPlugin.Frustumf2 frust;
    if (OVRPlugin.GetNodeFrustum2((OVRPlugin.Node)eye, out frust))
    {
        // Asymmetric per-edge FOV from half-angle tangents.
        desc.fullFov.LeftFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.LeftTan);
        desc.fullFov.RightFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.RightTan);
        desc.fullFov.UpFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.UpTan);
        desc.fullFov.DownFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.DownTan);
    }
    else
    {
        // Legacy fallback: split the symmetric total FOV evenly per edge.
        OVRPlugin.Frustumf frustOld = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
        float halfX = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
        float halfY = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
        desc.fullFov.LeftFov = halfX;
        desc.fullFov.RightFov = halfX;
        desc.fullFov.UpFov = halfY;
        desc.fullFov.DownFov = halfY;
    }

    // Symmetric Fov uses the maximum fov angle
    float maxFovX = Mathf.Max(desc.fullFov.LeftFov, desc.fullFov.RightFov);
    float maxFovY = Mathf.Max(desc.fullFov.UpFov, desc.fullFov.DownFov);
    desc.fov.x = maxFovX * 2.0f;
    desc.fov.y = maxFovY * 2.0f;

    if (!OVRPlugin.AsymmetricFovEnabled)
    {
        desc.fullFov.LeftFov = maxFovX;
        desc.fullFov.RightFov = maxFovX;
        desc.fullFov.UpFov = maxFovY;
        desc.fullFov.DownFov = maxFovY;
    }

    eyeDescs[idx] = desc;
}
/// <summary>
/// Populates eyeDescs for the given node: resolution from the plugin's eye
/// texture size, per-edge FOV from the plugin (with a symmetric legacy
/// fallback), plus the combined symmetric FOV from the widest edges.
/// </summary>
/// <param name="eye">Eye node to configure.</param>
private void ConfigureEyeDesc(UnityEngine.VR.VRNode eye)
#endif
{
    // BUGFIX: every statement in this method was terminated with ':' instead
    // of ';', which does not compile; all terminators corrected.
    if (!OVRManager.isHmdPresent)
        return;

    OVRPlugin.Sizei size = OVRPlugin.GetEyeTextureSize((OVRPlugin.Eye)eye);

    eyeDescs[(int)eye] = new EyeRenderDesc();
    eyeDescs[(int)eye].resolution = new Vector2(size.w, size.h);

    OVRPlugin.Frustumf2 frust;
    if (OVRPlugin.GetNodeFrustum2((OVRPlugin.Node)eye, out frust))
    {
        // Asymmetric per-edge FOV from half-angle tangents.
        eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.LeftTan);
        eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.RightTan);
        eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.UpTan);
        eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.DownTan);
    }
    else
    {
        // Legacy fallback: split the symmetric total FOV evenly per edge.
        OVRPlugin.Frustumf frustOld = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
        eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
        eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
        eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
        eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
    }

    // Symmetric Fov uses the maximum fov angle
    float maxFovX = Mathf.Max(eyeDescs[(int)eye].fullFov.LeftFov, eyeDescs[(int)eye].fullFov.RightFov);
    float maxFovY = Mathf.Max(eyeDescs[(int)eye].fullFov.UpFov, eyeDescs[(int)eye].fullFov.DownFov);
    eyeDescs[(int)eye].fov.x = maxFovX * 2.0f;
    eyeDescs[(int)eye].fov.y = maxFovY * 2.0f;

    if (!OVRPlugin.AsymmetricFovEnabled)
    {
        eyeDescs[(int)eye].fullFov.LeftFov = maxFovX;
        eyeDescs[(int)eye].fullFov.RightFov = maxFovX;
        eyeDescs[(int)eye].fullFov.UpFov = maxFovY;
        eyeDescs[(int)eye].fullFov.DownFov = maxFovY;
    }
}
/// <summary>
/// Verifies that ovr_GetRenderDesc returns a populated eye render description
/// for an explicitly specified field of view.
/// </summary>
public void Session_GetRenderDesc()
{
    IntPtr sessionPtr = CreateSession();
    Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

    // Define field of view (This is used for both left and right eye).
    FovPort fieldOfView = new FovPort();
    fieldOfView.DownTan = (float)Math.Tan(0.523598776);  // 0.523598776 radians = 30 degrees.
    fieldOfView.UpTan = (float)Math.Tan(0.523598776);    // 0.523598776 radians = 30 degrees.
    fieldOfView.LeftTan = (float)Math.Tan(0.785398163);  // 0.785398163 radians = 45 degrees.
    fieldOfView.RightTan = (float)Math.Tan(0.785398163); // 0.785398163 radians = 45 degrees.

    EyeRenderDesc renderDesc = OVR.GetRenderDesc(sessionPtr, EyeType.Left, fieldOfView);

    // The description must echo back the FOV we requested.
    // BUGFIX: the original asserted renderDesc.Fov against itself, which is a
    // tautology and could never fail.
    Assert.AreEqual(fieldOfView, renderDesc.Fov);

    // Test that the GetRenderDesc is returning the correct eye position (or that the workaround is working)
    Assert.AreNotEqual(renderDesc.HmdToEyePose.Position.X, 0.0f);
}
/// <summary>
/// Creates a simple Direct3D graphics engine, used in unit tests.
/// </summary>
/// <param name="session">Existing session used to retrieve the size of the test engine.</param>
/// <returns>Created test engine.</returns>
/// <remarks>Remember to dispose the created test engine, after use.</remarks>
private TestEngine CreateTestEngine(IntPtr session)
{
    // Define field of view (This is used for both left and right eye).
    FovPort fieldOfView = new FovPort();
    fieldOfView.DownTan = (float)Math.Tan(0.523598776);  // 0.523598776 radians = 30 degrees.
    fieldOfView.UpTan = (float)Math.Tan(0.523598776);    // 0.523598776 radians = 30 degrees.
    fieldOfView.LeftTan = (float)Math.Tan(0.785398163);  // 0.785398163 radians = 45 degrees.
    fieldOfView.RightTan = (float)Math.Tan(0.785398163); // 0.785398163 radians = 45 degrees.

    // Determine texture size matching the field of view.
    // (Removed two unused ovr_GetRenderDesc calls — both were queried with
    // EyeType.Left and their results discarded; GetRenderDesc is a pure query.)
    Sizei sizeLeft = OVR.GetFovTextureSize(session, EyeType.Left, fieldOfView, 1.0f);
    Sizei sizeRight = OVR.GetFovTextureSize(session, EyeType.Right, fieldOfView, 1.0f);

    // Both eyes render side by side into a single shared target.
    TestEngine testEngine = new TestEngine(sizeLeft.Width + sizeRight.Width, sizeLeft.Height);

    return (testEngine);
}
/// <summary>
/// Records the plugin-reported texture size and frustum (in degrees) for the
/// given eye node, provided a VR device is attached.
/// </summary>
/// <param name="eye">Eye node to configure.</param>
private void ConfigureEyeDesc(VR.VRNode eye)
{
    if (!VR.VRDevice.isPresent)
    {
        return;
    }

    OVRPlugin.Eye pluginEye = (OVRPlugin.Eye)eye;
    OVRPlugin.Sizei textureSize = OVRPlugin.GetEyeTextureSize(pluginEye);
    OVRPlugin.Frustumf frustum = OVRPlugin.GetEyeFrustum(pluginEye);

    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = new Vector2(textureSize.w, textureSize.h),
        // fovX/fovY are radians; the descriptor stores degrees.
        fov = Mathf.Rad2Deg * new Vector2(frustum.fovX, frustum.fovY),
    };
}
/// <summary>
/// Exercises the full frame-submission path: queries render descriptions and
/// texture sizes for both eyes, builds an EyeFov layer backed by committed
/// texture swap chains, and submits one frame to the compositor.
/// </summary>
public void Session_SubmitFrame()
{
    IntPtr sessionPtr = CreateSession();
    Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

    // Define field of view (This is used for both left and right eye).
    FovPort fieldOfView = new FovPort();
    fieldOfView.DownTan = (float)Math.Tan(0.523598776);  // 0.523598776 radians = 30 degrees.
    fieldOfView.UpTan = (float)Math.Tan(0.523598776);    // 0.523598776 radians = 30 degrees.
    fieldOfView.LeftTan = (float)Math.Tan(0.785398163);  // 0.785398163 radians = 45 degrees.
    fieldOfView.RightTan = (float)Math.Tan(0.785398163); // 0.785398163 radians = 45 degrees.

    EyeRenderDesc renderDescLeft = OVR.GetRenderDesc(sessionPtr, EyeType.Left, fieldOfView);
    // BUGFIX: the right-eye description was previously queried with EyeType.Left.
    EyeRenderDesc renderDescRight = OVR.GetRenderDesc(sessionPtr, EyeType.Right, fieldOfView);

    var viewScaleDesc = new ViewScaleDesc();
    viewScaleDesc.HmdToEyePose0 = renderDescLeft.HmdToEyePose;
    viewScaleDesc.HmdToEyePose1 = renderDescRight.HmdToEyePose;
    viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1;
    // NOTE(review): viewScaleDesc is built but SubmitFrame below passes
    // IntPtr.Zero for the view-scale argument — confirm whether it should be
    // pinned and passed instead.

    // Determine texture size matching the field of view.
    Sizei sizeLeft = OVR.GetFovTextureSize(sessionPtr, EyeType.Left, fieldOfView, 1.0f);
    Sizei sizeRight = OVR.GetFovTextureSize(sessionPtr, EyeType.Right, fieldOfView, 1.0f);

    var hmdToEyeViewOffset = new Vector3f[2];
    var poses = new Posef[2];
    double sensorSampleTime;
    hmdToEyeViewOffset[0].X = -0.1f;
    hmdToEyeViewOffset[1].X = 0.1f;
    OVR.GetEyePoses(sessionPtr, 0, true, hmdToEyeViewOffset, ref poses, out sensorSampleTime);

    // Create a set of layers to submit.
    LayerEyeFov layer = new LayerEyeFov();
    layer.Header.Type = LayerType.EyeFov;

    Result result;

    using (TestEngine testEngine = CreateTestEngine(sessionPtr))
    {
        try
        {
            // Create a texture for the left eye.
            layer.ColorTextureLeft = CreateTextureSwapChain(sessionPtr, testEngine);
            layer.ViewportLeft.Position = new Vector2i(0, 0);
            layer.ViewportLeft.Size = sizeLeft;
            layer.FovLeft = fieldOfView;
            layer.RenderPoseLeft = poses[0];

            // Create a texture for the right eye.
            layer.ColorTextureRight = CreateTextureSwapChain(sessionPtr, testEngine);
            layer.ViewportRight.Position = new Vector2i(0, 0);
            // BUGFIX: the right viewport previously used the left eye's size.
            layer.ViewportRight.Size = sizeRight;
            layer.FovRight = fieldOfView;
            layer.RenderPoseRight = poses[1];

            // The created texture swap chain must be committed to the Oculus SDK, before using it in the
            // call to ovr_SubmitFrame, otherwise ovr_SubmitFrame will fail.
            result = OVR.CommitTextureSwapChain(sessionPtr, layer.ColorTextureLeft);
            Assert.IsTrue(result >= Result.Success);
            result = OVR.CommitTextureSwapChain(sessionPtr, layer.ColorTextureRight);
            Assert.IsTrue(result >= Result.Success);

            // SubmitFrame requires pointer to an array of pointers to Layer objects
            var layerPointers = new IntPtr[1];
            GCHandle layerHandle = GCHandle.Alloc(layer, GCHandleType.Pinned);
            GCHandle layerPointersHandle = GCHandle.Alloc(layerPointers, GCHandleType.Pinned);
            layerPointers[0] = layerHandle.AddrOfPinnedObject();

            result = OVR.SubmitFrame(sessionPtr, 0L, IntPtr.Zero, layerPointersHandle.AddrOfPinnedObject(), 1);
            Assert.IsTrue(result >= Result.Success);

            layerPointersHandle.Free();
            layerHandle.Free();
        }
        finally
        {
            // Always release the swap chains, even when an assertion throws.
            if (layer.ColorTextureLeft != IntPtr.Zero)
            {
                OVR.DestroyTextureSwapChain(sessionPtr, layer.ColorTextureLeft);
            }
            if (layer.ColorTextureRight != IntPtr.Zero)
            {
                OVR.DestroyTextureSwapChain(sessionPtr, layer.ColorTextureRight);
            }
        }
    }
}