/// <summary>
/// Gets the recommended eye-buffer resolution and field of view for the HMD.
/// Falls back to 1280x800 at 90 degrees when no HMD is available or the
/// platform is unsupported.
/// </summary>
/// <param name="resH">Receives the horizontal resolution in pixels.</param>
/// <param name="resV">Receives the vertical resolution in pixels.</param>
/// <param name="fovH">Receives the horizontal field of view in degrees.</param>
/// <param name="fovV">Receives the vertical field of view in degrees.</param>
public static void GetImageInfo(ref int resH, ref int resV, ref float fovH, ref float fovV)
{
    // Always start from safe defaults in case the device cannot be queried.
    resH = 1280;
    resV = 800;
    fovH = fovV = 90.0f;

    if (HMD == null || !SupportedPlatform)
    {
        return;
    }

    ovrHmdDesc hmdDesc = HMD.GetDesc();
    ovrFovPort eyeFov = hmdDesc.DefaultEyeFov[0];

    // Symmetrize the FOV: use the larger half-angle tangent on each axis
    // so left/right and up/down match.
    float horizontalTan = Mathf.Max(eyeFov.LeftTan, eyeFov.RightTan);
    float verticalTan = Mathf.Max(eyeFov.UpTan, eyeFov.DownTan);
    eyeFov.LeftTan = horizontalTan;
    eyeFov.RightTan = horizontalTan;
    eyeFov.UpTan = verticalTan;
    eyeFov.DownTan = verticalTan;

    // Configure stereo settings. Default pixel density is 1.0f.
    const float pixelDensity = 1.0f;
    ovrSizei textureSize = HMD.GetFovTextureSize(ovrEyeType.ovrEye_Left, eyeFov, pixelDensity);

    resH = textureSize.w;
    resV = textureSize.h;

    // Total FOV is twice the (symmetrized) half-angle on each axis.
    fovH = 2f * Mathf.Rad2Deg * Mathf.Atan(horizontalTan);
    fovV = 2f * Mathf.Rad2Deg * Mathf.Atan(verticalTan);
}
/// <summary>
/// Creates the back-buffer, per-eye render targets and depth buffers at the
/// sizes recommended by the SDK, configures Oculus D3D11 distortion rendering,
/// and enables head tracking.
/// </summary>
/// <exception cref="ArgumentNullException">
/// Thrown when <c>hmd.ConfigureRendering</c> returns null.
/// </exception>
public override void CreateDisplayResources()
{
    base.CreateDisplayResources();

    backbufferColor = new RenderTarget2D(device, swapChain.GetBackBuffer<D3D.Texture2D>(0));

    // Configure stereo settings: ask the SDK for the recommended texture size
    // per eye at a pixel density of 1.0.
    OVR.Sizei recommendedTex0Size = hmd.GetFovTextureSize(OVR.EyeType.Left, hmd.DefaultEyeFov[0], 1.0f);
    OVR.Sizei recommendedTex1Size = hmd.GetFovTextureSize(OVR.EyeType.Right, hmd.DefaultEyeFov[1], 1.0f);

    int ms = backbufferColor.SampleCount;

    backbufferColor1 = new RenderTarget2D(device, ColorFormat.Rgba8, recommendedTex0Size.Width, recommendedTex0Size.Height, ms);
    backbufferDepth1 = new DepthStencil2D(device, DepthFormat.D24S8, recommendedTex0Size.Width, recommendedTex0Size.Height, ms);
    backbufferColor2 = new RenderTarget2D(device, ColorFormat.Rgba8, recommendedTex1Size.Width, recommendedTex1Size.Height, ms);
    backbufferDepth2 = new DepthStencil2D(device, DepthFormat.D24S8, recommendedTex1Size.Width, recommendedTex1Size.Height, ms);

    // When multisampling is active the SDK reads from single-sample resolve
    // targets instead of the MSAA render targets.
    if (ms > 1)
    {
        backbufferColor1Resolved = new RenderTarget2D(device, ColorFormat.Rgba8, recommendedTex0Size.Width, recommendedTex0Size.Height);
        backbufferColor2Resolved = new RenderTarget2D(device, ColorFormat.Rgba8, recommendedTex1Size.Width, recommendedTex1Size.Height);
    }

    OVR.FovPort[] eyeFov = new OVR.FovPort[] { hmd.DefaultEyeFov[0], hmd.DefaultEyeFov[1] };

    OVR.Sizei size1 = new OVR.Sizei(recommendedTex0Size.Width, recommendedTex0Size.Height);
    OVR.Sizei size2 = new OVR.Sizei(recommendedTex1Size.Width, recommendedTex1Size.Height);

    OVR.Recti[] eyeRenderViewport = new OVR.Recti[2];
    eyeRenderViewport[0].Position = new OVR.Vector2i(0, 0);
    eyeRenderViewport[0].Size = size1;
    eyeRenderViewport[1].Position = new OVR.Vector2i(0, 0);
    eyeRenderViewport[1].Size = size2;

    // Fill in the D3D texture data handed to the SDK.
    eyeTexture = new OVR.D3D11.D3D11TextureData[2];
    eyeTexture[0].Header.API = OVR.RenderAPIType.D3D11;
    eyeTexture[0].Header.TextureSize = size1;
    eyeTexture[0].Header.RenderViewport = eyeRenderViewport[0];

    // Copy the shared header to the right eye, then override the viewport.
    // The actual texture/SRV pointers are assigned per eye below.
    eyeTexture[1] = eyeTexture[0];
    eyeTexture[1].Header.RenderViewport = eyeRenderViewport[1];

    if (ms > 1)
    {
        eyeTexture[0].Texture = backbufferColor1Resolved.Surface.Resource.NativePointer;
        eyeTexture[0].ShaderResourceView = backbufferColor1Resolved.SRV.NativePointer;
        eyeTexture[1].Texture = backbufferColor2Resolved.Surface.Resource.NativePointer;
        eyeTexture[1].ShaderResourceView = backbufferColor2Resolved.SRV.NativePointer;
    }
    else
    {
        eyeTexture[0].Texture = backbufferColor1.Surface.Resource.NativePointer;
        eyeTexture[0].ShaderResourceView = backbufferColor1.SRV.NativePointer;
        eyeTexture[1].Texture = backbufferColor2.Surface.Resource.NativePointer;
        eyeTexture[1].ShaderResourceView = backbufferCol2Resolved_Placeholder();
    }

    // Configure D3D11 distortion rendering.
    OVR.D3D11.D3D11ConfigData d3d11cfg = new OVR.D3D11.D3D11ConfigData();
    d3d11cfg.Header.API = OVR.RenderAPIType.D3D11;
    d3d11cfg.Header.BackBufferSize = new OVR.Sizei(hmd.Resolution.Width, hmd.Resolution.Height);
    d3d11cfg.Header.Multisample = 1;
    d3d11cfg.Device = d3dDevice.NativePointer;
    d3d11cfg.DeviceContext = d3dDevice.ImmediateContext.NativePointer;
    d3d11cfg.BackBufferRenderTargetView = backbufferColor.Surface.RTV.NativePointer;
    d3d11cfg.SwapChain = swapChain.NativePointer;

    eyeRenderDesc = hmd.ConfigureRendering(d3d11cfg,
        OVR.DistortionCaps.ovrDistortionCap_Chromatic |
        OVR.DistortionCaps.ovrDistortionCap_Vignette |
        OVR.DistortionCaps.ovrDistortionCap_TimeWarp |
        OVR.DistortionCaps.ovrDistortionCap_Overdrive,
        eyeFov);
    if (eyeRenderDesc == null)
    {
        // NOTE(review): InvalidOperationException would be more accurate here
        // (eyeRenderDesc is not a method argument), but the exception type is
        // kept so existing callers that catch ArgumentNullException still work.
        throw new ArgumentNullException("eyeRenderDesc", "Achtung eyeRenderDesc = null");
    }

    // Specify which head tracking capabilities to enable.
    hmd.SetEnabledCaps(OVR.HmdCaps.LowPersistence /*| OVR.HmdCaps.DynamicPrediction*/);

    // Start the sensor which informs of the Rift's pose and motion.
    hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation |
                          OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection |
                          OVR.TrackingCaps.ovrTrackingCap_Position,
                          OVR.TrackingCaps.None);
}
/// <summary>
/// Initializes shaders/buffers, the Oculus runtime and the HMD, then creates
/// the per-eye render textures, the mirror texture and the FOV layer. Shows an
/// error dialog and exits when the runtime or headset is unavailable. On any
/// failure during resource creation, all partially created resources are
/// released.
/// </summary>
/// <param name="e">Standard load event arguments, forwarded to the base class.</param>
protected override void OnLoad(EventArgs e)
{
    base.OnLoad(e);

    InitShader();
    InitBuffer();

    // Define initialization parameters with debug flag.
    OVRTypes.InitParams initializationParameters = new OVRTypes.InitParams();
    initializationParameters.Flags = OVRTypes.InitFlags.Debug;

    // Initialize the Oculus runtime.
    bool success = wrap.Initialize(initializationParameters);
    if (!success)
    {
        MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    // Use the head mounted display.
    OVRTypes.GraphicsLuid graphicsLuid;
    hmd = wrap.Hmd_Create(out graphicsLuid);
    if (hmd == null)
    {
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    if (hmd.ProductName == string.Empty)
    {
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    Console.WriteLine("SDK Version: " + wrap.GetVersionString());

    try
    {
        // Create a render texture and depth buffer at the ideal size for each eye.
        for (int i = 0; i < 2; i++)
        {
            OVRTypes.Sizei idealTextureSize = hmd.GetFovTextureSize((OVRTypes.EyeType)i, hmd.DefaultEyeFov[i], 1);
            eyeRenderTexture[i] = new TextureBuffer(wrap, hmd, true, true, idealTextureSize, 1, IntPtr.Zero, 1);
            eyeDepthBuffer[i] = new DepthBuffer(eyeRenderTexture[i].GetSize(), 0);
        }

        // Note: the mirror window can be any size, for this sample we use 1/2 the HMD resolution.
        windowSize = new OVRTypes.Sizei(hmd.Resolution.Width / 2, hmd.Resolution.Height / 2);

        // For the image displayed at the ordinary monitor - a copy of the Oculus-rendered one.
        OVRTypes.MirrorTextureDesc mirrorTextureDescription = new OVRTypes.MirrorTextureDesc();
        mirrorTextureDescription.Format = OVRTypes.TextureFormat.R8G8B8A8_UNORM_SRGB;
        mirrorTextureDescription.Width = windowSize.Width;
        mirrorTextureDescription.Height = windowSize.Height;
        mirrorTextureDescription.MiscFlags = OVRTypes.TextureMiscFlags.None;

        // Create the texture used to display the rendered result on the computer monitor.
        OVRTypes.Result result;
        result = hmd.CreateMirrorTextureGL(mirrorTextureDescription, out mirrorTexture);
        WriteErrorDetails(wrap, result, "Failed to create mirror texture.");

        layerFov = layers.AddLayerEyeFov();
        layerFov.Header.Flags = OVRTypes.LayerFlags.TextureOriginAtBottomLeft; // OpenGL Texture coordinates start from bottom left
        layerFov.Header.Type = OVRTypes.LayerType.EyeFov;

        // Configure the mirror read buffer.
        uint texId;
        result = mirrorTexture.GetBufferGL(out texId);
        WriteErrorDetails(wrap, result, "Failed to retrieve the texture from the created mirror texture buffer.");

        // Rendertarget for the mirror desktop window.
        GL.GenFramebuffers(1, out mirrorFbo);
        GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, mirrorFbo);
        GL.FramebufferTexture2D(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2D, texId, 0);
        GL.FramebufferRenderbuffer(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.DepthAttachment, RenderbufferTarget.Renderbuffer, 0);
        GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, 0);

        // Turn off vsync to let the compositor do its magic.
        this.VSync = VSyncMode.Off; //wglSwapIntervalEXT(0);

        // FloorLevel will give tracking poses where the floor height is 0.
        result = hmd.SetTrackingOriginType(OVRTypes.TrackingOrigin.FloorLevel);
        WriteErrorDetails(wrap, result, "Failed to set tracking origin type.");

        GL.Enable(EnableCap.DepthTest); //DO NOT DELETE IT IN FUTURE UPDATES!
    }
    catch (Exception ex)
    {
        // Previously the exception was swallowed silently; at least record what
        // failed before releasing the partially created resources. The original
        // control flow (no rethrow) is preserved.
        Console.WriteLine("OnLoad failed: " + ex);

        // Release all resources.
        Dispose(layers);
        if (mirrorFbo != 0) GL.DeleteFramebuffers(1, ref mirrorFbo);
        Dispose(mirrorTexture);
        for (int eyeIndex = 0; eyeIndex < 2; ++eyeIndex)
        {
            Dispose(eyeRenderTexture[eyeIndex]);
            Dispose(eyeDepthBuffer[eyeIndex]);
        }

        // Disposing the device, before the hmd, will cause the hmd to fail when disposing.
        // Disposing the device, after the hmd, will cause the dispose of the device to fail.
        // It looks as if the hmd steals ownership of the device and destroys it, when it's shutting down.
        // device.Dispose();
        Dispose(hmd);
        Dispose(wrap);
    }
}
/// <summary>
/// Main VR render loop. Initializes the Oculus runtime and D3D11 device,
/// builds a texture swap chain plus depth buffer per eye, then renders both
/// eyes each frame until <c>abort</c> is set, submitting the layer to the
/// compositor via <c>SubmitFrame</c>. Releases all per-run resources on exit.
/// <c>Lock</c> is held true for the duration of the loop.
/// </summary>
override protected void Render()
{
    Lock = true;
    using (Wrap oculus = new Wrap())
    {
        // Initialize the Oculus runtime.
        if (!oculus.Initialize(initializationParameters))
        {
            throw new HeadsetError("Failed to initialize the Oculus runtime library.");
        }
        OVRTypes.GraphicsLuid graphicsLuid;
        // Create a set of layers to submit.
        EyeTexture[] eyeTextures = new EyeTexture[2];
        // Create a depth buffer, using the same width and height as the back buffer.
        // NOTE(review): size is hard-coded; presumably it should track the HMD
        // resolution — the original TODO below marks this as suspect.
        Texture2DDescription depthBufferDescription = new Texture2DDescription()
        {
            Format = Format.D32_Float,
            ArraySize = 1,
            MipLevels = 1,
            Width = 1920, // TODO: FIXME?
            Height = 1080,
            SampleDescription = new SampleDescription(1, 0),
            Usage = ResourceUsage.Default,
            BindFlags = BindFlags.DepthStencil,
            CpuAccessFlags = CpuAccessFlags.None,
            OptionFlags = ResourceOptionFlags.None
        };
        // Define how the depth buffer will be used to filter out objects, based on their distance from the viewer.
        DepthStencilStateDescription depthStencilStateDescription = new DepthStencilStateDescription()
        {
            IsDepthEnabled = true,
            DepthComparison = Comparison.Less,
            DepthWriteMask = DepthWriteMask.Zero
        };
        //#if DEBUG
        // SharpDX.Configuration.EnableObjectTracking = true;
        //#endif
        using (Hmd hmd = oculus.Hmd_Create(out graphicsLuid))
        // Create DirectX drawing device.
        using (_device = new Device(SharpDX.Direct3D.DriverType.Hardware, DeviceCreationFlags.BgraSupport, new SharpDX.Direct3D.FeatureLevel[] { SharpDX.Direct3D.FeatureLevel.Level_10_0 }))
        // Create DirectX Graphics Interface factory, used to create the swap chain.
        using (Factory factory = new Factory())
        using (DeviceContext immediateContext = _device.ImmediateContext)
        // Create the depth buffer.
        using (Texture2D depthBuffer = new Texture2D(_device, depthBufferDescription))
        using (DepthStencilView depthStencilView = new DepthStencilView(_device, depthBuffer))
        using (DepthStencilState depthStencilState = new DepthStencilState(_device, depthStencilStateDescription))
        using (Layers layers = new Layers())
        using (_gd = SharpDX.Toolkit.Graphics.GraphicsDevice.New(_device))
        using (vrui = new VRUI(_device, _gd))
        using (customEffectL = GetCustomEffect(_gd))
        using (customEffectR = GetCustomEffect(_gd))
        //using (SharpDX.Toolkit.Graphics.GeometricPrimitive primitive = GraphicTools.CreateGeometry(_projection, _gd, false))
        {
            if (hmd == null)
            {
                throw new HeadsetError("Oculus Rift not detected.");
            }
            if (hmd.ProductName == string.Empty)
            {
                throw new HeadsetError("The HMD is not enabled.");
            }
            // NOTE(review): viewport, factory, depthStencilState and the shared
            // depthStencilView appear unused below (each eye creates its own);
            // kept as-is pending confirmation.
            Viewport viewport = new Viewport(0, 0, hmd.Resolution.Width, hmd.Resolution.Height, 0.0f, 1.0f);
            LayerEyeFov layerEyeFov = layers.AddLayerEyeFov();
            // Retrieve the DXGI device, in order to set the maximum frame latency.
            using (SharpDX.DXGI.Device1 dxgiDevice = _device.QueryInterface<SharpDX.DXGI.Device1>())
            {
                dxgiDevice.MaximumFrameLatency = 1;
            }
            // Per-eye setup: recommended texture size, render description,
            // texture swap chain, render target views and depth buffer.
            for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
            {
                OVRTypes.EyeType eye = (OVRTypes.EyeType)eyeIndex;
                var textureSize = hmd.GetFovTextureSize(eye, hmd.DefaultEyeFov[eyeIndex], 1.0f);
                var renderDescription = hmd.GetRenderDesc(eye, hmd.DefaultEyeFov[eyeIndex]);
                EyeTexture eyeTexture = eyeTextures[eyeIndex] = new EyeTexture()
                {
                    // Retrieve size and position of the texture for the current eye.
                    FieldOfView = hmd.DefaultEyeFov[eyeIndex],
                    TextureSize = textureSize,
                    RenderDescription = renderDescription,
                    // Define a texture at the size recommended for the eye texture.
                    Viewport = new Viewport(0, 0, textureSize.Width, textureSize.Height, 0.0f, 1.0f),
                    HmdToEyeViewOffset = renderDescription.HmdToEyeOffset,
                    Texture2DDescription = new Texture2DDescription()
                    {
                        Width = textureSize.Width,
                        Height = textureSize.Height,
                        ArraySize = 1,
                        MipLevels = 1,
                        Format = Format.R8G8B8A8_UNorm_SRgb,
                        SampleDescription = new SampleDescription(1, 0),
                        Usage = ResourceUsage.Default,
                        CpuAccessFlags = CpuAccessFlags.None,
                        BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget
                    }
                };
                eyeTexture.ViewportSize.Position = new OVRTypes.Vector2i(0, 0);
                eyeTexture.ViewportSize.Size = textureSize;
                // Convert the SharpDX texture description to the native Direct3D texture description.
                OVRTypes.TextureSwapChainDesc textureSwapChainDesc = SharpDXHelpers.CreateTextureSwapChainDescription(eyeTexture.Texture2DDescription);
                AssertSuccess(hmd.CreateTextureSwapChainDX(_device.NativePointer, textureSwapChainDesc, out eyeTexture.SwapTextureSet), oculus, "Failed to create swap chain.");
                // Retrieve the number of buffers of the created swap chain.
                int textureSwapChainBufferCount;
                AssertSuccess(eyeTexture.SwapTextureSet.GetLength(out textureSwapChainBufferCount), oculus, "Failed to retrieve the number of buffers of the created swap chain.");
                // Create room for each DirectX texture in the SwapTextureSet.
                eyeTexture.Textures = new Texture2D[textureSwapChainBufferCount];
                eyeTexture.RenderTargetViews = new RenderTargetView[textureSwapChainBufferCount];
                // Create a texture 2D and a render target view, for each unmanaged texture contained in the SwapTextureSet.
                for (int textureIndex = 0; textureIndex < textureSwapChainBufferCount; textureIndex++)
                {
                    // Interface ID of the Direct3D Texture2D interface.
                    Guid textureInterfaceId = new Guid("6f15aaf2-d208-4e89-9ab4-489535d34f9c");
                    // Retrieve the Direct3D texture contained in the Oculus TextureSwapChainBuffer.
                    IntPtr swapChainTextureComPtr = IntPtr.Zero;
                    AssertSuccess(eyeTexture.SwapTextureSet.GetBufferDX(textureIndex, textureInterfaceId, out swapChainTextureComPtr), oculus, "Failed to retrieve a texture from the created swap chain.");
                    // Create a managed Texture2D, based on the unmanaged texture pointer.
                    eyeTexture.Textures[textureIndex] = new Texture2D(swapChainTextureComPtr);
                    // Create a render target view for the current Texture2D.
                    eyeTexture.RenderTargetViews[textureIndex] = new RenderTargetView(_device, eyeTexture.Textures[textureIndex]);
                }
                // Define the depth buffer, at the size recommended for the eye texture.
                eyeTexture.DepthBufferDescription = new Texture2DDescription()
                {
                    Format = Format.D32_Float,
                    Width = eyeTexture.TextureSize.Width,
                    Height = eyeTexture.TextureSize.Height,
                    ArraySize = 1,
                    MipLevels = 1,
                    SampleDescription = new SampleDescription(1, 0),
                    Usage = ResourceUsage.Default,
                    BindFlags = BindFlags.DepthStencil,
                    CpuAccessFlags = CpuAccessFlags.None,
                    OptionFlags = ResourceOptionFlags.None
                };
                // Create the depth buffer.
                eyeTexture.DepthBuffer = new Texture2D(_device, eyeTexture.DepthBufferDescription);
                eyeTexture.DepthStencilView = new DepthStencilView(_device, eyeTexture.DepthBuffer);
                // Specify the texture to show on the HMD.
                layerEyeFov.ColorTexture[eyeIndex] = eyeTexture.SwapTextureSet.TextureSwapChainPtr;
                layerEyeFov.Viewport[eyeIndex].Position = new OVRTypes.Vector2i(0, 0);
                layerEyeFov.Viewport[eyeIndex].Size = eyeTexture.TextureSize;
                layerEyeFov.Fov[eyeIndex] = eyeTexture.FieldOfView;
                layerEyeFov.Header.Flags = OVRTypes.LayerFlags.HighQuality;
            }
            #region Render loop
            DateTime startTime = DateTime.Now;
            DateTime lastTime = DateTime.Now;
            float deltaTime = 0;
            // Start with default background.
            SetDefaultScene();
            // Per-frame loop: predict head pose, render each eye into its swap
            // chain texture, then submit the layer to the compositor.
            while (!abort)
            {
                UpdateContentIfRequested();
                OVRTypes.Vector3f[] hmdToEyeViewOffsets = { eyeTextures[0].HmdToEyeViewOffset, eyeTextures[1].HmdToEyeViewOffset };
                //OVR.FrameTiming frameTiming = hmd.GetFrameTiming(0);
                //OVR.TrackingState trackingState = hmd.GetTrackingState(frameTiming.DisplayMidpointSeconds);
                double displayMidpoint = hmd.GetPredictedDisplayTime(0);
                OVRTypes.TrackingState trackingState = hmd.GetTrackingState(displayMidpoint, true);
                OVRTypes.Posef[] eyePoses = new OVRTypes.Posef[2];
                // Calculate the position and orientation of each eye.
                oculus.CalcEyePoses(trackingState.HeadPose.ThePose, hmdToEyeViewOffsets, ref eyePoses);
                // NOTE(review): timeSinceStart and centerEye are computed but
                // not used below — possibly leftovers; kept as-is.
                float timeSinceStart = (float)(DateTime.Now - startTime).TotalSeconds;
                deltaTime = (float)(DateTime.Now - lastTime).TotalSeconds;
                lastTime = DateTime.Now;
                Vector3 centerEye = (eyePoses[0].Position.ToVector3() + eyePoses[1].Position.ToVector3()) * 0.5f;
                for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    OVRTypes.EyeType eye = (OVRTypes.EyeType)eyeIndex;
                    EyeTexture eyeTexture = eyeTextures[eyeIndex];
                    layerEyeFov.RenderPose[eyeIndex] = eyePoses[eyeIndex];
                    // Update the render description at each frame, as the HmdToEyeOffset can change at runtime.
                    eyeTexture.RenderDescription = hmd.GetRenderDesc(eye, hmd.DefaultEyeFov[eyeIndex]);
                    // Retrieve the index of the active texture.
                    int textureIndex;
                    AssertSuccess(eyeTexture.SwapTextureSet.GetCurrentIndex(out textureIndex), oculus, "Failed to retrieve texture swap chain current index.");
                    // Bind and clear the current eye's render target and depth buffer.
                    immediateContext.OutputMerger.SetRenderTargets(eyeTexture.DepthStencilView, eyeTexture.RenderTargetViews[textureIndex]);
                    immediateContext.ClearRenderTargetView(eyeTexture.RenderTargetViews[textureIndex], Color.Black);
                    immediateContext.ClearDepthStencilView(eyeTexture.DepthStencilView, DepthStencilClearFlags.Depth | DepthStencilClearFlags.Stencil, 1.0f, 0);
                    immediateContext.Rasterizer.SetViewport(eyeTexture.Viewport);
                    // Retrieve the eye rotation quaternion and use it to calculate the LookAt direction and the LookUp direction.
                    Quaternion lookRotation = SharpDXHelpers.ToQuaternion(eyePoses[eyeIndex].Orientation);
                    // NOTE(review): the (1,0,0,0) quaternion multiply and the
                    // negated X below look like a coordinate-handedness
                    // conversion — confirm against SharpDXHelpers conventions.
                    lookRotation = new Quaternion(1, 0, 0, 0) * lookRotation;
                    Matrix rotationMatrix = Matrix.RotationQuaternion(lookRotation);
                    Vector3 lookUp = Vector3.Transform(new Vector3(0, -1, 0), rotationMatrix).ToVector3();
                    Vector3 lookAt = Vector3.Transform(new Vector3(0, 0, 1), rotationMatrix).ToVector3();
                    //Vector3 eyeDiff = eyePoses[eyeIndex].Position.ToVector3() - eyePoses[1 - eyeIndex].Position.ToVector3();
                    Vector3 lookPosition = new Vector3(-eyePoses[eyeIndex].Position.X, eyePoses[eyeIndex].Position.Y, eyePoses[eyeIndex].Position.Z);
                    Matrix worldMatrix = Matrix.Translation(lookPosition);
                    Matrix viewMatrix = Matrix.LookAtLH(lookPosition, lookPosition + lookAt, lookUp);
                    Matrix projectionMatrix = oculus.Matrix4f_Projection(eyeTexture.FieldOfView, 0.1f, 100.0f, OVRTypes.ProjectionModifier.LeftHanded).ToMatrix();
                    projectionMatrix.Transpose();
                    Matrix MVP = worldMatrix * viewMatrix * projectionMatrix;
                    customEffectL.Parameters["WorldViewProj"].SetValue(MVP);
                    customEffectR.Parameters["WorldViewProj"].SetValue(MVP);
                    // Draw the scene geometry under the content lock; primitive
                    // may be swapped/null concurrently, hence the guards.
                    lock (localCritical)
                    {
                        try
                        {
                            if (eyeIndex == 0)
                            {
                                primitive?.Draw(customEffectL);
                            }
                            if (eyeIndex == 1)
                            {
                                primitive?.Draw(customEffectR);
                            }
                        }
                        catch (NullReferenceException) { ; }
                    }
                    // Publish the current look direction to subscribers (left eye only).
                    if (ProvideLook != null && eyeIndex == 0)
                    {
                        lookRotation.Invert();
                        lookRotation = lookRotation * new Quaternion(1, 0, 0, 0); // rotate 180 in x
                        Vector3 forward = Vector3.Transform(Vector3.ForwardRH, lookRotation);
                        Vector3 up = Vector3.Transform(Vector3.Up, lookRotation);
                        log.Publish("oculus.forward", forward.ToString("0.00"));
                        log.Publish("oculus.up", up.ToString("0.00"));
                        log.Publish("oculus.lookAt", lookAt.ToString("0.00"));
                        log.Publish("oculus.lookUp", lookUp.ToString("0.00"));
                        log.Publish("oculus.vr_quat", lookRotation);
                        log.Publish("q.sent", lookRotation);
                        ProvideLook(lookPosition, lookRotation, OculusFOV);
                    }
                    // reset UI position every frame if it is not visible
                    if (vrui.isUIHidden)
                    {
                        vrui.SetWorldPosition(viewMatrix.Forward, lookPosition, false);
                    }
                    vrui.Draw(Media, currentTime, Duration);
                    vrui.Render(deltaTime, viewMatrix, projectionMatrix, lookPosition, ShouldShowVRUI);
                    // Commits any pending changes to the TextureSwapChain, and advances its current index.
                    AssertSuccess(eyeTexture.SwapTextureSet.Commit(), oculus, "Failed to commit the swap chain texture.");
                    //Console.WriteLine("xbox: " + ((hmd.ovr_GetConnectedControllerTypes() & OVRTypes.ControllerType.XBox) != 0));
                    //Console.WriteLine("remote: " + ((hmd.ovr_GetConnectedControllerTypes() & OVRTypes.ControllerType.Remote) != 0));
                    //Console.WriteLine("active: " + hmd.GetInputState(OVRTypes.ControllerType.Active));
                    //Console.WriteLine("buttons: " + hmd.GetInputState(OVRTypes.ControllerType.Remote).Buttons);
                }
                hmd.SubmitFrame(0, layers);
            }
            #endregion
            //debugWindow.Stop();
            waitForRendererStop.Set();
            // Release all resources.
            primitive?.Dispose();
            eyeTextures[0].Dispose();
            eyeTextures[1].Dispose();
            immediateContext.ClearState();
            immediateContext.Flush();
        }
    }
    Lock = false;
}
/// <summary>
/// Initializes shaders/buffers, the Oculus runtime (SDK 0.8-era API) and the
/// HMD, then creates per-eye render targets, the GL mirror texture and the FOV
/// layer. Shows an error dialog and exits when the runtime or headset is
/// unavailable.
/// </summary>
/// <param name="e">Standard load event arguments, forwarded to the base class.</param>
protected override void OnLoad(EventArgs e)
{
    base.OnLoad(e);
    InitShader();
    InitBuffer();
    // Initialize the Oculus runtime.
    bool success = wrap.Initialize();
    if (!success)
    {
        MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }
    // Use the head mounted display.
    OVR.GraphicsLuid graphicsLuid;
    hmd = wrap.Hmd_Create(out graphicsLuid);
    if (hmd == null)
    {
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }
    if (hmd.ProductName == string.Empty)
    {
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }
    Console.WriteLine("SDK Version: " + wrap.GetVersionString());
    // Create a render target and depth buffer at the ideal size for each eye.
    for (int i = 0; i < 2; i++)
    {
        OVR.Sizei idealTextureSize = hmd.GetFovTextureSize((OVR.EyeType)i, hmd.DefaultEyeFov[i], 1);
        eyeRenderTexture[i] = new OvrSharedRendertarget(idealTextureSize.Width, idealTextureSize.Height, hmd);
        eyeDepthBuffer[i] = new DepthBuffer(eyeRenderTexture[i].Width, eyeRenderTexture[i].Height);
    }
    // For image displayed at ordinary monitor - copy of Oculus rendered one.
    // NOTE(review): the result of CreateMirrorTextureGL is not checked here,
    // unlike the other error-checked calls in this file — confirm intentional.
    hmd.CreateMirrorTextureGL((uint)All.Srgb8Alpha8, this.Width, this.Height, out mirrorTex);
    layerFov = layers.AddLayerEyeFov();
    layerFov.Header.Flags = OVR.LayerFlags.TextureOriginAtBottomLeft; // OpenGL Texture coordinates start from bottom left
    layerFov.Header.Type = OVR.LayerType.EyeFov;
    // Rendertarget for mirror desktop window.
    GL.GenFramebuffers(1, out mirrorFbo);
    GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, mirrorFbo);
    GL.FramebufferTexture2D(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2D, mirrorTex.Texture.TexId, 0);
    GL.FramebufferRenderbuffer(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.DepthAttachment, RenderbufferTarget.Renderbuffer, 0);
    GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, 0);
    // Cache the per-eye render descriptions used during rendering.
    EyeRenderDesc[0] = hmd.GetRenderDesc(OVR.EyeType.Left, hmd.DefaultEyeFov[0]);
    EyeRenderDesc[1] = hmd.GetRenderDesc(OVR.EyeType.Right, hmd.DefaultEyeFov[1]);
    // Specify which head tracking capabilities to enable.
    hmd.SetEnabledCaps(OVR.HmdCaps.DebugDevice);
    // Start the sensor.
    // Update SDK 0.8: Usage of ovr_ConfigureTracking is no longer needed unless you want to disable tracking features. By default, ovr_Create enables the full tracking capabilities supported by any given device.
    //hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None);
    // Disable vsync so frame pacing is left to the Oculus compositor.
    this.VSync = VSyncMode.Off;
    hmd.RecenterPose();
    // Init GL.
    GL.Enable(EnableCap.DepthTest);
}