void ResizeSurface()
{
    Math.Rectangle newBounds = surface.Bounds;
    SurfaceSize = newBounds.Size;
    SurfaceBounds = newBounds;

    deviceContext.OutputMerger.ResetTargets();
    swapChain.ResizeBuffers(0, (int)newBounds.Width, (int)newBounds.Height, DXGI.Format.R8G8B8A8_UNorm, DXGI.SwapChainFlags.None);
    deviceContext.Rasterizer.SetViewport(0, 0, newBounds.Width, newBounds.Height);

    viewMatrix = Matrix.Transpose(Matrix.Scaling(2f / newBounds.Width, -2f / newBounds.Height, 1));
    viewMatrix *= Matrix.Transpose(Matrix.Translation(-newBounds.Width / 2f, -newBounds.Height / 2f, 0));
    UpdateMatrixBuffer();

    depthStencilView?.Dispose();
    depthStencilBuffer?.Dispose();
    surfaceTarget?.Dispose();
    surfaceView?.Dispose();
    surfaceTexture?.Dispose();

    depthStencilBuffer = device.CreateDepthStencilBuffer((int)newBounds.Width, (int)newBounds.Height, sampleDescription, out depthStencilView);
    surfaceTexture = device.CreateSurface((int)newBounds.Width, (int)newBounds.Height, sampleDescription, out surfaceTarget);
    deviceContext.OutputMerger.SetTargets(depthStencilView, surfaceTarget);
    surfaceView = new D3D11.ShaderResourceView(device, surfaceTexture);
}
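// A quick check of the pixel-to-clip mapping built above, assuming SharpDX's row-vector Matrix
// conventions (the two Transpose calls only repack the matrix for the HLSL constant buffer).
// w and h are hypothetical stand-ins for newBounds.Width and newBounds.Height:
var toClip = Matrix.Translation(-w / 2f, -h / 2f, 0) * Matrix.Scaling(2f / w, -2f / h, 1);
var topLeft = Vector3.TransformCoordinate(new Vector3(0, 0, 0), toClip);     // top-left pixel  -> (-1, +1, 0)
var bottomRight = Vector3.TransformCoordinate(new Vector3(w, h, 0), toClip); // bottom-right    -> (+1, -1, 0)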
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected virtual void Dispose(bool disposing)
{
    if (!disposed)
    {
        if (disposing) //Managed here
        {
            if (d3dRenderTarget != null)
            {
                d3dRenderTarget.Dispose();
            }
            if (d3dDepthStencil != null)
            {
                d3dDepthStencil.Dispose();
            }
            if (swapChain != null)
            {
                swapChain.Dispose();
            }
            if (d3dDevice != null)
            {
                d3dDevice.Dispose();
            }
            if (d3dContext != null)
            {
                d3dContext.Dispose();
            }
        }

        //Unmanaged here
        disposed = true;
    }
}
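// The standard IDisposable entry points that usually accompany this Dispose(bool) pattern;
// a sketch only, with "D3DHost" as a hypothetical name for the containing class:
public void Dispose()
{
    Dispose(true);
    GC.SuppressFinalize(this); // no need to finalize once managed cleanup has run
}

~D3DHost()
{
    Dispose(false); // finalizer path: only unmanaged resources may be touched here
}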
public void Dispose()
{
    backbufferTexture.Dispose();
    backbufferRTV.Dispose();
    sceneTexture.Dispose();
    sceneRTV.Dispose();
    sceneSRV.Dispose();
    depthDSV.Dispose();
    depthSRV.Dispose();
    depthStencilState.Dispose();
    rasterizerState.Dispose();
    constantBuffer.Dispose();
    mainVertexShader.Dispose();
    mainPixelShader.Dispose();
    trianglePositionVertexBuffer.Dispose();
    triangleIndexBuffer.Dispose();
    inputLayout.Dispose();
    postEffect.Dispose();
}
public void Dispose()
{
    renderTargetView?.Dispose();
    depthStencilView?.Dispose();
    renderTargetResource?.Dispose();
    depthStencilResource?.Dispose();
}
// https://gamedev.stackexchange.com/questions/75461/how-do-i-set-up-a-depth-buffer-in-sharpdx
// https://docs.microsoft.com/en-us/windows/desktop/direct3d11/d3d10-graphics-programming-guide-depth-stencil
private void EnableDepthTest()
{
    if (depthStencilView != null)
    {
        depthStencilView.Dispose();
    }
    if (depthStencilState != null)
    {
        depthStencilState.Dispose();
    }

    // Describe the depth texture
    depthTextureDesc = new D3D11.Texture2DDescription
    {
        Format = Format.D16_UNorm,
        ArraySize = 1,
        MipLevels = 1,
        Width = this.Width,
        Height = this.Height,
        SampleDescription = new SampleDescription(1, 0),
        Usage = D3D11.ResourceUsage.Default,
        BindFlags = D3D11.BindFlags.DepthStencil,
        CpuAccessFlags = D3D11.CpuAccessFlags.None,
        OptionFlags = D3D11.ResourceOptionFlags.None
    };

    // Create the depth stencil view
    using (var depthTex = new D3D11.Texture2D(d3dDevice, depthTextureDesc))
    {
        depthStencilView = new D3D11.DepthStencilView(d3dDevice, depthTex);
    }

    // Create the depth stencil state description
    depthStencilStateDesc = new D3D11.DepthStencilStateDescription();
    depthStencilStateDesc.IsDepthEnabled = true;
    depthStencilStateDesc.DepthWriteMask = D3D11.DepthWriteMask.All;
    depthStencilStateDesc.DepthComparison = D3D11.Comparison.Less;
    depthStencilStateDesc.IsStencilEnabled = false;

    // Create the depth stencil state
    depthStencilState = new D3D11.DepthStencilState(d3dDevice, depthStencilStateDesc);

    // Update the context
    d3dDeviceContext.OutputMerger.SetTargets(depthStencilView, renderTargetView);
    d3dDeviceContext.OutputMerger.SetDepthStencilState(depthStencilState);
}
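// A minimal per-frame sketch of how the state set up above is typically used, assuming the same
// d3dDeviceContext/renderTargetView/depthStencilView fields; clearing depth to 1.0f each frame is
// what makes the Comparison.Less test meaningful:
d3dDeviceContext.ClearRenderTargetView(renderTargetView, new SharpDX.Color4(0, 0, 0, 1));
d3dDeviceContext.ClearDepthStencilView(depthStencilView, D3D11.DepthStencilClearFlags.Depth, 1.0f, 0);
// ... issue draw calls here ...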
public void Dispose()
{
    _shaderSignature?.Dispose();
    _vertexShaderByteCode?.Dispose();
    _vertexShader?.Dispose();
    _pixelShaderByteCode?.Dispose();
    _pixelShader?.Dispose();
    _verticesBuffer?.Dispose();
    _inputLayout?.Dispose();
    _contantBuffer?.Dispose();
    _depthBuffer?.Dispose();
    _depthView?.Dispose();
    _context?.ClearState();
    _context?.Flush();
    _swapChain?.Dispose();
    _renderView?.Dispose();
    _backBuffer?.Dispose();
    _device?.Dispose();
    _context?.Dispose();
    _factory?.Dispose();
}
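// Optional debugging aid, a sketch only: if _device was created with DeviceCreationFlags.Debug,
// querying the debug interface just before the final _device?.Dispose() can report any objects
// this method missed. DeviceDebug and ReportingLevel are the SharpDX.Direct3D11 wrappers for
// ID3D11Debug; verify the names against the SharpDX version in use.
using (var deviceDebug = _device?.QueryInterface<SharpDX.Direct3D11.DeviceDebug>())
{
    deviceDebug?.ReportLiveDeviceObjects(SharpDX.Direct3D11.ReportingLevel.Detail);
}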
private void InitializeOculus()
{
    RenderForm form = new RenderForm("OculusWrap SharpDX demo");
    Wrap oculus = new Wrap();
    Hmd hmd;

    form.KeyUp += new System.Windows.Forms.KeyEventHandler(this.Window_KeyUp);
    //form.moused
    //form.Activate();
    //form.Show();

    int textureWidth = 0, textureHeight = 0;
    newTextureArrived = false;

    // zoom == 2 is not implemented, because the visual quality would be too low.
    // zoom == 4 will be implemented in the future.
    if (zoom == 3)
    {
        textureWidth = 3328;
        textureHeight = 1664;
    }

    bool success = oculus.Initialize();
    if (!success)
    {
        System.Windows.Forms.MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    // Use the head mounted display, if it's available, otherwise use the debug HMD.
    int numberOfHeadMountedDisplays = oculus.Hmd_Detect();
    if (numberOfHeadMountedDisplays > 0)
        hmd = oculus.Hmd_Create(0);
    else
        hmd = oculus.Hmd_CreateDebug(OculusWrap.OVR.HmdType.DK2);

    if (hmd == null)
    {
        System.Windows.Forms.MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    if (hmd.ProductName == string.Empty)
        System.Windows.Forms.MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);

    // Specify which head tracking capabilities to enable.
    hmd.SetEnabledCaps(OVR.HmdCaps.LowPersistence | OVR.HmdCaps.DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion.
    hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None);

    // Create a set of layers to submit.
    EyeTexture[] eyeTextures = new EyeTexture[2];
    OVR.ovrResult result;

    // Create DirectX drawing device.
    SharpDX.Direct3D11.Device device = new Device(SharpDX.Direct3D.DriverType.Hardware, DeviceCreationFlags.Debug);

    // Create DirectX Graphics Interface factory, used to create the swap chain.
    Factory factory = new Factory();

    DeviceContext immediateContext = device.ImmediateContext;

    // Define the properties of the swap chain.
    SwapChainDescription swapChainDescription = new SwapChainDescription();
    swapChainDescription.BufferCount = 1;
    swapChainDescription.IsWindowed = true;
    swapChainDescription.OutputHandle = form.Handle;
    swapChainDescription.SampleDescription = new SampleDescription(1, 0);
    swapChainDescription.Usage = Usage.RenderTargetOutput | Usage.ShaderInput;
    swapChainDescription.SwapEffect = SwapEffect.Sequential;
    swapChainDescription.Flags = SwapChainFlags.AllowModeSwitch;
    swapChainDescription.ModeDescription.Width = form.Width;
    swapChainDescription.ModeDescription.Height = form.Height;
    swapChainDescription.ModeDescription.Format = Format.R8G8B8A8_UNorm;
    swapChainDescription.ModeDescription.RefreshRate.Numerator = 0;
    swapChainDescription.ModeDescription.RefreshRate.Denominator = 1;

    // Create the swap chain.
    SharpDX.DXGI.SwapChain swapChain = new SwapChain(factory, device, swapChainDescription);

    // Retrieve the back buffer of the swap chain.
    Texture2D backBuffer = swapChain.GetBackBuffer<Texture2D>(0);
    RenderTargetView backBufferRenderTargetView = new RenderTargetView(device, backBuffer);

    // Create a depth buffer, using the same width and height as the back buffer.
    Texture2DDescription depthBufferDescription = new Texture2DDescription();
    depthBufferDescription.Format = Format.D32_Float;
    depthBufferDescription.ArraySize = 1;
    depthBufferDescription.MipLevels = 1;
    depthBufferDescription.Width = form.Width;
    depthBufferDescription.Height = form.Height;
    depthBufferDescription.SampleDescription = new SampleDescription(1, 0);
    depthBufferDescription.Usage = ResourceUsage.Default;
    depthBufferDescription.BindFlags = BindFlags.DepthStencil;
    depthBufferDescription.CpuAccessFlags = CpuAccessFlags.None;
    depthBufferDescription.OptionFlags = ResourceOptionFlags.None;

    // Define how the depth buffer will be used to filter out objects, based on their distance from the viewer.
    DepthStencilStateDescription depthStencilStateDescription = new DepthStencilStateDescription();
    depthStencilStateDescription.IsDepthEnabled = true;
    depthStencilStateDescription.DepthComparison = Comparison.Less;
    depthStencilStateDescription.DepthWriteMask = DepthWriteMask.Zero;

    // Create the depth buffer.
    Texture2D depthBuffer = new Texture2D(device, depthBufferDescription);
    DepthStencilView depthStencilView = new DepthStencilView(device, depthBuffer);
    DepthStencilState depthStencilState = new DepthStencilState(device, depthStencilStateDescription);

    Viewport viewport = new Viewport(0, 0, hmd.Resolution.Width, hmd.Resolution.Height, 0.0f, 1.0f);

    immediateContext.OutputMerger.SetDepthStencilState(depthStencilState);
    immediateContext.OutputMerger.SetRenderTargets(depthStencilView, backBufferRenderTargetView);
    immediateContext.Rasterizer.SetViewport(viewport);

    // Retrieve the DXGI device, in order to set the maximum frame latency.
    using (SharpDX.DXGI.Device1 dxgiDevice = device.QueryInterface<SharpDX.DXGI.Device1>())
    {
        dxgiDevice.MaximumFrameLatency = 1;
    }

    Layers layers = new Layers();
    LayerEyeFov layerEyeFov = layers.AddLayerEyeFov();

    for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
    {
        OVR.EyeType eye = (OVR.EyeType)eyeIndex;
        EyeTexture eyeTexture = new EyeTexture();
        eyeTextures[eyeIndex] = eyeTexture;

        // Retrieve size and position of the texture for the current eye.
        eyeTexture.FieldOfView = hmd.DefaultEyeFov[eyeIndex];
        eyeTexture.TextureSize = hmd.GetFovTextureSize(eye, hmd.DefaultEyeFov[eyeIndex], 1.0f);
        eyeTexture.RenderDescription = hmd.GetRenderDesc(eye, hmd.DefaultEyeFov[eyeIndex]);
        eyeTexture.HmdToEyeViewOffset = eyeTexture.RenderDescription.HmdToEyeViewOffset;
        eyeTexture.ViewportSize.Position = new OVR.Vector2i(0, 0);
        eyeTexture.ViewportSize.Size = eyeTexture.TextureSize;
        eyeTexture.Viewport = new Viewport(0, 0, eyeTexture.TextureSize.Width, eyeTexture.TextureSize.Height, 0.0f, 1.0f);

        // Define a texture at the size recommended for the eye texture.
        eyeTexture.Texture2DDescription = new Texture2DDescription();
        eyeTexture.Texture2DDescription.Width = eyeTexture.TextureSize.Width;
        eyeTexture.Texture2DDescription.Height = eyeTexture.TextureSize.Height;
        eyeTexture.Texture2DDescription.ArraySize = 1;
        eyeTexture.Texture2DDescription.MipLevels = 1;
        eyeTexture.Texture2DDescription.Format = Format.R8G8B8A8_UNorm;
        eyeTexture.Texture2DDescription.SampleDescription = new SampleDescription(1, 0);
        eyeTexture.Texture2DDescription.Usage = ResourceUsage.Default;
        eyeTexture.Texture2DDescription.CpuAccessFlags = CpuAccessFlags.None;
        eyeTexture.Texture2DDescription.BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget;

        // Convert the SharpDX texture description to the native Direct3D texture description.
        OVR.D3D11.D3D11_TEXTURE2D_DESC swapTextureDescriptionD3D11 = SharpDXHelpers.CreateTexture2DDescription(eyeTexture.Texture2DDescription);

        // Create a SwapTextureSet, which will contain the textures to render to, for the current eye.
        result = hmd.CreateSwapTextureSetD3D11(device.NativePointer, ref swapTextureDescriptionD3D11, out eyeTexture.SwapTextureSet);
        WriteErrorDetails(oculus, result, "Failed to create swap texture set.");

        // Create room for each DirectX texture in the SwapTextureSet.
        eyeTexture.Textures = new Texture2D[eyeTexture.SwapTextureSet.TextureCount];
        eyeTexture.RenderTargetViews = new RenderTargetView[eyeTexture.SwapTextureSet.TextureCount];

        // Create a texture 2D and a render target view, for each unmanaged texture contained in the SwapTextureSet.
        for (int textureIndex = 0; textureIndex < eyeTexture.SwapTextureSet.TextureCount; textureIndex++)
        {
            // Retrieve the current textureData object.
            OVR.D3D11.D3D11TextureData textureData = eyeTexture.SwapTextureSet.Textures[textureIndex];

            // Create a managed Texture2D, based on the unmanaged texture pointer.
            eyeTexture.Textures[textureIndex] = new Texture2D(textureData.Texture);

            // Create a render target view for the current Texture2D.
            eyeTexture.RenderTargetViews[textureIndex] = new RenderTargetView(device, eyeTexture.Textures[textureIndex]);
        }

        // Define the depth buffer, at the size recommended for the eye texture.
        eyeTexture.DepthBufferDescription = new Texture2DDescription();
        eyeTexture.DepthBufferDescription.Format = Format.D32_Float;
        eyeTexture.DepthBufferDescription.Width = eyeTexture.TextureSize.Width;
        eyeTexture.DepthBufferDescription.Height = eyeTexture.TextureSize.Height;
        eyeTexture.DepthBufferDescription.ArraySize = 1;
        eyeTexture.DepthBufferDescription.MipLevels = 1;
        eyeTexture.DepthBufferDescription.SampleDescription = new SampleDescription(1, 0);
        eyeTexture.DepthBufferDescription.Usage = ResourceUsage.Default;
        eyeTexture.DepthBufferDescription.BindFlags = BindFlags.DepthStencil;
        eyeTexture.DepthBufferDescription.CpuAccessFlags = CpuAccessFlags.None;
        eyeTexture.DepthBufferDescription.OptionFlags = ResourceOptionFlags.None;

        // Create the depth buffer.
        eyeTexture.DepthBuffer = new Texture2D(device, eyeTexture.DepthBufferDescription);
        eyeTexture.DepthStencilView = new DepthStencilView(device, eyeTexture.DepthBuffer);

        // Specify the texture to show on the HMD.
        layerEyeFov.ColorTexture[eyeIndex] = eyeTexture.SwapTextureSet.SwapTextureSetPtr;
        layerEyeFov.Viewport[eyeIndex].Position = new OVR.Vector2i(0, 0);
        layerEyeFov.Viewport[eyeIndex].Size = eyeTexture.TextureSize;
        layerEyeFov.Fov[eyeIndex] = eyeTexture.FieldOfView;
        layerEyeFov.Header.Flags = OVR.LayerFlags.TextureOriginAtBottomLeft;
    }

    // Define the texture used to display the rendered result on the computer monitor.
    Texture2DDescription mirrorTextureDescription = new Texture2DDescription();
    mirrorTextureDescription.Width = form.Width;
    mirrorTextureDescription.Height = form.Height;
    mirrorTextureDescription.ArraySize = 1;
    mirrorTextureDescription.MipLevels = 1;
    mirrorTextureDescription.Format = Format.R8G8B8A8_UNorm;
    mirrorTextureDescription.SampleDescription = new SampleDescription(1, 0);
    mirrorTextureDescription.Usage = ResourceUsage.Default;
    mirrorTextureDescription.CpuAccessFlags = CpuAccessFlags.None;
    mirrorTextureDescription.BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget;

    SamplerStateDescription samplerStateDescription = new SamplerStateDescription
    {
        AddressU = TextureAddressMode.Wrap,
        AddressV = TextureAddressMode.Wrap,
        AddressW = TextureAddressMode.Wrap,
        Filter = Filter.Anisotropic
    };

    RasterizerStateDescription rasterizerStateDescription = RasterizerStateDescription.Default();
    rasterizerStateDescription.IsFrontCounterClockwise = true;

    // Convert the SharpDX texture description to the native Direct3D texture description.
    OVR.D3D11.D3D11_TEXTURE2D_DESC mirrorTextureDescriptionD3D11 = SharpDXHelpers.CreateTexture2DDescription(mirrorTextureDescription);

    OculusWrap.D3D11.MirrorTexture mirrorTexture;

    // Create the texture used to display the rendered result on the computer monitor.
    result = hmd.CreateMirrorTextureD3D11(device.NativePointer, ref mirrorTextureDescriptionD3D11, out mirrorTexture);
    WriteErrorDetails(oculus, result, "Failed to create mirror texture.");

    Texture2D mirrorTextureD3D11 = new Texture2D(mirrorTexture.Texture.Texture);

    #region Vertex and pixel shader

    // Create the vertex shader.
    ShaderBytecode vertexShaderByteCode = ShaderBytecode.CompileFromFile("Shaders.fx", "VertexShaderMain", "vs_4_0");
    VertexShader vertexShader = new VertexShader(device, vertexShaderByteCode);

    // Create the pixel shader.
    ShaderBytecode pixelShaderByteCode = ShaderBytecode.CompileFromFile("Shaders.fx", "PixelShaderMain", "ps_4_0");
    PixelShader pixelShader = new PixelShader(device, pixelShaderByteCode);

    ShaderSignature shaderSignature = ShaderSignature.GetInputSignature(vertexShaderByteCode);

    Texture2D myTexture = new Texture2D(device, new Texture2DDescription()
    {
        Format = Format.R8G8B8A8_UNorm,
        ArraySize = 1,
        MipLevels = 1,
        Width = textureWidth,
        Height = textureHeight,
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Dynamic,
        BindFlags = BindFlags.ShaderResource,
        CpuAccessFlags = CpuAccessFlags.Write,
        OptionFlags = ResourceOptionFlags.None,
    });
    ShaderResourceView textureView = new ShaderResourceView(device, myTexture);

    // Set the sampler for the texture.
    SamplerState samplerState = new SamplerState(device, samplerStateDescription);

    // Initialize the rasterizer.
    RasterizerState rasterizerState = new RasterizerState(device, rasterizerStateDescription);

    // Specify that each vertex consists of a position, a color and a texture coordinate.
    int[] indices = null;
    Vertex[] vertices = null;
    CreateGeometry(out indices, out vertices);

    InputElement[] inputElements = new InputElement[]
    {
        new InputElement("SV_Position", 0, Format.R32G32B32A32_Float, 0, 0),
        new InputElement("COLOR", 0, Format.R32G32B32A32_Float, 16, 0),
        new InputElement("TEXCOORD", 0, Format.R32G32_Float, 32, 0),
        /*new InputElement("TEXCOORD", 0, Format.R32G32_Float, 16, 0),
        new InputElement("TEXCOORD", 0, Format.R32G32_Float, 32, 0),*/
    };

    // Define an input layout to be passed to the vertex shader.
    InputLayout inputLayout = new InputLayout(device, shaderSignature, inputElements);

    // Create a vertex buffer, containing our 3D model.
    Buffer vertexBuffer = Buffer.Create(device, BindFlags.VertexBuffer, vertices); //m_vertices);

    // Create a constant buffer, to contain our WorldViewProjection matrix, that will be passed to the vertex shader.
    Buffer constantBuffer = new Buffer(device, Utilities.SizeOf<Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);

    Buffer indexBuffer = SharpDX.Direct3D11.Buffer.Create(device, BindFlags.IndexBuffer, indices);

    // Set up the immediate context to use the shaders and model we defined.
    immediateContext.InputAssembler.InputLayout = inputLayout;
    immediateContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
    immediateContext.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBuffer, Utilities.SizeOf<Vertex>(), 0));
    immediateContext.InputAssembler.SetIndexBuffer(indexBuffer, Format.R32_UInt, 0);
    immediateContext.VertexShader.SetConstantBuffer(0, constantBuffer);
    immediateContext.VertexShader.Set(vertexShader);
    immediateContext.PixelShader.Set(pixelShader);
    immediateContext.PixelShader.SetShaderResource(0, textureView);
    immediateContext.PixelShader.SetSampler(0, samplerState);

    #endregion

    DateTime startTime = DateTime.Now;
    Vector3 position = new Vector3(0, 0, 0);

    oculusReady = true;

    #region Render loop

    RenderLoop.Run(form, () =>
    {
        OVR.Vector3f[] hmdToEyeViewOffsets = { eyeTextures[0].HmdToEyeViewOffset, eyeTextures[1].HmdToEyeViewOffset };
        OVR.FrameTiming frameTiming = hmd.GetFrameTiming(0);
        OVR.TrackingState trackingState = hmd.GetTrackingState(frameTiming.DisplayMidpointSeconds);
        OVR.Posef[] eyePoses = new OVR.Posef[2];

        // Calculate the position and orientation of each eye.
        oculus.CalcEyePoses(trackingState.HeadPose.ThePose, hmdToEyeViewOffsets, ref eyePoses);

        float timeSinceStart = (float)(DateTime.Now - startTime).TotalSeconds;

        for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
        {
            OVR.EyeType eye = (OVR.EyeType)eyeIndex;
            EyeTexture eyeTexture = eyeTextures[eyeIndex];

            layerEyeFov.RenderPose[eyeIndex] = eyePoses[eyeIndex];

            // Retrieve the index of the active texture and select the next texture as being active next.
            int textureIndex = eyeTexture.SwapTextureSet.CurrentIndex++;

            immediateContext.OutputMerger.SetRenderTargets(eyeTexture.DepthStencilView, eyeTexture.RenderTargetViews[textureIndex]);
            immediateContext.ClearRenderTargetView(eyeTexture.RenderTargetViews[textureIndex], Color.Black);
            immediateContext.ClearDepthStencilView(eyeTexture.DepthStencilView, DepthStencilClearFlags.Depth | DepthStencilClearFlags.Stencil, 1.0f, 0);
            immediateContext.Rasterizer.SetViewport(eyeTexture.Viewport);

            // Apply the custom rasterizer state.
            immediateContext.Rasterizer.State = rasterizerState;

            // Retrieve the eye rotation quaternion and use it to calculate the LookAt direction and the LookUp direction.
            Quaternion rotationQuaternion = SharpDXHelpers.ToQuaternion(eyePoses[eyeIndex].Orientation);
            Matrix rotationMatrix = Matrix.RotationQuaternion(rotationQuaternion);
            Vector3 lookUp = Vector3.Transform(new Vector3(0, -1, 0), rotationMatrix).ToVector3();
            Vector3 lookAt = Vector3.Transform(new Vector3(0, 0, 1), rotationMatrix).ToVector3();
            Vector3 viewPosition = position - eyePoses[eyeIndex].Position.ToVector3();

            // Use this to get the first rotation to the goal.
            Matrix world = Matrix.Scaling(1.0f) /** Matrix.RotationX(timeSinceStart*0.2f) */ * Matrix.RotationY(timeSinceStart * 2 / 10f) /** Matrix.RotationZ(timeSinceStart*3/10f)*/;
            Matrix viewMatrix = Matrix.LookAtRH(viewPosition, viewPosition + lookAt, lookUp);
            Matrix projectionMatrix = OVR.ovrMatrix4f_Projection(eyeTexture.FieldOfView, 0.1f, 10.0f, OVR.ProjectionModifier.None).ToMatrix();
            projectionMatrix.Transpose();

            Matrix worldViewProjection = world * viewMatrix * projectionMatrix;
            worldViewProjection.Transpose();

            // Update the transformation matrix.
            immediateContext.UpdateSubresource(ref worldViewProjection, constantBuffer);

            // Draw the cube.
            //immediateContext.Draw(vertices.Length/2, 0);
            immediateContext.DrawIndexed(indices.Length, 0, 0);
        }

        hmd.SubmitFrame(0, layers);

        immediateContext.CopyResource(mirrorTextureD3D11, backBuffer);
        swapChain.Present(0, PresentFlags.None);

        if (newTextureArrived == true)
        {
            newTextureArrived = false;

            DataBox map = device.ImmediateContext.MapSubresource(myTexture, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None);

            // Load the BitmapSource with appropriate formatting (Format32bppPRGBA).
            SharpDX.WIC.BitmapSource bitMap = LoadBitmap(new SharpDX.WIC.ImagingFactory(), streamTexture);
            //string newFile = Path.GetDirectoryName(Process.GetCurrentProcess().MainModule.FileName) + @"\img_merged.jpg";
            //SharpDX.WIC.BitmapSource bitMap = LoadBitmapFromFile(new SharpDX.WIC.ImagingFactory(), newFile);

            int width = bitMap.Size.Width;
            int height = bitMap.Size.Height;
            int stride = bitMap.Size.Width * 4;
            bitMap.CopyPixels(stride, map.DataPointer, height * stride);

            device.ImmediateContext.UnmapSubresource(myTexture, 0);
            //bitMap.Dispose();
            streamTexture.Seek(0, SeekOrigin.Begin);
        }
    });

    #endregion

    // Release all resources.
    inputLayout.Dispose();
    constantBuffer.Dispose();
    indexBuffer.Dispose();
    vertexBuffer.Dispose();
    shaderSignature.Dispose();
    pixelShader.Dispose();
    pixelShaderByteCode.Dispose();
    vertexShader.Dispose();
    vertexShaderByteCode.Dispose();
    mirrorTextureD3D11.Dispose();
    layers.Dispose();
    eyeTextures[0].Dispose();
    eyeTextures[1].Dispose();
    immediateContext.ClearState();
    immediateContext.Flush();
    immediateContext.Dispose();
    depthStencilState.Dispose();
    depthStencilView.Dispose();
    depthBuffer.Dispose();
    backBufferRenderTargetView.Dispose();
    backBuffer.Dispose();
    swapChain.Dispose();
    factory.Dispose();

    // Disposing the device, before the hmd, will cause the hmd to fail when disposing.
    // Disposing the device, after the hmd, will cause the dispose of the device to fail.
    // It looks as if the hmd steals ownership of the device and destroys it, when it's shutting down.
    // device.Dispose();
    hmd.Dispose();
    oculus.Dispose();
}
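// The Vertex type and CreateGeometry helper are not shown in this listing. Below is a hypothetical
// layout that matches the InputElement declaration above (float4 position at offset 0, float4 color
// at offset 16, float2 texture coordinate at offset 32, 40 bytes total as reported by
// Utilities.SizeOf<Vertex>()); requires System.Runtime.InteropServices and SharpDX:
[StructLayout(LayoutKind.Sequential)]
public struct Vertex
{
    public Vector4 Position; // SV_Position, Format.R32G32B32A32_Float, offset 0
    public Vector4 Color;    // COLOR,       Format.R32G32B32A32_Float, offset 16
    public Vector2 TexCoord; // TEXCOORD,    Format.R32G32_Float,       offset 32
}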