// Recreates all size-dependent GPU resources after the host surface changes size:
// swap-chain buffers, viewport, pixel-to-clip view matrix, depth-stencil buffer and
// the offscreen render surface.
void ResizeSurface()
{
    Math.Rectangle newBounds = surface.Bounds;
    SurfaceSize = newBounds.Size;
    SurfaceBounds = newBounds;

    // Render targets must be unbound before ResizeBuffers may run.
    deviceContext.OutputMerger.ResetTargets();
    swapChain.ResizeBuffers(0, (int)newBounds.Width, (int)newBounds.Height, DXGI.Format.R8G8B8A8_UNorm, DXGI.SwapChainFlags.None);
    deviceContext.Rasterizer.SetViewport(0, 0, newBounds.Width, newBounds.Height);

    // Map pixel coordinates to clip space: scale to [-1,1] with Y flipped, then shift the
    // origin to the top-left corner. Transposed — presumably for the shader's matrix
    // layout convention; TODO confirm against the HLSL constant buffer declaration.
    viewMatrix = Matrix.Transpose(Matrix.Scaling(2f / newBounds.Width, -2f / newBounds.Height, 1));
    viewMatrix *= Matrix.Transpose(Matrix.Translation(-newBounds.Width / 2f, -newBounds.Height / 2f, 0));
    UpdateMatrixBuffer();

    // Release the old size-dependent resources before recreating them at the new size.
    depthStencilView?.Dispose();
    depthStencilBuffer?.Dispose();
    surfaceTarget?.Dispose();
    surfaceView?.Dispose();
    surfaceTexture?.Dispose();
    depthStencilBuffer = device.CreateDepthStencilBuffer((int)newBounds.Width, (int)newBounds.Height, sampleDescription, out depthStencilView);
    surfaceTexture = device.CreateSurface((int)newBounds.Width, (int)newBounds.Height, sampleDescription, out surfaceTarget);
    deviceContext.OutputMerger.SetTargets(depthStencilView, surfaceTarget);
    surfaceView = new D3D11.ShaderResourceView(device, surfaceTexture);
}
/// <summary>
/// Loads a color-map texture from <paramref name="file"/>, replacing any previously
/// loaded color map and its shader resource view.
/// </summary>
/// <param name="file">Path of the image file to load.</param>
/// <param name="device">Device used to create the texture.</param>
public void SetColormap(string file, D3D11.Device device)
{
    colorMap?.Dispose();
    colorMapView?.Dispose();
    D3D11.Resource rsrc;
    ResourceUtil.LoadFromFile(device, file, out colorMapView, out rsrc);
    colorMap = rsrc as D3D11.Texture2D;
    if (colorMap == null)
    {
        // The loaded resource was not a Texture2D; release it instead of leaking it
        // (the view keeps its own reference to the underlying resource).
        rsrc?.Dispose();
    }
}
/// <summary>
/// Releases the main texture, the optional popup texture, and every texture queued
/// in the obsolete list.
/// </summary>
public override void Dispose()
{
    texture.Dispose();
    popupTexture?.Dispose();
    foreach (var obsolete in obsoluteTextures)
    {
        obsolete.Dispose();
    }
}
/// <summary>
/// Releases every D3D/D2D/DXGI object owned by the renderer and clears the references.
/// Null-conditional calls make the method safe to invoke twice, or after a partial
/// initialization — the original unguarded Dispose calls threw NullReferenceException
/// in those cases.
/// </summary>
internal void ReleaseDevices()
{
    IsRendererSuppressed = true;

    RenderTarget?.Dispose();
    Backbuffer?.Dispose();
    RenderTargetSurface?.Dispose();
    RenderTargetView?.Dispose();
    D2DDeviceContext?.Dispose();
    D2DDevice?.Dispose();
    D2DFactory?.Dispose();
    DXGIDevice?.Dispose();
    D3DDevice?.Dispose();
    D3DDefaultDevice?.Dispose();
    SwapChain?.Dispose();

    // Clear all references so a later re-initialization starts from a clean state.
    SwapChain = null;
    RenderTarget = null;
    RenderTargetSurface = null;
    Backbuffer = null;
    RenderTargetView = null;
    D2DDeviceContext = null;
    D2DFactory = null;
    D2DDevice = null;
    DXGIDevice = null;
    D3DDevice = null;
    D3DDefaultDevice = null;
}
// Opens a shared surface (presumably handed over from WPF's D3DImage — confirm with the
// caller) on our own D3D11 device and binds it as this renderer's render target.
void InitializeSharedBackBuffer(IntPtr resourcePtr)
{
    // Convert the raw native pointer into a DXGI shared resource reference.
    Resource resource = CppObject.FromPointer<Resource>(resourcePtr).QueryInterface<Resource>();
    // Open the shared resource on this device as a D3D11 texture.
    D3D11.Texture2D sharedBackbuffer = device.OpenSharedResource<D3D11.Texture2D>(resource.SharedHandle);
    // Release the intermediate DXGI reference; the opened texture stands on its own.
    resource.Dispose();

    // Wrap the shared texture in a render target view and bind it.
    D3D11.RenderTargetViewDescription desc = new D3D11.RenderTargetViewDescription();
    desc.Format = Format.B8G8R8A8_UNorm;
    desc.Dimension = D3D11.RenderTargetViewDimension.Texture2D;
    desc.Texture2D.MipSlice = 0;
    renderTargetView = new D3D11.RenderTargetView(device, sharedBackbuffer, desc);
    deviceContext.OutputMerger.SetRenderTargets(renderTargetView);
    // The view holds its own reference; drop ours.
    sharedBackbuffer.Dispose();

    // Match the viewport to the on-screen size of the image element (in device pixels).
    Size size = Utils.WpfSizeToPixels(ImageGrid);
    deviceContext.Rasterizer.SetViewport(new Viewport(0, 0, (int)size.Width, (int)size.Height, 0.0f, 1.0f));
}
/// <summary>
/// Creates a 1x1 immutable texture filled with a single color and returns it wrapped
/// in a <see cref="Texture2D"/> whose shader resource view is set.
/// </summary>
/// <param name="device">Device used to create the GPU resources.</param>
/// <param name="color">Fill color, uploaded as a single BGRA texel.</param>
public static Texture2D FromColor(Device device, Color color)
{
    Texture2DDescription textureDesc = new Texture2DDescription()
    {
        MipLevels = 1,
        Format = Format.B8G8R8A8_UNorm,
        Width = 1,
        Height = 1,
        ArraySize = 1,
        BindFlags = BindFlags.ShaderResource,
        Usage = ResourceUsage.Immutable,
        SampleDescription = new SampleDescription(1, 0)
    };

    // Pin the color value so its address can be passed to D3D as initial texture data.
    uint bgra = color.ToArgb();
    GCHandle gc = GCHandle.Alloc(bgra, GCHandleType.Pinned);
    try
    {
        DataRectangle rect = new DataRectangle(gc.AddrOfPinnedObject(), sizeof(uint));
        var buffer = new Texture2D11(device.NativeDevice, textureDesc, rect);
        var resourceView = new ShaderResourceView(device.NativeDevice, buffer);
        // The view keeps the underlying resource alive; the wrapper can be released.
        buffer.Dispose();

        var texture = new Texture2D();
        texture.NativeResourceView = resourceView;
        return texture;
    }
    finally
    {
        // Guarantee the pinned handle is released even if resource creation throws
        // (the original leaked the handle on failure, and also created an unused
        // Texture2D local that has been removed).
        gc.Free();
    }
}
/// <summary>
/// Releases all GPU resources owned by this renderer. Null-conditional operators make
/// disposal safe when construction failed part-way through — the original's unguarded
/// Dispose calls threw NullReferenceException on any field that was never created.
/// </summary>
public void Dispose()
{
    backbufferTexture?.Dispose();
    backbufferRTV?.Dispose();
    sceneTexture?.Dispose();
    sceneRTV?.Dispose();
    sceneSRV?.Dispose();
    depthDSV?.Dispose();
    depthSRV?.Dispose();
    depthStencilState?.Dispose();
    rasterizerState?.Dispose();
    constantBuffer?.Dispose();
    mainVertexShader?.Dispose();
    mainPixelShader?.Dispose();
    trianglePositionVertexBuffer?.Dispose();
    triangleIndexBuffer?.Dispose();
    inputLayout?.Dispose();
    postEffect?.Dispose();
}
/// <summary>
/// Tears down the capture session and its D3D resources. Every field is now nulled
/// after release (the original nulled some but left <c>_framePool</c>, <c>_session</c>
/// and <c>_currentFrame</c> as dangling references), making the method idempotent.
/// </summary>
private void Cleanup()
{
    _framePool?.Dispose();
    _framePool = null;
    _session?.Dispose();
    _session = null;
    if (_captureItem != null)
    {
        // Unsubscribe so the item cannot call back into a torn-down instance.
        _captureItem.Closed -= OnClosed;
    }
    _captureItem = null;
    _device = null;
    _d3dDevice = null;
    _composeTexture?.Dispose();
    _composeTexture = null;
    _composeRenderTargetView?.Dispose();
    _composeRenderTargetView = null;
    _currentFrame?.Dispose();
    _currentFrame = null;
}
// Releases the DirectX texture in addition to whatever the base class cleans up.
public new void Dispose()
{
    base.Dispose();
    dxTexture?.Dispose();
}
// Recreates the back-buffer and depth-stencil views after the window client area
// changes size, then rebinds viewport and targets. Early-outs when minimized.
private void Resize()
{
    // Dispose all previously allocated size-dependent resources.
    Canvas.Release();
    Utilities.Dispose(ref backBufferView);
    Utilities.Dispose(ref depthStencilView);

    // A zero-sized client area (minimized window) cannot back a swap chain.
    if (View.ClientSize.Width == 0 || View.ClientSize.Height == 0) { return; }

    // Resize the backbuffer.
    swapChain.ResizeBuffers(1, View.ClientSize.Width, View.ClientSize.Height, Format.B8G8R8A8_UNorm, SwapChainFlags.None);

    // Get the backbuffer from the swapchain.
    var backBufferTexture = swapChain.GetBackBuffer <Texture2D11>(0);

    // Let the canvas (font/2D layer) rebuild its resources on the new backbuffer.
    Canvas.UpdateResources(backBufferTexture);

    // The view holds its own reference, so the texture wrapper can be released.
    backBufferView = new RenderTargetView(NativeDevice, backBufferTexture);
    backBufferTexture.Dispose();

    // Depth buffer matching the new client size.
    var depthStencilTexture = new Texture2D11(NativeDevice, new Texture2DDescription()
    {
        Format = Format.D16_UNorm,
        ArraySize = 1,
        MipLevels = 1,
        Width = View.ClientSize.Width,
        Height = View.ClientSize.Height,
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.DepthStencil,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.None
    });

    // Create the depth buffer view, then drop the texture wrapper (view keeps a reference).
    depthStencilView = new DepthStencilView(NativeDevice, depthStencilTexture);
    depthStencilTexture.Dispose();

    //SetDefaultTargers();
    // Set default targets and viewport for the new size.
    NativeDeviceContext.Rasterizer.SetViewport(0, 0, View.ClientSize.Width, View.ClientSize.Height);
    NativeDeviceContext.OutputMerger.SetTargets(depthStencilView, backBufferView);
    // End resize
    //MustResize = false;
}
// Captures every monitor region described by the item, composites the frames onto an
// SDR canvas texture, and returns the result as a GDI+ bitmap.
public Bitmap CaptureAndProcess(ModernCaptureItemDescription itemDescription)
{
    // Assuming the old canvas texture is already disposed by the previous run.
    description = itemDescription;

    // Canvas render target sized to the full capture rectangle.
    textureSDRImage = new D3D11.Texture2D(d3dDevice, new D3D11.Texture2DDescription
    {
        Width = description.CanvasRect.Width,
        Height = description.CanvasRect.Height,
        MipLevels = 1,
        ArraySize = 1,
        Format = DXGI.Format.B8G8R8A8_UNorm_SRgb,
        Usage = D3D11.ResourceUsage.Default,
        SampleDescription = new DXGI.SampleDescription(1, 0),
        BindFlags = D3D11.BindFlags.RenderTarget,
        CpuAccessFlags = D3D11.CpuAccessFlags.None,
        OptionFlags = D3D11.ResourceOptionFlags.None,
    });
    rtvSdrTexture = new D3D11.RenderTargetView(d3dDevice, textureSDRImage);
    d3dContext.Rasterizer.SetViewport(new Viewport(0, 0, description.CanvasRect.Width, description.CanvasRect.Height));
    d3dContext.OutputMerger.SetRenderTargets(rtvSdrTexture);
    d3dContext.ClearRenderTargetView(rtvSdrTexture, new SharpDX.Mathematics.Interop.RawColor4 { A = 0.0f, B = 0.0f, G = 0.0f, R = 0.0f });

    // Capture one frame per region, sequentially.
    foreach (var item in description.Regions)
    {
        using (var session = new ModernCaptureMonitorSession(wrtD3D11Device, item))
        {
            Direct3D11CaptureFrame f;
            currentSession = session;
            session.Session.StartCapture();
            // Busy-wait for the first frame. NOTE(review): 1 ms polling with no timeout —
            // this hangs forever if the session never produces a frame; consider a deadline.
            while ((f = session.FramePool.TryGetNextFrame()) == null)
            {
                Thread.Sleep(1);
            }
            ProcessFrame(f);
            f.Dispose();
        }
    }

    // Encode the canvas into an 8-bit bitmap. NOTE(review): the stream returned by
    // DumpAndSaveImage is never disposed; GDI+ expects it to stay open for the bitmap's
    // lifetime, so this is load-bearing — confirm before "fixing" it.
    var gdiPlusBitmap = new Bitmap(DumpAndSaveImage());
    rtvSdrTexture.Dispose();
    textureSDRImage.Dispose();
    description = null;
#if DEBUG
    System.Diagnostics.Debug.WriteLine(SharpDX.Diagnostics.ObjectTracker.ReportActiveObjects());
#endif
    return (gdiPlusBitmap);
}
// Releases the cube-map array texture on managed disposal, then defers to the base class.
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        texCubeArray?.Dispose();
    }
    base.Dispose(disposing);
}
// Releases both render target / depth-stencil pairs and their view collections.
public void Dispose()
{
    // View collections first, then the surfaces they reference.
    defaultRenderViews.Dispose();
    swapRenderViews.Dispose();

    // Primary pair.
    renderTarget.Dispose();
    depthStencil.Dispose();

    // Swap pair.
    renderTargetSwap.Dispose();
    depthStencilSwap.Dispose();
}
// Resolves the offscreen (possibly multisampled) surface into the swap chain's back
// buffer and presents it. With VSync off (SyncInterval == 0), presentation is throttled
// to the display refresh rate using the stopwatch.
internal void EndRender()
{
    if (SyncInterval > 0 || sw.Elapsed.TotalMilliseconds >= 1000.0 / refreshRate)
    {
        // 'using' guarantees the back-buffer wrapper is released even if the resolve
        // throws (the original leaked the reference in that case).
        using (D3D11.Texture2D backBuffer = swapChain.GetBackBuffer<D3D11.Texture2D>(0))
        {
            deviceContext.ResolveSubresource(surfaceTexture, 0, backBuffer, 0, DXGI.Format.R8G8B8A8_UNorm);
        }
        swapChain.Present(SyncInterval, DXGI.PresentFlags.None);
        sw.Restart();
    }
}
// Full teardown of the LED-capture pipeline: frees pinned memory, restores the settings
// window background, unsubscribes from settings changes, then releases render, device
// and capture resources. Disposal order (views/overlays -> context -> device -> capture
// stages) is deliberate; keep it.
public void Dispose()
{
    if (pinnedMemBuffer.IsAllocated) { pinnedMemBuffer.Free(); }
    TryReleaseFrame();
    if (Settings.SettingsHwnd != IntPtr.Zero) // restore window background color
    {
        // Best effort: the device may already be gone, so swallow failures here.
        try
        {
            device?.ImmediateContext.ClearRenderTargetView(renderTarget, SharpDX.Color.WhiteSmoke);
            renderWindow?.Present(0, PresentFlags.None);
        }
        catch { }
    }
    // Unsubscribe so settings changes no longer call into this disposed instance.
    Settings.Model.PropertyChanged -= LedsChanged;
    fpsCounter?.Dispose();
    fpsColor?.Dispose();
    fpsFont?.Dispose();
    renderOverlay?.Dispose();
    renderTarget?.Dispose();
    renderTexture?.Dispose();
    renderWindow?.Dispose();
    // Flush and drop the immediate context before disposing the device itself.
    try
    {
        device?.ImmediateContext?.ClearState();
        device?.ImmediateContext?.Flush();
        device?.ImmediateContext?.Dispose();
    }
    catch { }
    adapter?.Dispose();
    output?.Dispose();
    device?.Dispose();
    duplicator?.Dispose();
    capture?.Dispose();
    scaler?.Dispose();
    gpuTexture?.Dispose();
    cpuTexture?.Dispose();
}
// Releases the texture (and nulls the caller's reference) when its dimensions no
// longer match the requested size, so the caller knows to recreate it.
public void CheckTexture(ref D3D11.Texture2D tx, GDI.Size size)
{
    if (tx != null && (tx.Description.Width != size.Width || tx.Description.Height != size.Height))
    {
        tx.Dispose();
        tx = null;
    }
}
/// <summary>
/// Releases every D3D and WIC resource owned by the capture pipeline. Null-conditional
/// operators are now used consistently — the original guarded only the first two fields,
/// so disposing a partially initialized instance threw NullReferenceException.
/// </summary>
public void Dispose()
{
    rtvSdrTexture?.Dispose();
    textureSDRImage?.Dispose();
    inputLayout?.Dispose();
    samplerState?.Dispose();
    shaderInputSigVsQuad?.Dispose();
    psToneMapping?.Dispose();
    vsQuad?.Dispose();
    wrtD3D11Device?.Dispose();
    d3dDevice?.Dispose();
    wicFactory?.Dispose();
}
// Copies the SDR canvas texture to a CPU-readable staging texture, encodes it as a BMP
// via WIC, and returns the encoded bytes in a MemoryStream (position at end; callers
// rewind). The caller owns the returned stream.
private Stream DumpAndSaveImage()
{
    var stream = new MemoryStream();
    // Staging copy with CPU read access, same size/format as the canvas.
    var textureSDRCpuCopy = new D3D11.Texture2D(d3dDevice, new D3D11.Texture2DDescription
    {
        Width = description.CanvasRect.Width,
        Height = description.CanvasRect.Height,
        MipLevels = 1,
        ArraySize = 1,
        Format = DXGI.Format.B8G8R8A8_UNorm_SRgb,
        Usage = D3D11.ResourceUsage.Staging,
        SampleDescription = new DXGI.SampleDescription(1, 0),
        BindFlags = D3D11.BindFlags.None,
        CpuAccessFlags = D3D11.CpuAccessFlags.Read,
        OptionFlags = D3D11.ResourceOptionFlags.None,
    });
    DataStream rawSdrImageDataStream;
    d3dContext.CopyResource(textureSDRImage, textureSDRCpuCopy);
    // Map the staging texture; the mapped pointer stays valid until UnmapSubresource below.
    var dataBox = d3dContext.MapSubresource(textureSDRCpuCopy, 0, 0, D3D11.MapMode.Read, D3D11.MapFlags.None, out rawSdrImageDataStream);
    var dataRectangle = new DataRectangle
    {
        DataPointer = rawSdrImageDataStream.DataPointer,
        Pitch = dataBox.RowPitch, // row pitch may exceed width*4 due to alignment
    };
    // Wrap the mapped memory in a WIC bitmap and encode it (converted to 24bpp BGR) as BMP.
    using (var bitmap = new WIC.Bitmap(wicFactory, description.CanvasRect.Width, description.CanvasRect.Height, WIC.PixelFormat.Format32bppBGRA, dataRectangle))
    using (var imageEncoder = new WIC.BmpBitmapEncoder(wicFactory, stream))
    using (var encodeInstance = new WIC.BitmapFrameEncode(imageEncoder))
    {
        encodeInstance.Initialize();
        encodeInstance.SetSize(bitmap.Size.Width, bitmap.Size.Height);
        var pixelFormat = WIC.PixelFormat.Format24bppBGR;
        encodeInstance.SetPixelFormat(ref pixelFormat);
        encodeInstance.WriteSource(bitmap);
        encodeInstance.Commit();
        imageEncoder.Commit();
        stream.Flush();
    }
    // NOTE(review): if encoding throws, the texture stays mapped and these cleanups are
    // skipped — consider try/finally here.
    d3dContext.UnmapSubresource(textureSDRCpuCopy, 0);
    rawSdrImageDataStream.Dispose();
    textureSDRCpuCopy.Dispose();
    return (stream);
}
/// <summary>
/// Releases the output-duplication objects and stops the listener thread.
/// </summary>
public void Dispose()
{
    duplicatedOutput?.Dispose();
    screenTexture?.Dispose();
    device?.Dispose();
    // NOTE(review): Thread.Abort is unsupported on .NET Core/5+ and unsafe in general;
    // prefer a cooperative cancellation flag checked by the listener loop. Kept here to
    // preserve existing behavior.
    fluxListenerThread.Abort();
}
// Copies the rendered RenderTexture into the readable destination texture on the GPU
// and writes it to disk as a DDS file at outputPath.
void OnSave()
{
    // NOTE(review): wrapping Unity's native pointers and disposing the wrappers calls
    // Release() on references Unity may still own — this mirrors the original behavior;
    // confirm the ref-counting balances before changing it.
    var src = new SharpDX.Direct3D11.Texture2D(ResultRT.GetNativeTexturePtr());
    var dst = new SharpDX.Direct3D11.Texture2D(Result.GetNativeTexturePtr());
    src.Device.ImmediateContext.CopyResource(src, dst);
    SharpDX.Direct3D11.Resource.ToFile(dst.Device.ImmediateContext, dst, SharpDX.Direct3D11.ImageFileFormat.Dds, outputPath);
    src.Dispose();
    dst.Dispose();
}
/// <summary>
/// Commits the per-eye swap textures, submits the stereo frame to the Oculus runtime,
/// mirrors the result into the application back buffer and presents it.
/// </summary>
/// <param name="syncInterval">Unused here; Present is always called with interval 0
/// because the Oculus compositor owns frame pacing.</param>
public override void SwapBuffers(int syncInterval)
{
    eyeTextures[0].SwapTexture.Commit();
    eyeTextures[1].SwapTexture.Commit();

    // Describe both eye layers for the compositor.
    layerEyeFov.Header.Type = OVR.LayerType.EyeFov;
    layerEyeFov.Header.Flags = OVR.LayerFlags.None;
    layerEyeFov.SensorSampleTime = sampleTime;
    for (int i = 0; i < 2; i++)
    {
        layerEyeFov.ColorTexture[i] = eyeTextures[i].SwapTexture.TextureChain;
        layerEyeFov.Viewport[i] = eyeTextures[i].ViewportSize;
        layerEyeFov.Fov[i] = hmd.DefaultEyeFov[i];
        layerEyeFov.RenderPose[i] = eyePoses[i];
    }
    if (hmd.SubmitFrame(frameIndex, layers) < 0)
    {
        Log.Warning("OculusRiftDisplay SubmitFrame returned error");
    }

    // Honor runtime requests (HMD removed, recenter pressed, etc.).
    OVR.SessionStatus sessionStatus;
    hmd.GetSessionStatus(out sessionStatus);
    if (sessionStatus.ShouldQuit > 0)
    {
        Application.Exit();
    }
    if (sessionStatus.ShouldRecenter > 0)
    {
        hmd.RecenterPose();
    }
    frameIndex++;

    // Copy the runtime-provided mirror texture into our back buffer for the desktop window.
    var mirrorTextureD3D11 = new SharpDX.Direct3D11.Texture2D(mirrorTexture.GetMirrorBufferPtr());
    d3dDevice.ImmediateContext.CopyResource(mirrorTextureD3D11, backbufferColor.Surface.Resource);
    mirrorTextureD3D11.Dispose();
    swapChain.Present(0, PresentFlags.None);
}
/// <summary>
/// Creates a cube render target: a 6-face color texture exposed both as a render target
/// and as a shader resource, plus a matching 6-face D16 depth buffer.
/// </summary>
/// <param name="device">Device used to allocate the resources.</param>
/// <param name="size">Edge length of each cube face, in pixels.</param>
/// <param name="format">Color format of the cube faces.</param>
public SharpCubeTarget(SharpDevice device, int size, Format format)
{
    Device = device;
    Size = size;

    // Color cube: 6 array slices flagged as TextureCube, bindable for rendering and sampling.
    Texture2D target = new Texture2D(device.Device, new Texture2DDescription()
    {
        Format = format,
        Width = size,
        Height = size,
        ArraySize = 6,
        BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
        CpuAccessFlags = CpuAccessFlags.None,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.TextureCube,
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
    });

    _target = new RenderTargetView(device.Device, target);
    _resource = new ShaderResourceView(device.Device, target);
    // The views keep their own references; the texture wrapper can be released.
    target.Dispose();

    // Matching 6-face depth buffer.
    var _zbufferTexture = new Texture2D(Device.Device, new Texture2DDescription()
    {
        Format = Format.D16_UNorm,
        ArraySize = 6,
        MipLevels = 1,
        Width = size,
        Height = size,
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.DepthStencil,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.TextureCube
    });

    // Create the depth buffer view, then drop the texture wrapper (view keeps a reference).
    _zbuffer = new DepthStencilView(Device.Device, _zbufferTexture);
    _zbufferTexture.Dispose();
}
// Releases the render target, swap chain and back-buffer objects, tolerating any
// that were never created.
private void disposeResources()
{
    renderTarget?.Dispose();
    swapChain?.Dispose();
    backbufferView?.Dispose();
    backbuffer?.Dispose();
}
private int mipLevels = 1; // Mips would skew the readings from the depth texture, so keep a single level.

// Builds a depth-only shadow-map render target of the given size: one depth texture
// shared by a DSV (written during the shadow pass) and an SRV (sampled afterwards).
public ShadowMap(GraphicsDevice device, int width, int height)
{
    viewport = new SharpDX.Viewport(0, 0, width, height, 0.0f, 1.0f);

    Texture2DDescription depthTexDesc = FillOutDepthTextureDescription();
    SharpDX.Direct3D11.Texture2D depthTexture = new SharpDX.Direct3D11.Texture2D(device, depthTexDesc);

    DepthStencilViewDescription dsvDesc = FillOutDSVDescription();
    depthMapDSV = new DepthStencilView(device, depthTexture, dsvDesc);

    ShaderResourceViewDescription srvDesc = FillOutSRVDescription();
    depthMapSRV = new ShaderResourceView(device, depthTexture, srvDesc);

    renderTarget = new RenderTarget("ShadowMap", null, depthMapDSV, viewport);

    // Both views hold their own references; the texture wrapper is no longer needed.
    depthTexture.Dispose();
}
/// <summary>
/// Releases all pipeline objects, then the swap chain and views, the immediate context,
/// and finally the device and factory. The context is now disposed <b>before</b> the
/// device — the original disposed the device first, releasing the context against an
/// already-destroyed device.
/// </summary>
public void Dispose()
{
    _shaderSignature?.Dispose();
    _vertexShaderByteCode?.Dispose();
    _vertexShader?.Dispose();
    _pixelShaderByteCode?.Dispose();
    _pixelShader?.Dispose();
    _verticesBuffer?.Dispose();
    _inputLayout?.Dispose();
    _contantBuffer?.Dispose();
    _depthBuffer?.Dispose();
    _depthView?.Dispose();
    // Unbind everything and flush pending work before tearing the chain down.
    _context?.ClearState();
    _context?.Flush();
    _swapChain?.Dispose();
    _renderView?.Dispose();
    _backBuffer?.Dispose();
    _context?.Dispose();
    _device?.Dispose();
    _factory?.Dispose();
}
/// <summary>
/// Loads an image file via GDI+ and uploads its pixels into a GPU texture, returning a
/// shader resource view over it.
/// </summary>
/// <param name="device">Device used to create the texture.</param>
/// <param name="filename">Path of the image file to load.</param>
public static ShaderResourceView CreateTextureFromBitmap(Device device, string filename)
{
    // 'using' releases the GDI+ bitmap — the original leaked it.
    using (var bitmap = new System.Drawing.Bitmap(filename))
    {
        // Describe and create a Texture2D matching the bitmap.
        Texture2DDescription textureDesc = new Texture2DDescription()
        {
            MipLevels = 1,
            Format = Format.B8G8R8A8_UNorm,
            Width = bitmap.Width,
            Height = bitmap.Height,
            ArraySize = 1,
            BindFlags = BindFlags.ShaderResource,
            Usage = ResourceUsage.Default,
            SampleDescription = new SampleDescription(1, 0)
        };

        // Lock the pixels so D3D can read them directly as initial data.
        System.Drawing.Imaging.BitmapData data = bitmap.LockBits(
            new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height),
            System.Drawing.Imaging.ImageLockMode.ReadOnly,
            System.Drawing.Imaging.PixelFormat.Format32bppArgb);
        try
        {
            DataRectangle dataRectangle = new DataRectangle(data.Scan0, data.Stride);
            var buffer = new Texture2D11(device.NativeDevice, textureDesc, dataRectangle);
            var resourceView = new ShaderResourceView(device.NativeDevice, buffer);
            // The view keeps the underlying resource alive.
            buffer.Dispose();
            return resourceView;
        }
        finally
        {
            // Unlock even if texture creation throws (the original skipped this on failure).
            bitmap.UnlockBits(data);
        }
    }
}
// Re-renders the source cubemap into ResultRT, one fullscreen quad per mip level, with
// the shader sampling progressively blurrier source mips.
void OnProcess()
{
    // Drop previous outputs before recreating them.
    if (Result != null) { DestroyImmediate(Result); }
    if (ResultRT != null) { DestroyImmediate(ResultRT); }
    Debug.Log("Re-render texture at time " + Time.realtimeSinceStartup);

    // Mip chain is allocated but filled manually below, one level at a time.
    ResultRT = new RenderTexture(OutputWidth, OutputHeight, 24, RenderTextureFormat.ARGBFloat);
    ResultRT.useMipMap = true;
    ResultRT.autoGenerateMips = false;
    ResultRT.Create();
    Result = new Texture2D(OutputWidth, OutputHeight, TextureFormat.RGBAFloat, true, true);
    // RenderTexture.active = Result;

    // Query the actual mip count from the native D3D11 texture backing the RenderTexture.
    var tex = new SharpDX.Direct3D11.Texture2D(ResultRT.GetNativeTexturePtr());
    var mips = tex.Description.MipLevels;
    tex.Dispose();
    Debug.Log(mips);

    var srcTexture = (Cubemap)RenderMaterial.GetTexture("_Cube");
    for (int i = 0; i < mips; ++i)
    {
        Graphics.SetRenderTarget(ResultRT, i);
        // Map destination mip i linearly onto the source cubemap's mip range.
        RenderMaterial.SetFloat("_MipOffset", ((float)i / (mips - 1)) * (srcTexture.mipmapCount - 1));
        RenderMaterial.SetFloat("_VerticalUvScale", FlipVertical ? 1 : 0);
        RenderMaterial.SetPass(0);
        // Draw a fullscreen quad in orthographic [0,1] space.
        GL.PushMatrix();
        GL.LoadOrtho();
        GL.Begin(GL.QUADS);
        GL.Clear(true, true, Color.blue);
        GL.TexCoord(new Vector2(0, 1)); GL.Vertex3(0, 0, 0.0f);
        GL.TexCoord(new Vector2(0, 0)); GL.Vertex3(0, 1, 0.0f);
        GL.TexCoord(new Vector2(1, 0)); GL.Vertex3(1, 1, 0.0f);
        GL.TexCoord(new Vector2(1, 1)); GL.Vertex3(1, 0, 0.0f);
        GL.End();
        GL.PopMatrix();
        GL.Flush();
    }
    RenderTexture.active = null;
    // EditorUtility.CompressTexture(Result, TextureFormat.RGB24, (int)CompressionQuality);
}
// Combines three grayscale source images into one RGBA texture: the R channel of each
// source image feeds one output channel. Requires at least 3 selected files with the
// red/green/blue indices assigned.
// TODO(review): output dimensions are hardcoded to 4608x3456 — make the width and
// height flexible, and derive them from the inputs.
private void CompileButton_Click(object sender, RoutedEventArgs e)
{
    if (paths.Count > 2 && redIndex > -1 && greenIndex > -1 && blueIndex > -1)
    {
        Device d = new Device(SharpDX.Direct3D.DriverType.Hardware);
        // Load sources as staging textures so the CPU can read them back.
        ImageLoadInformation loadInfo = new ImageLoadInformation()
        {
            BindFlags = BindFlags.None,
            CpuAccessFlags = CpuAccessFlags.Read,
            Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm,
            OptionFlags = ResourceOptionFlags.None,
            Usage = ResourceUsage.Staging
        };
        Texture2D red, green, blue, final;
        red = Texture2D.FromFile<Texture2D>(d, paths[redIndex], loadInfo);
        green = Texture2D.FromFile<Texture2D>(d, paths[greenIndex], loadInfo);
        blue = Texture2D.FromFile<Texture2D>(d, paths[blueIndex], loadInfo);
        // CPU-writable staging texture for the combined result.
        final = new Texture2D(d, new Texture2DDescription()
        {
            ArraySize = 1,
            BindFlags = BindFlags.None,
            CpuAccessFlags = CpuAccessFlags.Write,
            Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm,
            Height = 3456,
            MipLevels = 0,
            OptionFlags = ResourceOptionFlags.None,
            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
            Usage = ResourceUsage.Staging,
            Width = 4608
        });
        // Map everything and copy the raw rows into managed arrays.
        DataBox redDataBox = d.ImmediateContext.MapSubresource(red, 0, MapMode.Read, MapFlags.None);
        DataBox greenDataBox = d.ImmediateContext.MapSubresource(green, 0, MapMode.Read, MapFlags.None);
        DataBox blueDataBox = d.ImmediateContext.MapSubresource(blue, 0, MapMode.Read, MapFlags.None);
        DataBox finalDataBox = d.ImmediateContext.MapSubresource(final, 0, MapMode.Write, MapFlags.None);
        byte[] redData = new byte[redDataBox.RowPitch * red.Description.Height],
               greenData = new byte[greenDataBox.RowPitch * green.Description.Height],
               blueData = new byte[blueDataBox.RowPitch * blue.Description.Height],
               finalData = new byte[finalDataBox.RowPitch * final.Description.Height];
        Utilities.Read(redDataBox.DataPointer, redData, 0, redData.Length);
        Utilities.Read(greenDataBox.DataPointer, greenData, 0, greenData.Length);
        Utilities.Read(blueDataBox.DataPointer, blueData, 0, blueData.Length);
        // NOTE(review): `index` is the R-channel byte of each 4-byte RGBA pixel, so each
        // output channel is taken from the R channel of the corresponding source —
        // presumably the sources are grayscale maps; this also assumes all four textures
        // share the same RowPitch and dimensions. Confirm both assumptions.
        for (int x = 0; x < final.Description.Width; x++)
        {
            for (int y = 0; y < final.Description.Height; y++)
            {
                int index = (x * 4) + (y * finalDataBox.RowPitch);
                finalData[index] = redData[index];
                finalData[index + 1] = greenData[index];
                finalData[index + 2] = blueData[index];
                finalData[index + 3] = 255; //A
            }
        }
        Utilities.Write(finalDataBox.DataPointer, finalData, 0, finalData.Length);
        d.ImmediateContext.UnmapSubresource(red, 0);
        d.ImmediateContext.UnmapSubresource(green, 0);
        d.ImmediateContext.UnmapSubresource(blue, 0);
        d.ImmediateContext.UnmapSubresource(final, 0);
        Texture2D.ToFile(d.ImmediateContext, final, ImageFileFormat.Png, "C:\\dan\\projectmedia\\BumpMap\\final.png");
        final.Dispose();
        red.Dispose();
        green.Dispose();
        blue.Dispose();
        d.Dispose();
    }
    else
        MessageBox.Show("must choose at least 3 files and select 3 to be the red green and blue channels");
}
/// <summary>Releases the back-buffer and depth-buffer bitmaps and the screen texture.</summary>
public void Dispose()
{
    backbufferBitmap.Dispose();
    depthbufferBitmap.Dispose();
    screenTexture.Dispose();
}
// Initializes desktop duplication: creates a D3D11 device on the first adapter, a
// CPU-readable staging texture sized to the first output's desktop, and the output
// duplication object. The DXGI factory/adapter/output objects are setup-only and are
// now disposed via 'using' (the original leaked all four).
public void setupDX()
{
    try
    {
        const int numAdapter = 0; // # of graphics card adapter
        const int numOutput = 0;  // # of output device (i.e. monitor)

        using (var factory = new Factory1())
        using (var adapter = factory.GetAdapter1(numAdapter))
        {
            // Create device from Adapter
            device = new SharpDX.Direct3D11.Device(adapter);

            using (var output = adapter.GetOutput(numOutput))
            using (var output1 = output.QueryInterface<Output1>())
            {
                // Width/Height of desktop to capture
                int width = ((SharpDX.Rectangle)output.Description.DesktopBounds).Width;
                int height = ((SharpDX.Rectangle)output.Description.DesktopBounds).Height;

                // Create Staging texture CPU-accessible
                var textureDesc = new Texture2DDescription
                {
                    CpuAccessFlags = CpuAccessFlags.Read,
                    BindFlags = BindFlags.None,
                    Format = Format.B8G8R8A8_UNorm,
                    Width = width,
                    Height = height,
                    OptionFlags = ResourceOptionFlags.None,
                    MipLevels = 1,
                    ArraySize = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage = ResourceUsage.Staging
                };
                screenTexture = new Texture2D(device, textureDesc);

                // Duplicate the output (holds its own device reference).
                duplicatedOutput = output1.DuplicateOutput(device);
            }
        }
    }
    catch (SharpDXException e)
    {
        // Roll back anything partially created.
        device?.Dispose();
        screenTexture?.Dispose();
        duplicatedOutput?.Dispose();
        device = null;
        if (e.ResultCode.Code == SharpDX.DXGI.ResultCode.AccessDenied.Result.Code)
        {
            // Access denied (e.g. secure desktop / UAC prompt) — treated as non-fatal.
            device = null;
        }
        //else throw e;
    }
    // Unexpected exceptions now propagate with their original stack trace — the old
    // `catch (Exception e) { throw e; }` reset it for no benefit.
}
// Rebuilds the back-buffer view and depth-stencil view after the render view changes
// size, rebinds viewport/targets and resizes the UI surface. No-op while minimized.
void ResizeBuffers()
{
    // Dispose all previous allocated resources
    Utilities.Dispose(ref _backbufferView);
    Utilities.Dispose(ref _zbufferView);
    // A zero-sized view (minimized window) cannot back a swap chain.
    if (RenderViewSize.Width == 0 || RenderViewSize.Height == 0) return;

    // Resize the backbuffer
    SwapChain.ResizeBuffers(1, RenderViewSize.Width, RenderViewSize.Height, Format.R8G8B8A8_UNorm, SwapChainFlags.AllowModeSwitch);

    // Get the backbuffer from the swapchain
    var _backBufferTexture = SwapChain.GetBackBuffer<Texture2D>(0);
    _backBufferTexture.DebugName = "Lilium BackBuffer";

    // The view keeps its own reference, so the wrapper can be released immediately.
    _backbufferView = new RenderTargetView(Device, _backBufferTexture);
    _backbufferView.DebugName = "Lilium BackBuffer View";
    _backBufferTexture.Dispose();

    // Depth buffer matching the new size and the configured MSAA settings.
    var _zbufferTexture = new Texture2D(Device, new Texture2DDescription()
    {
        Format = Format.D24_UNorm_S8_UInt,
        ArraySize = 1,
        MipLevels = 1,
        Width = RenderViewSize.Width,
        Height = RenderViewSize.Height,
        SampleDescription = new SampleDescription(Config.MSAASampleCount, Config.MSAAQuality),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.DepthStencil,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.None
    });
    _zbufferTexture.DebugName = "Lilium DepthStencilBuffer";

    // Create the depth buffer view
    _zbufferView = new DepthStencilView(Device, _zbufferTexture);
    _zbufferView.DebugName = "Lilium DepthStencilBuffer View";
    _zbufferTexture.Dispose();

    DeviceContext.Rasterizer.SetViewport(0, 0, RenderViewSize.Width, RenderViewSize.Height);
    DeviceContext.OutputMerger.SetTargets(_zbufferView, _backbufferView);

    // Resize UI Surface
    if (mUISurface != null)
        mUISurface.SetDesignHeight(RenderViewSize.Height);
    needResize = false;
}
/// <summary>
/// Commits the per-eye swap textures, submits the stereo frame to the Oculus runtime,
/// mirrors the result into the application back buffer and presents it.
/// </summary>
/// <param name="syncInterval">Unused here; Present is always called with interval 0
/// because the Oculus compositor owns frame pacing.</param>
public override void SwapBuffers(int syncInterval)
{
    eyeTextures[0].SwapTexture.Commit();
    eyeTextures[1].SwapTexture.Commit();

    // Describe both eye layers for the compositor.
    layerEyeFov.Header.Type = OVR.LayerType.EyeFov;
    layerEyeFov.Header.Flags = OVR.LayerFlags.None;
    layerEyeFov.SensorSampleTime = sampleTime;
    for (int i = 0; i < 2; i++)
    {
        layerEyeFov.ColorTexture[i] = eyeTextures[i].SwapTexture.TextureChain;
        layerEyeFov.Viewport[i] = eyeTextures[i].ViewportSize;
        layerEyeFov.Fov[i] = hmd.DefaultEyeFov[i];
        layerEyeFov.RenderPose[i] = eyePoses[i];
    }
    if (hmd.SubmitFrame(frameIndex, layers) < 0)
    {
        Log.Warning("OculusRiftDisplay SubmitFrame returned error");
    }

    // Honor runtime requests (HMD removed, recenter pressed, etc.).
    OVR.SessionStatus sessionStatus;
    hmd.GetSessionStatus(out sessionStatus);
    if (sessionStatus.ShouldQuit > 0)
        Application.Exit();
    if (sessionStatus.ShouldRecenter > 0)
        hmd.RecenterPose();
    frameIndex++;

    // Copy the runtime-provided mirror texture into our back buffer for the desktop window.
    var mirrorTextureD3D11 = new SharpDX.Direct3D11.Texture2D(mirrorTexture.GetMirrorBufferPtr());
    d3dDevice.ImmediateContext.CopyResource(mirrorTextureD3D11, backbufferColor.Surface.Resource);
    mirrorTextureD3D11.Dispose();
    swapChain.Present(0, PresentFlags.None);
}
/// <summary>
/// Our present hook that will grab a copy of the backbuffer when requested, then draws
/// the optional overlay and forwards the call to the real Present.
/// Note: this supports multi-sampling (anti-aliasing) via ResolveSubresource.
/// </summary>
/// <param name="swapChainPtr">Native pointer of the hooked swap chain.</param>
/// <param name="syncInterval">Sync interval forwarded to the real Present.</param>
/// <param name="flags">Present flags forwarded to the real Present.</param>
/// <returns>The HRESULT of the original method.</returns>
int PresentHook(IntPtr swapChainPtr, int syncInterval, SharpDX.DXGI.PresentFlags flags)
{
    this.Frame();
    SwapChain swapChain = (SharpDX.DXGI.SwapChain)swapChainPtr;
    try
    {
        #region Screenshot Request
        if (this.Request != null)
        {
            this.DebugMessage("PresentHook: Request Start");
            DateTime startTime = DateTime.Now;
            using (Texture2D texture = Texture2D.FromSwapChain<Texture2D>(swapChain, 0))
            {
                #region Determine region to capture
                // Default to the full back buffer unless the request narrows it down.
                System.Drawing.Rectangle regionToCapture = new System.Drawing.Rectangle(0, 0, texture.Description.Width, texture.Description.Height);
                if (this.Request.RegionToCapture.Width > 0)
                {
                    regionToCapture = this.Request.RegionToCapture;
                }
                #endregion

                var theTexture = texture;

                // If texture is multisampled, then we can use ResolveSubresource to copy it into a non-multisampled texture
                Texture2D textureResolved = null;
                if (texture.Description.SampleDescription.Count > 1)
                {
                    this.DebugMessage("PresentHook: resolving multi-sampled texture");
                    // texture is multi-sampled, lets resolve it down to single sample
                    textureResolved = new Texture2D(texture.Device, new Texture2DDescription()
                    {
                        CpuAccessFlags = CpuAccessFlags.None,
                        Format = texture.Description.Format,
                        Height = texture.Description.Height,
                        Usage = ResourceUsage.Default,
                        Width = texture.Description.Width,
                        ArraySize = 1,
                        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0), // Ensure single sample
                        BindFlags = BindFlags.None,
                        MipLevels = 1,
                        OptionFlags = texture.Description.OptionFlags
                    });
                    // Resolve into textureResolved
                    texture.Device.ImmediateContext.ResolveSubresource(texture, 0, textureResolved, 0, texture.Description.Format);
                    // Make "theTexture" be the resolved texture
                    theTexture = textureResolved;
                }

                // Create destination texture holding only the requested region.
                Texture2D textureDest = new Texture2D(texture.Device, new Texture2DDescription()
                {
                    CpuAccessFlags = CpuAccessFlags.None, // CpuAccessFlags.Write | CpuAccessFlags.Read,
                    Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm, // Supports BMP/PNG
                    Height = regionToCapture.Height,
                    Usage = ResourceUsage.Default, // ResourceUsage.Staging,
                    Width = regionToCapture.Width,
                    ArraySize = 1, //texture.Description.ArraySize,
                    SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0), // texture.Description.SampleDescription,
                    BindFlags = BindFlags.None,
                    MipLevels = 1, //texture.Description.MipLevels,
                    OptionFlags = texture.Description.OptionFlags
                });

                // Copy the subresource region, we are dealing with a flat 2D texture with no MipMapping, so 0 is the subresource index
                theTexture.Device.ImmediateContext.CopySubresourceRegion(theTexture, 0, new ResourceRegion()
                {
                    Top = regionToCapture.Top,
                    Bottom = regionToCapture.Bottom,
                    Left = regionToCapture.Left,
                    Right = regionToCapture.Right,
                    Front = 0,
                    Back = 1 // Must be 1 or only black will be copied
                }, textureDest, 0, 0, 0, 0);

                // Note: it would be possible to capture multiple frames and process them in a background thread
                // Copy to memory and send back to host process on a background thread so that we do not cause any delay in the rendering pipeline
                Guid requestId = this.Request.RequestId; // this.Request gets set to null, so copy the RequestId for use in the thread
                ThreadPool.QueueUserWorkItem(delegate
                {
                    //FileStream fs = new FileStream(@"c:\temp\temp.bmp", FileMode.Create);
                    //Texture2D.ToStream(testSubResourceCopy, ImageFileFormat.Bmp, fs);
                    DateTime startCopyToSystemMemory = DateTime.Now;
                    using (MemoryStream ms = new MemoryStream())
                    {
                        Texture2D.ToStream(textureDest.Device.ImmediateContext, textureDest, ImageFileFormat.Bmp, ms);
                        ms.Position = 0;
                        this.DebugMessage("PresentHook: Copy to System Memory time: " + (DateTime.Now - startCopyToSystemMemory).ToString());
                        DateTime startSendResponse = DateTime.Now;
                        ProcessCapture(ms, requestId);
                        this.DebugMessage("PresentHook: Send response time: " + (DateTime.Now - startSendResponse).ToString());
                    }
                    // Free the textureDest as we no longer need it.
                    textureDest.Dispose();
                    textureDest = null;
                    this.DebugMessage("PresentHook: Full Capture time: " + (DateTime.Now - startTime).ToString());
                });

                // Prevent the request from being processed a second time
                this.Request = null;

                // Make sure we free up the resolved texture if it was created
                if (textureResolved != null)
                {
                    textureResolved.Dispose();
                    textureResolved = null;
                }
            }
            this.DebugMessage("PresentHook: Copy BackBuffer time: " + (DateTime.Now - startTime).ToString());
            this.DebugMessage("PresentHook: Request End");
        }
        #endregion
#if OVERLAYENGINE
        #region Draw overlay (after screenshot so we don't capture overlay as well)
        if (this.Config.ShowOverlay)
        {
            // Initialise Overlay Engine (re-created whenever the hooked swap chain changes).
            if (_swapChainPointer != swapChain.NativePointer || _overlayEngine == null)
            {
                if (_overlayEngine != null)
                    _overlayEngine.Dispose();
                _overlayEngine = new DX11.DXOverlayEngine();
                _overlayEngine.Overlays.Add(new Capture.Hook.Common.Overlay
                {
                    Elements =
                    {
                        //new Capture.Hook.Common.TextElement(new System.Drawing.Font("Times New Roman", 22)) { Text = "Test", Location = new System.Drawing.Point(200, 200), Color = System.Drawing.Color.Yellow, AntiAliased = false},
                        new Capture.Hook.Common.FramesPerSecond(new System.Drawing.Font("Arial", 16)) { Location = new System.Drawing.Point(5,5), Color = System.Drawing.Color.Red, AntiAliased = true }
                    }
                });
                _overlayEngine.Initialise(swapChain);
                _swapChainPointer = swapChain.NativePointer;
            }
            // Draw Overlay(s)
            else if (_overlayEngine != null)
            {
                foreach (var overlay in _overlayEngine.Overlays)
                    overlay.Frame();
                _overlayEngine.Draw();
            }
        }
        #endregion
#endif
    }
    catch (Exception e)
    {
        // If there is an error we do not want to crash the hooked application, so swallow the exception
        this.DebugMessage("PresentHook: Exeception: " + e.GetType().FullName + ": " + e.ToString());
        //return unchecked((int)0x8000FFFF); //E_UNEXPECTED
    }
    // As always we need to call the original method, note that EasyHook has already repatched the original method
    // so calling it here will not cause an endless recursion to this function
    swapChain.Present(syncInterval, flags);
    return SharpDX.Result.Ok.Code;
}
/// <summary>
/// Releases <see cref="TextureView"/> and <see cref="Texture"/> when they have been created.
/// The view is released before the texture it wraps; both calls are skipped for
/// resources that were never loaded.
/// </summary>
public void Dispose()
{
    if (TextureView != null)
    {
        TextureView.Dispose();
    }

    if (Texture != null)
    {
        Texture.Dispose();
    }
}
/// <summary>
/// Captures the current back buffer into a GDI+ <see cref="Bitmap"/>.
/// A multisampled back buffer is first resolved into a single-sample texture,
/// because multisampled resources cannot be serialized directly.
/// </summary>
/// <returns>A bitmap holding a PNG-decoded copy of the back buffer; the caller owns it.</returns>
public Bitmap GetScreenBitmap()
{
    using (MemoryStream ms = new MemoryStream())
    {
        if (MultiSampleCount != 1)
        {
            // Resolve the MSAA back buffer into a temporary single-sample texture.
            using (Texture2D tex2 = new Texture2D(RenderContext11.PrepDevice, new Texture2DDescription()
            {
                Format = RenderContext11.DefaultColorFormat,
                ArraySize = 1,
                MipLevels = 1,
                Width = (int)ViewPort.Width,
                Height = (int)ViewPort.Height,
                SampleDescription = new SampleDescription(1, 0),
                Usage = ResourceUsage.Default,
                BindFlags = BindFlags.RenderTarget,
                CpuAccessFlags = CpuAccessFlags.None,
                OptionFlags = ResourceOptionFlags.None
            }))
            {
                devContext.ResolveSubresource(backBuffer, 0, tex2, 0, RenderContext11.DefaultColorFormat);
                Texture2D.ToStream(devContext, tex2, ImageFileFormat.Png, ms);
            }
        }
        else
        {
            Texture2D.ToStream(devContext, backBuffer, ImageFileFormat.Png, ms);
        }

        ms.Seek(0, SeekOrigin.Begin);

        // BUG FIX: GDI+ requires the source stream to stay open for the lifetime of a
        // Bitmap created from it; the original closed the stream and returned that
        // bitmap. Decode into a temporary and return a deep copy that owns its pixels.
        using (Bitmap decoded = new Bitmap(ms))
        {
            return new Bitmap(decoded);
        }
    }
}
// Compiles a single compute-shader entry point from the terrain compute HLSL file,
// releasing the intermediate bytecode.
private ComputeShader CompileTerrainShader(string entryPoint)
{
    ShaderBytecode shaderByteCode = ShaderBytecode.CompileFromFile(
        @"Data/Shaders/TerrainCompute.hlsl", entryPoint, "cs_5_0", Shader.ShaderFlags);
    ComputeShader shader = new ComputeShader(_context.DirectX.Device, shaderByteCode);
    shaderByteCode.Dispose();
    return shader;
}

/// <summary>
/// Compiles the terrain compute shaders, creates the per-face terrain/water/flow
/// textures with their views, and dispatches the initialization passes that seed
/// the terrain and water height maps on the GPU.
/// </summary>
private void GenerateHeightMaps()
{
    // Shaders only needed during initialization are locals; the rest are fields
    // reused by later simulation steps.
    ComputeShader initTerrain = CompileTerrainShader("initTerrain");
    ComputeShader initWater = CompileTerrainShader("initWater");
    _baseTerrainGeneration = CompileTerrainShader("applyRandomDisplacement");
    _flowsCalculation = CompileTerrainShader("flowsCalculation");
    _updateWaterLevel = CompileTerrainShader("updateWaterLevel");

    Texture2DDescription textureDescription = new Texture2DDescription
    {
        ArraySize = 1,
        BindFlags = BindFlags.ShaderResource | BindFlags.UnorderedAccess,
        CpuAccessFlags = CpuAccessFlags.None,
        Format = Format.R32_Float,
        Height = TextureSize,
        Width = TextureSize,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.None,
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Default
    };

    ConstantBuffer<ComputeData> computeBuffer = new ConstantBuffer<ComputeData>(_context);
    _context.DirectX.DeviceContext.ComputeShader.SetConstantBuffer(1, computeBuffer.Buffer);

    foreach (Face face in _faces)
    {
        // The Texture2D objects can be released once the views exist: a view keeps
        // the underlying Direct3D resource alive via COM reference counting.
        Texture2D terrainTexture = new Texture2D(_context.DirectX.Device, textureDescription);
        face.TerrainSrv = new ShaderResourceView(_context.DirectX.Device, terrainTexture);
        face.TerrainUav = new UnorderedAccessView(_context.DirectX.Device, terrainTexture);
        terrainTexture.Dispose();

        Texture2D waterTexture = new Texture2D(_context.DirectX.Device, textureDescription);
        face.WaterSrv = new ShaderResourceView(_context.DirectX.Device, waterTexture);
        face.WaterUav = new UnorderedAccessView(_context.DirectX.Device, waterTexture);
        waterTexture.Dispose();

        // Four flow textures (left/top/right/bottom), UAV-only.
        Texture2D flowsTexture = new Texture2D(_context.DirectX.Device, textureDescription);
        face.FlowsLeftUav = new UnorderedAccessView(_context.DirectX.Device, flowsTexture);
        flowsTexture.Dispose();

        flowsTexture = new Texture2D(_context.DirectX.Device, textureDescription);
        face.FlowsTopUav = new UnorderedAccessView(_context.DirectX.Device, flowsTexture);
        flowsTexture.Dispose();

        flowsTexture = new Texture2D(_context.DirectX.Device, textureDescription);
        face.FlowsRightUav = new UnorderedAccessView(_context.DirectX.Device, flowsTexture);
        flowsTexture.Dispose();

        flowsTexture = new Texture2D(_context.DirectX.Device, textureDescription);
        face.FlowsBottomUav = new UnorderedAccessView(_context.DirectX.Device, flowsTexture);
        flowsTexture.Dispose();

        _context.DirectX.DeviceContext.ComputeShader.SetUnorderedAccessView(0, face.TerrainUav);
        _context.DirectX.DeviceContext.ComputeShader.SetUnorderedAccessView(1, face.WaterUav);

        // Pass 1: seed the terrain height map.
        _context.DirectX.DeviceContext.ComputeShader.Set(initTerrain);
        computeBuffer.Update(new ComputeData(TextureSize - 1 - BatchSize, 0, 0, 0.0f));
        _context.DirectX.DeviceContext.Dispatch(TextureSize / BatchSize, TextureSize / BatchSize, 1);

        // Pass 2: seed the water height map.
        _context.DirectX.DeviceContext.ComputeShader.Set(initWater);
        computeBuffer.Update(new ComputeData(TextureSize - 1 - BatchSize, 0, 0, 0.05f));
        _context.DirectX.DeviceContext.Dispatch(TextureSize / BatchSize, TextureSize / BatchSize, 1);

        // Pass 3: second terrain pass, offset by half a batch, over a smaller grid.
        _context.DirectX.DeviceContext.ComputeShader.Set(initTerrain);
        computeBuffer.Update(new ComputeData(TextureSize - 1 - BatchSize, BatchSize / 2, BatchSize / 2, 0.5f));
        _context.DirectX.DeviceContext.Dispatch(TextureSize / BatchSize - 1, TextureSize / BatchSize - 1, 1);
    }

    _planeBuffer = new ConstantBuffer<PlaneData>(_context);

    initTerrain.Dispose();
    // BUG FIX: initWater was previously leaked — it is only needed during initialization.
    initWater.Dispose();
    computeBuffer.Dispose();
}
/// <summary>
/// Creates a shader resource view from one or more image files, building a mip
/// chain for each image on the CPU via successive GDI+ downscales.
/// Passing exactly six files produces a cube-map texture.
/// </summary>
/// <param name="device">Device used to create the texture and view.</param>
/// <param name="filenames">Image files; all are assumed to share the dimensions of the first.</param>
/// <returns>A shader resource view over the uploaded immutable texture array.</returns>
public static ShaderResourceView CreateTextureFromBitmap(Device device, params string[] filenames)
{
    int count = filenames.Length;
    BitmapSD[] sourceBitmaps = new BitmapSD[count];
    for (int i = 0; i < count; i++)
    {
        sourceBitmaps[i] = new BitmapSD(filenames[i]);
    }

    int width = sourceBitmaps[0].Width;
    int height = sourceBitmaps[0].Height;

    // log2 of the smaller dimension gives the mip count. BUG FIX: clamp to 1 so a
    // 1-pixel image still yields its base level instead of a zero-length array
    // (Math.Log(1) == 0 made 'mips' 0 and the assignment below threw).
    int mips = Math.Max(1, (int)(Math.Log(Math.Min(width, height)) / Math.Log(2)));

    BitmapSD[] bitmaps = new BitmapSD[count * mips];
    BitmapData[] data = new BitmapData[bitmaps.Length];
    DataRectangle[] rects = new DataRectangle[bitmaps.Length];

    // Build the mip chain for every source image; mip 0 is the source itself.
    for (int i = 0; i < count; i++)
    {
        var source = sourceBitmaps[i];
        bitmaps[i * mips] = source;
        for (int j = 1; j < mips; j++)
        {
            bitmaps[i * mips + j] = new BitmapSD(source, new SizeSD(source.Width >> j, source.Height >> j));
        }
    }

    // Lock every level so its pixel memory can be handed to the texture constructor.
    for (int i = 0; i < bitmaps.Length; i++)
    {
        data[i] = bitmaps[i].LockBits(
            new RectangleSD(0, 0, bitmaps[i].Width, bitmaps[i].Height),
            ImageLockMode.ReadOnly,
            PixelFormat.Format32bppArgb);
        rects[i] = new DataRectangle(data[i].Scan0, data[i].Stride);
    }

    // Describe and create the (immutable) Texture2D with all slices and mips uploaded at once.
    Texture2DDescription textureDesc = new Texture2DDescription()
    {
        MipLevels = mips,
        Format = Format.B8G8R8A8_UNorm,
        Width = width,
        Height = height,
        ArraySize = count,
        BindFlags = BindFlags.ShaderResource,
        Usage = ResourceUsage.Immutable,
        SampleDescription = new SampleDescription(1, 0)
    };
    if (count == 6)
    {
        // Six slices are interpreted as the faces of a cube map.
        textureDesc.OptionFlags |= ResourceOptionFlags.TextureCube;
    }

    var buffer = new Texture2D11(device.NativeDevice, textureDesc, rects);

    // The GPU owns a copy now; release all CPU-side bitmaps.
    for (int i = 0; i < bitmaps.Length; i++)
    {
        bitmaps[i].UnlockBits(data[i]);
        bitmaps[i].Dispose();
    }

    var resourceView = new ShaderResourceView(device.NativeDevice, buffer);
    // The view keeps the texture alive; the wrapper object can be released.
    buffer.Dispose();
    return resourceView;
}
/// <summary>
/// Saves the given render resource to <paramref name="stream"/> as a PNG, JPG or BMP
/// image by copying it through a CPU-readable staging texture and encoding via WIC.
/// </summary>
/// <param name="res">Resource to save; expected to wrap a 2D texture.</param>
/// <param name="stream">Destination stream the encoded image is written to.</param>
/// <param name="fmt">Target image format (Png, Jpg or Bmp).</param>
private static void Save(IResource res, Stream stream, ImageFileFormat fmt)
{
    // NOTE(review): unchecked 'as' cast — a non-Texture2D resource would make
    // 'texture' null and throw NullReferenceException below; confirm callers
    // only ever pass 2D textures.
    var texture = res.Resource as Texture2D;
    // Staging copy with CPU read access so the GPU texture can be mapped.
    var textureCopy = new Texture2D(MyRender11.Device, new Texture2DDescription
    {
        Width = (int)texture.Description.Width,
        Height = (int)texture.Description.Height,
        MipLevels = 1,
        ArraySize = 1,
        Format = texture.Description.Format,
        Usage = ResourceUsage.Staging,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        BindFlags = BindFlags.None,
        CpuAccessFlags = CpuAccessFlags.Read,
        OptionFlags = ResourceOptionFlags.None
    });
    RC.CopyResource(res, textureCopy);

    // Map the staging texture; the WIC bitmap below wraps this mapped memory,
    // so the subresource must stay mapped until encoding has finished.
    // NOTE(review): 'dataStream' is never disposed — verify whether SharpDX
    // expects the caller to release it after UnmapSubresource.
    DataStream dataStream;
    var dataBox = RC.MapSubresource(
        textureCopy,
        0,
        0,
        MapMode.Read,
        MapFlags.None,
        out dataStream);
    var dataRectangle = new DataRectangle
    {
        DataPointer = dataStream.DataPointer,
        Pitch = dataBox.RowPitch
    };

    // WIC bitmap view over the mapped pixels (no copy is made here).
    var bitmap = new Bitmap(
        MyRender11.WIC,
        textureCopy.Description.Width,
        textureCopy.Description.Height,
        PixelFormatFromFormat(textureCopy.Description.Format), // TODO: should use some conversion from textureCopy.Description.Format
        dataRectangle);

    // Encode the bitmap into the destination stream with the matching WIC encoder.
    using (var wicStream = new WICStream(MyRender11.WIC, stream))
    {
        BitmapEncoder bitmapEncoder;
        switch (fmt)
        {
            case ImageFileFormat.Png:
                bitmapEncoder = new PngBitmapEncoder(MyRender11.WIC, wicStream);
                break;
            case ImageFileFormat.Jpg:
                bitmapEncoder = new JpegBitmapEncoder(MyRender11.WIC, wicStream);
                break;
            case ImageFileFormat.Bmp:
                bitmapEncoder = new BmpBitmapEncoder(MyRender11.WIC, wicStream);
                break;
            default:
                // Unsupported format: assert and skip encoding entirely.
                MyRenderProxy.Assert(false, "Unsupported file format.");
                bitmapEncoder = null;
                break;
        }
        if (bitmapEncoder != null)
        {
            using (var bitmapFrameEncode = new BitmapFrameEncode(bitmapEncoder))
            {
                bitmapFrameEncode.Initialize();
                bitmapFrameEncode.SetSize(bitmap.Size.Width, bitmap.Size.Height);
                // Let WIC pick the closest pixel format it can encode.
                var pixelFormat = PixelFormat.FormatDontCare;
                bitmapFrameEncode.SetPixelFormat(ref pixelFormat);
                bitmapFrameEncode.WriteSource(bitmap);
                bitmapFrameEncode.Commit();
                bitmapEncoder.Commit();
            }
            bitmapEncoder.Dispose();
        }
    }

    // Unmap only after encoding: 'bitmap' referenced the mapped memory directly.
    RC.UnmapSubresource(textureCopy, 0);
    textureCopy.Dispose();
    bitmap.Dispose();
}
/// <summary>
/// Writes the current back buffer to <paramref name="filename"/> in the given image format.
/// A multisampled back buffer is first resolved to a single-sample texture, since
/// multisampled resources cannot be saved directly.
/// </summary>
/// <param name="filename">Destination file path.</param>
/// <param name="format">Image file format to write.</param>
public void SaveBackBuffer(string filename, ImageFileFormat format)
{
    if (MultiSampleCount != 1)
    {
        // 'using' guarantees the temporary resolve texture is released even if
        // ToFile throws. (The original also called GC.SuppressFinalize after
        // Dispose, which is redundant — Dispose already handles finalization.)
        using (Texture2D tex2 = new Texture2D(RenderContext11.PrepDevice, new Texture2DDescription()
        {
            Format = RenderContext11.DefaultColorFormat,
            ArraySize = 1,
            MipLevels = 1,
            Width = (int)ViewPort.Width,
            Height = (int)ViewPort.Height,
            SampleDescription = new SampleDescription(1, 0),
            Usage = ResourceUsage.Default,
            BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget,
            CpuAccessFlags = CpuAccessFlags.None,
            OptionFlags = ResourceOptionFlags.None
        }))
        {
            devContext.ResolveSubresource(backBuffer, 0, tex2, 0, RenderContext11.DefaultColorFormat);
            Texture2D.ToFile(devContext, tex2, format, filename);
        }
    }
    else
    {
        Texture2D.ToFile(devContext, backBuffer, format, filename);
    }
}
/// <summary>
/// Initializes Direct3D 11: device, swap chain, back-buffer render target view,
/// depth buffer/view, the shader constant buffer, and the texture manager.
/// Also starts a file watcher that reloads shaders when any .fx file changes.
/// </summary>
/// <param name="form">Window the swap chain presents into.</param>
public void InitDX(RenderForm form)
{
    // Swap chain with a 10-bit color back buffer.
    SwapChainDescription desc = new SwapChainDescription()
    {
        BufferCount = 1,
        ModeDescription = new ModeDescription(form.ClientSize.Width, form.ClientSize.Height,
            new Rational(60, 1), Format.R10G10B10A2_UNorm),
        IsWindowed = true,
        OutputHandle = form.Handle,
        SampleDescription = new SampleDescription(1, 0),
        SwapEffect = SwapEffect.Discard,
        Usage = Usage.RenderTargetOutput
    };

    // Create Device and SwapChain (debug layer only in debug builds).
#if DEBUG
    Device.CreateWithSwapChain(DriverType.Hardware, DeviceCreationFlags.Debug, desc, out device, out swapChain);
#else
    Device.CreateWithSwapChain(DriverType.Hardware, DeviceCreationFlags.None, desc, out device, out swapChain);
#endif
    context = device.ImmediateContext;

    // Ignore all windows events (e.g. alt-enter mode switches) for this window.
    factory = swapChain.GetParent<Factory>();
    factory.MakeWindowAssociation(form.Handle, WindowAssociationFlags.IgnoreAll);

    // New RenderTargetView from the backbuffer.
    backBuffer = Texture2D.FromSwapChain<Texture2D>(swapChain, 0);
    renderView = new RenderTargetView(device, backBuffer);

    LoadShaders();

    // Hot-reload shaders when any .fx file in the project directory changes.
    // NOTE(review): the watcher is only referenced by this local; once the method
    // returns it is eligible for collection and may stop raising events — consider
    // promoting it to a field. The path is also hard-coded to a dev machine layout.
    FileSystemWatcher watcher = new FileSystemWatcher(@"\dev\Galaxies\Galaxies\", "*.fx");
    watcher.NotifyFilter = NotifyFilters.LastWrite;
    watcher.Changed += new FileSystemEventHandler(Watcher_Changed);
    watcher.EnableRaisingEvents = true;

    // Constant buffer sized to hold the shader parameter struct.
    int tempSize = System.Runtime.InteropServices.Marshal.SizeOf(new ShaderParamStruct());
    constantBuffer = new Buffer(device, tempSize, ResourceUsage.Default, BindFlags.ConstantBuffer,
        CpuAccessFlags.None, ResourceOptionFlags.None, 0);

    // Create Depth Buffer & View. The texture object can be disposed once the view
    // exists: the view keeps the underlying resource alive.
    var depthBuffer = new Texture2D(device, new Texture2DDescription()
    {
        Format = Format.D32_Float_S8X24_UInt,
        ArraySize = 1,
        MipLevels = 1,
        Width = form.ClientSize.Width,
        Height = form.ClientSize.Height,
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.DepthStencil,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.None
    });
    depthView = new DepthStencilView(device, depthBuffer);
    depthBuffer.Dispose();

    texMan = new TextureManager();
}
/// <summary>
/// Demonstrates D3D11/D3D10 interop: Direct2D renders vector content onto a texture
/// shared through a D3D10.1 device, while D3D11 draws a triangle and then blends the
/// shared texture over it. Access to the shared surface is serialized with its keyed mutex.
/// Blocks until the window is closed, then disposes all graphics resources.
/// </summary>
public void Run()
{
    var form = new RenderForm("2d and 3d combined...it's like magic");
    form.KeyDown += (sender, args) =>
    {
        if (args.KeyCode == Keys.Escape)
            form.Close();
    };

    // DirectX DXGI 1.1 factory and the first graphics adapter.
    var factory1 = new Factory1();
    var adapter1 = factory1.GetAdapter1(0);

    // ---------------------------------------------------------------------------------------------
    // Setup direct 3d version 11. Its context will be used to combine the two elements.
    // ---------------------------------------------------------------------------------------------
    var description = new SwapChainDescription
    {
        BufferCount = 1,
        ModeDescription = new ModeDescription(0, 0, new Rational(60, 1), Format.R8G8B8A8_UNorm),
        IsWindowed = true,
        OutputHandle = form.Handle,
        SampleDescription = new SampleDescription(1, 0),
        SwapEffect = SwapEffect.Discard,
        Usage = Usage.RenderTargetOutput,
        Flags = SwapChainFlags.AllowModeSwitch
    };

    Device11 device11;
    SwapChain swapChain;
    Device11.CreateWithSwapChain(adapter1, DeviceCreationFlags.None, description, out device11, out swapChain);

    // Create a view of our render target, which is the backbuffer of the swap chain.
    RenderTargetView renderTargetView;
    using (var resource = Resource.FromSwapChain<Texture2D>(swapChain, 0))
        renderTargetView = new RenderTargetView(device11, resource);

    // Setting a viewport is required if you want to actually see anything.
    var context = device11.ImmediateContext;
    var viewport = new Viewport(0.0f, 0.0f, form.ClientSize.Width, form.ClientSize.Height);
    context.OutputMerger.SetTargets(renderTargetView);
    context.Rasterizer.SetViewports(viewport);

    // Create the DirectX11 texture2D. This texture will be shared with the DirectX10
    // device, which renders 2D content onto it; DirectX11 then draws this texture
    // (blended) onto the screen. The KeyedMutex flag is required in order to share
    // this resource between the two devices.
    var textureD3D11 = new Texture2D(device11, new Texture2DDescription
    {
        Width = form.ClientSize.Width,
        Height = form.ClientSize.Height,
        MipLevels = 1,
        ArraySize = 1,
        Format = Format.B8G8R8A8_UNorm,
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.SharedKeyedmutex
    });

    // Setup a direct 3d version 10.1 device (BGRA support is what Direct2D needs).
    var device10 = new Device10(adapter1, SharpDX.Direct3D10.DeviceCreationFlags.BgraSupport, FeatureLevel.Level_10_0);

    // Direct2D factory.
    var factory2D = new SharpDX.Direct2D1.Factory(FactoryType.SingleThreaded, DebugLevel.Information);

    // Bind the texture created on the direct3d11 device, through direct3d10, to the
    // direct2d render target.
    var sharedResource = textureD3D11.QueryInterface<SharpDX.DXGI.Resource>();
    var textureD3D10 = device10.OpenSharedResource<SharpDX.Direct3D10.Texture2D>(sharedResource.SharedHandle);
    var surface = textureD3D10.AsSurface();
    var rtp = new RenderTargetProperties
    {
        MinLevel = SharpDX.Direct2D1.FeatureLevel.Level_10,
        Type = RenderTargetType.Hardware,
        PixelFormat = new PixelFormat(Format.Unknown, AlphaMode.Premultiplied)
    };
    var renderTarget2D = new RenderTarget(factory2D, surface, rtp);
    var solidColorBrush = new SolidColorBrush(renderTarget2D, Colors.Red);

    // Load the effect, which includes both the vertex and pixel shaders and can
    // include more than one technique.
    ShaderBytecode shaderByteCode = ShaderBytecode.CompileFromFile(
        "effectDx11.fx",
        "fx_5_0",
        ShaderFlags.EnableStrictness);
    var effect = new Effect(device11, shaderByteCode);

    // Create triangle vertex data, making sure to rewind the stream afterward.
    var verticesTriangle = new DataStream(VertexPositionColor.SizeInBytes * 3, true, true);
    verticesTriangle.Write(new VertexPositionColor(new Vector3(0.0f, 0.5f, 0.5f), new Color4(1.0f, 0.0f, 0.0f, 1.0f)));
    verticesTriangle.Write(new VertexPositionColor(new Vector3(0.5f, -0.5f, 0.5f), new Color4(0.0f, 1.0f, 0.0f, 1.0f)));
    verticesTriangle.Write(new VertexPositionColor(new Vector3(-0.5f, -0.5f, 0.5f), new Color4(0.0f, 0.0f, 1.0f, 1.0f)));
    verticesTriangle.Position = 0;

    // Create the triangle vertex layout and buffer.
    var layoutColor = new InputLayout(device11, effect.GetTechniqueByName("Color").GetPassByIndex(0).Description.Signature, VertexPositionColor.inputElements);
    var vertexBufferColor = new Buffer(device11, verticesTriangle, (int)verticesTriangle.Length, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
    verticesTriangle.Close();

    // Create overlay vertex data (full-screen quad), making sure to rewind the stream
    // afterward. Top left of screen is (-1, +1); bottom right is (+1, -1).
    var verticesText = new DataStream(VertexPositionTexture.SizeInBytes * 4, true, true);
    verticesText.Write(new VertexPositionTexture(new Vector3(-1, 1, 0), new Vector2(0, 0f)));
    verticesText.Write(new VertexPositionTexture(new Vector3(1, 1, 0), new Vector2(1, 0)));
    verticesText.Write(new VertexPositionTexture(new Vector3(-1, -1, 0), new Vector2(0, 1)));
    verticesText.Write(new VertexPositionTexture(new Vector3(1, -1, 0), new Vector2(1, 1)));
    verticesText.Position = 0;

    // Create the overlay vertex layout and buffer.
    var layoutOverlay = new InputLayout(device11, effect.GetTechniqueByName("Overlay").GetPassByIndex(0).Description.Signature, VertexPositionTexture.inputElements);
    var vertexBufferOverlay = new Buffer(device11, verticesText, (int)verticesText.Length, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
    verticesText.Close();

    // Think of the shared textureD3D10 as an overlay: it needs to show the 2d content
    // but let the underlying triangle show through, which is accomplished by blending.
    var bsd = new BlendStateDescription();
    bsd.RenderTarget[0].IsBlendEnabled = true;
    bsd.RenderTarget[0].SourceBlend = BlendOption.SourceColor;
    bsd.RenderTarget[0].DestinationBlend = BlendOption.BlendFactor;
    bsd.RenderTarget[0].BlendOperation = BlendOperation.Add;
    bsd.RenderTarget[0].SourceAlphaBlend = BlendOption.One;
    bsd.RenderTarget[0].DestinationAlphaBlend = BlendOption.Zero;
    bsd.RenderTarget[0].AlphaBlendOperation = BlendOperation.Add;
    bsd.RenderTarget[0].RenderTargetWriteMask = ColorWriteMaskFlags.All;
    var blendStateTransparent = new BlendState(device11, bsd);

    // Create and tessellate an ellipse centered in the window.
    var center = new DrawingPointF(form.ClientSize.Width / 2.0f, form.ClientSize.Height / 2.0f);
    var ellipse = new EllipseGeometry(factory2D, new Ellipse(center, form.ClientSize.Width / 2.0f, form.ClientSize.Height / 2.0f));

    // Populate a PathGeometry from the ellipse tessellation.
    var tesselatedGeometry = new PathGeometry(factory2D);
    _geometrySink = tesselatedGeometry.Open();
    // Force RoundLineJoin otherwise the tessellated result looks buggy at line joins.
    _geometrySink.SetSegmentFlags(PathSegment.ForceRoundLineJoin);
    // Tessellate the ellipse to our TessellationSink (this object implements it).
    ellipse.Tessellate(1, this);
    _geometrySink.Close();

    // Acquire the keyed mutexes. These are needed to assure the device in use has
    // exclusive access to the shared surface.
    var device10Mutex = textureD3D10.QueryInterface<KeyedMutex>();
    var device11Mutex = textureD3D11.QueryInterface<KeyedMutex>();

    // Main rendering loop.
    bool first = true;
    RenderLoop.Run(form, () =>
    {
        if (first)
        {
            form.Activate();
            first = false;
        }

        // Clear the render target.
        context.ClearRenderTargetView(renderTargetView, Colors.DarkSlateGray);

        // Draw the triangle (no blending).
        context.InputAssembler.InputLayout = layoutColor;
        context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
        context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBufferColor, VertexPositionColor.SizeInBytes, 0));
        context.OutputMerger.BlendState = null;
        var currentTechnique = effect.GetTechniqueByName("Color");
        for (var pass = 0; pass < currentTechnique.Description.PassCount; ++pass)
        {
            using (var effectPass = currentTechnique.GetPassByIndex(pass))
            {
                System.Diagnostics.Debug.Assert(effectPass.IsValid, "Invalid EffectPass");
                effectPass.Apply(context);
            }
            context.Draw(3, 0);
        }

        // Draw the ellipse on the shared Texture2D (Direct2D via the D3D10 device),
        // holding that device's keyed mutex for the duration.
        device10Mutex.Acquire(0, 100);
        renderTarget2D.BeginDraw();
        renderTarget2D.Clear(Colors.Black);
        renderTarget2D.DrawGeometry(tesselatedGeometry, solidColorBrush);
        renderTarget2D.DrawEllipse(new Ellipse(center, 200, 200), solidColorBrush, 20, null);
        renderTarget2D.EndDraw();
        device10Mutex.Release(0);

        // Draw the shared texture2D onto the screen, blending the 2d content in.
        device11Mutex.Acquire(0, 100);
        var srv = new ShaderResourceView(device11, textureD3D11);
        effect.GetVariableByName("g_Overlay").AsShaderResource().SetResource(srv);
        context.InputAssembler.InputLayout = layoutOverlay;
        context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleStrip;
        context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBufferOverlay, VertexPositionTexture.SizeInBytes, 0));
        context.OutputMerger.BlendState = blendStateTransparent;
        currentTechnique = effect.GetTechniqueByName("Overlay");
        for (var pass = 0; pass < currentTechnique.Description.PassCount; ++pass)
        {
            using (var effectPass = currentTechnique.GetPassByIndex(pass))
            {
                System.Diagnostics.Debug.Assert(effectPass.IsValid, "Invalid EffectPass");
                effectPass.Apply(context);
            }
            context.Draw(4, 0);
        }
        srv.Dispose();
        device11Mutex.Release(0);

        swapChain.Present(0, PresentFlags.None);
    });

    // Dispose everything.
    vertexBufferColor.Dispose();
    vertexBufferOverlay.Dispose();
    layoutColor.Dispose();
    layoutOverlay.Dispose();
    effect.Dispose();
    shaderByteCode.Dispose();
    renderTarget2D.Dispose();
    swapChain.Dispose();
    device11.Dispose();
    device10.Dispose();
    textureD3D10.Dispose();
    textureD3D11.Dispose();
    factory1.Dispose();
    adapter1.Dispose();
    sharedResource.Dispose();
    factory2D.Dispose();
    surface.Dispose();
    solidColorBrush.Dispose();
    blendStateTransparent.Dispose();
    device10Mutex.Dispose();
    device11Mutex.Dispose();
    // BUG FIX: these three were previously leaked at shutdown.
    renderTargetView.Dispose();
    ellipse.Dispose();
    tesselatedGeometry.Dispose();
}
/// <summary>
/// Captures one frame of the desktop (adapter 0, output 1) via the DXGI desktop
/// duplication API and returns it as a 32bpp GDI+ bitmap.
/// </summary>
/// <returns>The captured frame. Loops until a frame is successfully acquired.</returns>
public static Bitmap CaptureScreen()
{
    // # of graphics card adapter
    const int numAdapter = 0;
    // # of output device (i.e. monitor)
    const int numOutput = 1;

    // Create DXGI Factory1, adapter, device and the output to duplicate.
    var factory = new Factory1();
    var adapter = factory.GetAdapter1(numAdapter);
    var device = new Device(adapter);
    var output = adapter.GetOutput(numOutput);
    var output1 = output.QueryInterface<Output1>();

    // Width/Height of the desktop region to capture.
    int width = ((Rectangle)output.Description.DesktopBounds).Width;
    int height = ((Rectangle)output.Description.DesktopBounds).Height;

    // Staging texture the CPU can read the captured frame from.
    var textureDesc = new Texture2DDescription
    {
        CpuAccessFlags = CpuAccessFlags.Read,
        BindFlags = BindFlags.None,
        Format = Format.B8G8R8A8_UNorm,
        Width = width,
        Height = height,
        OptionFlags = ResourceOptionFlags.None,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1, Quality = 0 },
        Usage = ResourceUsage.Staging
    };
    var screenTexture = new Texture2D(device, textureDesc);

    // Duplicate the output.
    var duplicatedOutput = output1.DuplicateOutput(device);

    bool captureDone = false;
    Bitmap bitmap = null;
    // The very first acquired frame (i == 0) is deliberately skipped: it may contain
    // stale contents from before duplication started.
    for (int i = 0; !captureDone; i++)
    {
        try
        {
            SharpDX.DXGI.Resource screenResource;
            OutputDuplicateFrameInformation duplicateFrameInformation;

            // Try to get a duplicated frame within the given timeout.
            duplicatedOutput.AcquireNextFrame(10000, out duplicateFrameInformation, out screenResource);

            if (i > 0)
            {
                // Copy the GPU resource into memory that can be accessed by the CPU.
                using (var screenTexture2D = screenResource.QueryInterface<Texture2D>())
                    device.ImmediateContext.CopyResource(screenTexture2D, screenTexture);

                // Get the desktop capture texture.
                var mapSource = device.ImmediateContext.MapSubresource(screenTexture, 0, MapMode.Read, MapFlags.None);

                bitmap = new System.Drawing.Bitmap(width, height, PixelFormat.Format32bppArgb);
                var boundsRect = new System.Drawing.Rectangle(0, 0, width, height);

                // Copy pixels from the mapped capture texture to the GDI bitmap
                // row by row, because the two row pitches may differ.
                var mapDest = bitmap.LockBits(boundsRect, ImageLockMode.WriteOnly, bitmap.PixelFormat);
                var sourcePtr = mapSource.DataPointer;
                var destPtr = mapDest.Scan0;
                for (int y = 0; y < height; y++)
                {
                    Utilities.CopyMemory(destPtr, sourcePtr, width * 4);
                    sourcePtr = IntPtr.Add(sourcePtr, mapSource.RowPitch);
                    destPtr = IntPtr.Add(destPtr, mapDest.Stride);
                }

                // Release source and dest locks.
                bitmap.UnlockBits(mapDest);
                device.ImmediateContext.UnmapSubresource(screenTexture, 0);

                captureDone = true;
            }

            screenResource.Dispose();
            duplicatedOutput.ReleaseFrame();
        }
        catch (SharpDXException e)
        {
            if (e.ResultCode.Code != SharpDX.DXGI.ResultCode.WaitTimeout.Result.Code)
            {
                // BUG FIX: rethrow with 'throw;' — 'throw e;' resets the stack trace.
                throw;
            }
        }
    }

    duplicatedOutput.Dispose();
    screenTexture.Dispose();
    output1.Dispose();
    output.Dispose();
    device.Dispose();
    adapter.Dispose();
    factory.Dispose();

    return bitmap;
}
private void InitializeOculus() { RenderForm form = new RenderForm("OculusWrap SharpDX demo"); Wrap oculus = new Wrap(); Hmd hmd; form.KeyUp += new System.Windows.Forms.KeyEventHandler(this.Window_KeyUp); //form.moused //form.Activate(); //form.Show(); int textureWidth = 0, textureHeight = 0; newTextureArrived = false; //zoom == 2 is not implemented, because the visual quality would be too low. //zoom == 4 will be implemented in the future. if (zoom == 3) { textureWidth = 3328; textureHeight = 1664; } bool success = oculus.Initialize(); if (!success) { System.Windows.Forms.MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error); return; } // Use the head mounted display, if it's available, otherwise use the debug HMD. int numberOfHeadMountedDisplays = oculus.Hmd_Detect(); if (numberOfHeadMountedDisplays > 0) hmd = oculus.Hmd_Create(0); else hmd = oculus.Hmd_CreateDebug(OculusWrap.OVR.HmdType.DK2); if (hmd == null) { System.Windows.Forms.MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error); return; } if (hmd.ProductName == string.Empty) System.Windows.Forms.MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error); // Specify which head tracking capabilities to enable. hmd.SetEnabledCaps(OVR.HmdCaps.LowPersistence | OVR.HmdCaps.DynamicPrediction); // Start the sensor which informs of the Rift's pose and motion hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None); // Create a set of layers to submit. EyeTexture[] eyeTextures = new EyeTexture[2]; OVR.ovrResult result; // Create DirectX drawing device. 
SharpDX.Direct3D11.Device device = new Device(SharpDX.Direct3D.DriverType.Hardware, DeviceCreationFlags.Debug); // Create DirectX Graphics Interface factory, used to create the swap chain. Factory factory = new Factory(); DeviceContext immediateContext = device.ImmediateContext; // Define the properties of the swap chain. SwapChainDescription swapChainDescription = new SwapChainDescription(); swapChainDescription.BufferCount = 1; swapChainDescription.IsWindowed = true; swapChainDescription.OutputHandle = form.Handle; swapChainDescription.SampleDescription = new SampleDescription(1, 0); swapChainDescription.Usage = Usage.RenderTargetOutput | Usage.ShaderInput; swapChainDescription.SwapEffect = SwapEffect.Sequential; swapChainDescription.Flags = SwapChainFlags.AllowModeSwitch; swapChainDescription.ModeDescription.Width = form.Width; swapChainDescription.ModeDescription.Height = form.Height; swapChainDescription.ModeDescription.Format = Format.R8G8B8A8_UNorm; swapChainDescription.ModeDescription.RefreshRate.Numerator = 0; swapChainDescription.ModeDescription.RefreshRate.Denominator = 1; // Create the swap chain. SharpDX.DXGI.SwapChain swapChain = new SwapChain(factory, device, swapChainDescription); // Retrieve the back buffer of the swap chain. Texture2D backBuffer = swapChain.GetBackBuffer<Texture2D>(0); RenderTargetView backBufferRenderTargetView = new RenderTargetView(device, backBuffer); // Create a depth buffer, using the same width and height as the back buffer. 
Texture2DDescription depthBufferDescription = new Texture2DDescription(); depthBufferDescription.Format = Format.D32_Float; depthBufferDescription.ArraySize = 1; depthBufferDescription.MipLevels = 1; depthBufferDescription.Width = form.Width; depthBufferDescription.Height = form.Height; depthBufferDescription.SampleDescription = new SampleDescription(1, 0); depthBufferDescription.Usage = ResourceUsage.Default; depthBufferDescription.BindFlags = BindFlags.DepthStencil; depthBufferDescription.CpuAccessFlags = CpuAccessFlags.None; depthBufferDescription.OptionFlags = ResourceOptionFlags.None; // Define how the depth buffer will be used to filter out objects, based on their distance from the viewer. DepthStencilStateDescription depthStencilStateDescription = new DepthStencilStateDescription(); depthStencilStateDescription.IsDepthEnabled = true; depthStencilStateDescription.DepthComparison = Comparison.Less; depthStencilStateDescription.DepthWriteMask = DepthWriteMask.Zero; // Create the depth buffer. Texture2D depthBuffer = new Texture2D(device, depthBufferDescription); DepthStencilView depthStencilView = new DepthStencilView(device, depthBuffer); DepthStencilState depthStencilState = new DepthStencilState(device, depthStencilStateDescription); Viewport viewport = new Viewport(0, 0, hmd.Resolution.Width, hmd.Resolution.Height, 0.0f, 1.0f); immediateContext.OutputMerger.SetDepthStencilState(depthStencilState); immediateContext.OutputMerger.SetRenderTargets(depthStencilView, backBufferRenderTargetView); immediateContext.Rasterizer.SetViewport(viewport); // Retrieve the DXGI device, in order to set the maximum frame latency. 
// Limit queued frames to one, to minimize motion-to-photon latency.
using (SharpDX.DXGI.Device1 dxgiDevice = device.QueryInterface<SharpDX.DXGI.Device1>())
{
    dxgiDevice.MaximumFrameLatency = 1;
}

// Create the Oculus layer list with a single eye-FOV layer that will carry both eyes.
Layers layers = new Layers();
LayerEyeFov layerEyeFov = layers.AddLayerEyeFov();

// Build the render resources for each of the two eyes.
for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
{
    OVR.EyeType eye = (OVR.EyeType)eyeIndex;
    EyeTexture eyeTexture = new EyeTexture();
    eyeTextures[eyeIndex] = eyeTexture;

    // Retrieve size and position of the texture for the current eye, at the
    // SDK-recommended resolution (pixel density 1.0).
    eyeTexture.FieldOfView = hmd.DefaultEyeFov[eyeIndex];
    eyeTexture.TextureSize = hmd.GetFovTextureSize(eye, hmd.DefaultEyeFov[eyeIndex], 1.0f);
    eyeTexture.RenderDescription = hmd.GetRenderDesc(eye, hmd.DefaultEyeFov[eyeIndex]);
    eyeTexture.HmdToEyeViewOffset = eyeTexture.RenderDescription.HmdToEyeViewOffset;
    eyeTexture.ViewportSize.Position = new OVR.Vector2i(0, 0);
    eyeTexture.ViewportSize.Size = eyeTexture.TextureSize;
    eyeTexture.Viewport = new Viewport(0, 0, eyeTexture.TextureSize.Width, eyeTexture.TextureSize.Height, 0.0f, 1.0f);

    // Define a color texture at the size recommended for the eye texture; it must be
    // both a render target (we draw into it) and a shader resource (the SDK reads it).
    eyeTexture.Texture2DDescription = new Texture2DDescription();
    eyeTexture.Texture2DDescription.Width = eyeTexture.TextureSize.Width;
    eyeTexture.Texture2DDescription.Height = eyeTexture.TextureSize.Height;
    eyeTexture.Texture2DDescription.ArraySize = 1;
    eyeTexture.Texture2DDescription.MipLevels = 1;
    eyeTexture.Texture2DDescription.Format = Format.R8G8B8A8_UNorm;
    eyeTexture.Texture2DDescription.SampleDescription = new SampleDescription(1, 0);
    eyeTexture.Texture2DDescription.Usage = ResourceUsage.Default;
    eyeTexture.Texture2DDescription.CpuAccessFlags = CpuAccessFlags.None;
    eyeTexture.Texture2DDescription.BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget;

    // Convert the SharpDX texture description to the native Direct3D texture description.
    OVR.D3D11.D3D11_TEXTURE2D_DESC swapTextureDescriptionD3D11 = SharpDXHelpers.CreateTexture2DDescription(eyeTexture.Texture2DDescription);

    // Create a SwapTextureSet, which will contain the textures to render to, for the current eye.
    result = hmd.CreateSwapTextureSetD3D11(device.NativePointer, ref swapTextureDescriptionD3D11, out eyeTexture.SwapTextureSet);
    WriteErrorDetails(oculus, result, "Failed to create swap texture set.");

    // Create room for each DirectX texture in the SwapTextureSet.
    eyeTexture.Textures = new Texture2D[eyeTexture.SwapTextureSet.TextureCount];
    eyeTexture.RenderTargetViews = new RenderTargetView[eyeTexture.SwapTextureSet.TextureCount];

    // Wrap each unmanaged texture of the SwapTextureSet in a managed Texture2D plus a
    // render target view that the render loop draws into.
    for (int textureIndex = 0; textureIndex < eyeTexture.SwapTextureSet.TextureCount; textureIndex++)
    {
        // Retrieve the current textureData object.
        OVR.D3D11.D3D11TextureData textureData = eyeTexture.SwapTextureSet.Textures[textureIndex];

        // Create a managed Texture2D, based on the unmanaged texture pointer.
        eyeTexture.Textures[textureIndex] = new Texture2D(textureData.Texture);

        // Create a render target view for the current Texture2D.
        eyeTexture.RenderTargetViews[textureIndex] = new RenderTargetView(device, eyeTexture.Textures[textureIndex]);
    }

    // Define the depth buffer, at the size recommended for the eye texture.
    eyeTexture.DepthBufferDescription = new Texture2DDescription();
    eyeTexture.DepthBufferDescription.Format = Format.D32_Float;
    eyeTexture.DepthBufferDescription.Width = eyeTexture.TextureSize.Width;
    eyeTexture.DepthBufferDescription.Height = eyeTexture.TextureSize.Height;
    eyeTexture.DepthBufferDescription.ArraySize = 1;
    eyeTexture.DepthBufferDescription.MipLevels = 1;
    eyeTexture.DepthBufferDescription.SampleDescription = new SampleDescription(1, 0);
    eyeTexture.DepthBufferDescription.Usage = ResourceUsage.Default;
    eyeTexture.DepthBufferDescription.BindFlags = BindFlags.DepthStencil;
    eyeTexture.DepthBufferDescription.CpuAccessFlags = CpuAccessFlags.None;
    eyeTexture.DepthBufferDescription.OptionFlags = ResourceOptionFlags.None;

    // Create the depth buffer and its view.
    eyeTexture.DepthBuffer = new Texture2D(device, eyeTexture.DepthBufferDescription);
    eyeTexture.DepthStencilView = new DepthStencilView(device, eyeTexture.DepthBuffer);

    // Specify the texture to show on the HMD, for this eye's slot of the FOV layer.
    layerEyeFov.ColorTexture[eyeIndex] = eyeTexture.SwapTextureSet.SwapTextureSetPtr;
    layerEyeFov.Viewport[eyeIndex].Position = new OVR.Vector2i(0, 0);
    layerEyeFov.Viewport[eyeIndex].Size = eyeTexture.TextureSize;
    layerEyeFov.Fov[eyeIndex] = eyeTexture.FieldOfView;
    layerEyeFov.Header.Flags = OVR.LayerFlags.TextureOriginAtBottomLeft;
}

// Define the texture used to display the rendered result on the computer monitor.
// Describe the mirror texture (a copy of the HMD view) shown in the desktop window.
Texture2DDescription mirrorTextureDescription = new Texture2DDescription();
mirrorTextureDescription.Width = form.Width;
mirrorTextureDescription.Height = form.Height;
mirrorTextureDescription.ArraySize = 1;
mirrorTextureDescription.MipLevels = 1;
mirrorTextureDescription.Format = Format.R8G8B8A8_UNorm;
mirrorTextureDescription.SampleDescription = new SampleDescription(1, 0);
mirrorTextureDescription.Usage = ResourceUsage.Default;
mirrorTextureDescription.CpuAccessFlags = CpuAccessFlags.None;
mirrorTextureDescription.BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget;

// Anisotropic sampler with wrap addressing on all axes, used by the pixel shader.
SamplerStateDescription samplerStateDescription = new SamplerStateDescription
{
    AddressU = TextureAddressMode.Wrap,
    AddressV = TextureAddressMode.Wrap,
    AddressW = TextureAddressMode.Wrap,
    Filter = Filter.Anisotropic
};

// Default rasterizer, but with counter-clockwise winding treated as front-facing.
RasterizerStateDescription rasterizerStateDescription = RasterizerStateDescription.Default();
rasterizerStateDescription.IsFrontCounterClockwise = true;

// Convert the SharpDX texture description to the native Direct3D texture description.
OVR.D3D11.D3D11_TEXTURE2D_DESC mirrorTextureDescriptionD3D11 = SharpDXHelpers.CreateTexture2DDescription(mirrorTextureDescription);
OculusWrap.D3D11.MirrorTexture mirrorTexture;

// Create the texture used to display the rendered result on the computer monitor.
result = hmd.CreateMirrorTextureD3D11(device.NativePointer, ref mirrorTextureDescriptionD3D11, out mirrorTexture);
WriteErrorDetails(oculus, result, "Failed to create mirror texture.");
Texture2D mirrorTextureD3D11 = new Texture2D(mirrorTexture.Texture.Texture);

#region Vertex and pixel shader
// Compile the vertex shader from Shaders.fx (shader model 4.0).
ShaderBytecode vertexShaderByteCode = ShaderBytecode.CompileFromFile("Shaders.fx", "VertexShaderMain", "vs_4_0");
VertexShader vertexShader = new VertexShader(device, vertexShaderByteCode);

// Create pixel shader.
// Compile the pixel shader from Shaders.fx (shader model 4.0).
ShaderBytecode pixelShaderByteCode = ShaderBytecode.CompileFromFile("Shaders.fx", "PixelShaderMain", "ps_4_0");
PixelShader pixelShader = new PixelShader(device, pixelShaderByteCode);
ShaderSignature shaderSignature = ShaderSignature.GetInputSignature(vertexShaderByteCode);

// CPU-writable (Dynamic/Write) texture that the render loop refreshes from
// streamTexture whenever newTextureArrived is set.
Texture2D myTexture = new Texture2D(device, new Texture2DDescription()
{
    Format = Format.R8G8B8A8_UNorm,
    ArraySize = 1,
    MipLevels = 1,
    Width = textureWidth,
    Height = textureHeight,
    SampleDescription = new SampleDescription(1, 0),
    Usage = ResourceUsage.Dynamic,
    BindFlags = BindFlags.ShaderResource,
    CpuAccessFlags = CpuAccessFlags.Write,
    OptionFlags = ResourceOptionFlags.None,
});
ShaderResourceView textureView = new ShaderResourceView(device, myTexture);

// Create the sampler for the texture.
SamplerState samplerState = new SamplerState(device, samplerStateDescription);

// Create the custom rasterizer state (counter-clockwise front faces, see description above).
RasterizerState rasterizerState = new RasterizerState(device, rasterizerStateDescription);

// Build the 3D model: index/vertex data comes from CreateGeometry.
int[] indices = null;
Vertex[] vertices = null;
CreateGeometry(out indices, out vertices);

// Each vertex consists of a position (16 bytes), a color (16 bytes) and a UV pair;
// offsets below must match the Vertex struct layout and the shader input signature.
InputElement[] inputElements = new InputElement[]
{
    new InputElement("SV_Position", 0, Format.R32G32B32A32_Float, 0, 0),
    new InputElement("COLOR", 0, Format.R32G32B32A32_Float, 16, 0),
    new InputElement("TEXCOORD", 0, Format.R32G32_Float, 32, 0),
};

// Define an input layout to be passed to the vertex shader.
InputLayout inputLayout = new InputLayout(device, shaderSignature, inputElements);

// Create a vertex buffer, containing our 3D model.
Buffer vertexBuffer = Buffer.Create(device, BindFlags.VertexBuffer, vertices);

// Create a constant buffer, to contain our WorldViewProjection matrix, that will be passed to the vertex shader.
// Constant buffer holding the WorldViewProjection matrix, updated once per eye per frame.
Buffer constantBuffer = new Buffer(device, Utilities.SizeOf<Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
Buffer indexBuffer = SharpDX.Direct3D11.Buffer.Create(device, BindFlags.IndexBuffer, indices);

// Setup the immediate context to use the shaders and model we defined; these bindings
// stay fixed for the whole render loop.
immediateContext.InputAssembler.InputLayout = inputLayout;
immediateContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
immediateContext.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBuffer, Utilities.SizeOf<Vertex>(), 0));
immediateContext.InputAssembler.SetIndexBuffer(indexBuffer, Format.R32_UInt, 0);
immediateContext.VertexShader.SetConstantBuffer(0, constantBuffer);
immediateContext.VertexShader.Set(vertexShader);
immediateContext.PixelShader.Set(pixelShader);
immediateContext.PixelShader.SetShaderResource(0, textureView);
immediateContext.PixelShader.SetSampler(0, samplerState);
#endregion

DateTime startTime = DateTime.Now;
Vector3 position = new Vector3(0, 0, 0);
oculusReady = true;

#region Render loop
RenderLoop.Run(form, () =>
{
    OVR.Vector3f[] hmdToEyeViewOffsets = { eyeTextures[0].HmdToEyeViewOffset, eyeTextures[1].HmdToEyeViewOffset };
    OVR.FrameTiming frameTiming = hmd.GetFrameTiming(0);
    OVR.TrackingState trackingState = hmd.GetTrackingState(frameTiming.DisplayMidpointSeconds);
    OVR.Posef[] eyePoses = new OVR.Posef[2];

    // Calculate the position and orientation of each eye.
    oculus.CalcEyePoses(trackingState.HeadPose.ThePose, hmdToEyeViewOffsets, ref eyePoses);

    float timeSinceStart = (float)(DateTime.Now - startTime).TotalSeconds;

    for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
    {
        OVR.EyeType eye = (OVR.EyeType)eyeIndex;
        EyeTexture eyeTexture = eyeTextures[eyeIndex];
        layerEyeFov.RenderPose[eyeIndex] = eyePoses[eyeIndex];

        // Retrieve the index of the active texture and select the next texture as being active next.
        int textureIndex = eyeTexture.SwapTextureSet.CurrentIndex++;

        immediateContext.OutputMerger.SetRenderTargets(eyeTexture.DepthStencilView, eyeTexture.RenderTargetViews[textureIndex]);
        immediateContext.ClearRenderTargetView(eyeTexture.RenderTargetViews[textureIndex], Color.Black);
        immediateContext.ClearDepthStencilView(eyeTexture.DepthStencilView, DepthStencilClearFlags.Depth | DepthStencilClearFlags.Stencil, 1.0f, 0);
        immediateContext.Rasterizer.SetViewport(eyeTexture.Viewport);
        // Use the custom rasterizer state (counter-clockwise front faces).
        immediateContext.Rasterizer.State = rasterizerState;

        // Retrieve the eye rotation quaternion and use it to calculate the LookAt direction and the LookUp direction.
        Quaternion rotationQuaternion = SharpDXHelpers.ToQuaternion(eyePoses[eyeIndex].Orientation);
        Matrix rotationMatrix = Matrix.RotationQuaternion(rotationQuaternion);
        Vector3 lookUp = Vector3.Transform(new Vector3(0, -1, 0), rotationMatrix).ToVector3();
        Vector3 lookAt = Vector3.Transform(new Vector3(0, 0, 1), rotationMatrix).ToVector3();
        Vector3 viewPosition = position - eyePoses[eyeIndex].Position.ToVector3();

        // Model spins slowly around the Y axis; view/projection come from the tracked eye pose.
        Matrix world = Matrix.Scaling(1.0f) * Matrix.RotationY(timeSinceStart * 2 / 10f);
        Matrix viewMatrix = Matrix.LookAtRH(viewPosition, viewPosition + lookAt, lookUp);
        Matrix projectionMatrix = OVR.ovrMatrix4f_Projection(eyeTexture.FieldOfView, 0.1f, 10.0f, OVR.ProjectionModifier.None).ToMatrix();
        projectionMatrix.Transpose();

        // Transpose for HLSL's default column-major constant buffer layout.
        Matrix worldViewProjection = world * viewMatrix * projectionMatrix;
        worldViewProjection.Transpose();

        // Update the transformation matrix and draw the model for this eye.
        immediateContext.UpdateSubresource(ref worldViewProjection, constantBuffer);
        immediateContext.DrawIndexed(indices.Length, 0, 0);
    }

    // Hand the rendered eye textures to the compositor, then mirror the result to the window.
    hmd.SubmitFrame(0, layers);
    immediateContext.CopyResource(mirrorTextureD3D11, backBuffer);
    swapChain.Present(0, PresentFlags.None);

    // When a new streamed image is available, decode it and upload it into myTexture.
    if (newTextureArrived == true)
    {
        newTextureArrived = false;
        DataBox map = device.ImmediateContext.MapSubresource(myTexture, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None);
        try
        {
            // FIX: the factory and the decoded bitmap were previously created every frame
            // and never disposed, leaking native WIC objects; dispose both each pass.
            using (SharpDX.WIC.ImagingFactory imagingFactory = new SharpDX.WIC.ImagingFactory())
            // Load the BitmapSource with appropriate formatting (Format32bppPRGBA).
            using (SharpDX.WIC.BitmapSource bitMap = LoadBitmap(imagingFactory, streamTexture))
            {
                int height = bitMap.Size.Height;
                int stride = bitMap.Size.Width * 4;
                // NOTE(review): this assumes map.RowPitch == width * 4 and that the decoded
                // bitmap matches myTexture's dimensions (textureWidth x textureHeight) — if the
                // mapped row pitch is padded, rows must be copied individually. TODO confirm.
                bitMap.CopyPixels(stride, map.DataPointer, height * stride);
            }
        }
        finally
        {
            // FIX: unmap even if decoding throws, so the texture is never left mapped.
            device.ImmediateContext.UnmapSubresource(myTexture, 0);
        }
        streamTexture.Seek(0, SeekOrigin.Begin);
    }
});
#endregion

// Release all resources.
inputLayout.Dispose(); // FIX: was disposed twice; second call removed.
constantBuffer.Dispose();
indexBuffer.Dispose();
vertexBuffer.Dispose();
shaderSignature.Dispose();
pixelShader.Dispose();
pixelShaderByteCode.Dispose();
vertexShader.Dispose();
vertexShaderByteCode.Dispose();
// FIX: the following five resources were created above but never released.
samplerState.Dispose();
rasterizerState.Dispose();
textureView.Dispose();
myTexture.Dispose();
mirrorTextureD3D11.Dispose();
mirrorTexture.Dispose();
layers.Dispose();
eyeTextures[0].Dispose();
eyeTextures[1].Dispose();
immediateContext.ClearState();
immediateContext.Flush();
immediateContext.Dispose();
depthStencilState.Dispose();
depthStencilView.Dispose();
depthBuffer.Dispose();
backBufferRenderTargetView.Dispose();
backBuffer.Dispose();
swapChain.Dispose();
factory.Dispose();

// Disposing the device, before the hmd, will cause the hmd to fail when disposing.
// Disposing the device, after the hmd, will cause the dispose of the device to fail.
// It looks as if the hmd steals ownership of the device and destroys it, when it's shutting down,
// which is why the device is deliberately not disposed here (see the two notes above).
// device.Dispose();
hmd.Dispose();
oculus.Dispose();
}