private bool ProcessFrame(Direct3D11CaptureFrame frame, Func<IntPtr, int, int, int, int, object, bool> setPacket)
{
    bool processed = true;
    using (var bitmap = Direct3D11Helper.CreateSharpDXTexture2D(frame.Surface))
    {
        if (_dx == null)
        {
            _d3dDevice.ImmediateContext.CopyResource(bitmap, _screenTexture);
            var data = _d3dDevice.ImmediateContext.MapSubresource(_screenTexture, 0, MapMode.Read, MapFlags.None);
            if (_initSize.Width * 4 > data.RowPitch)
            {
                ScreenCaptureManager.Instance.Logger.Warining($"Width of capture is greater than pitch: {_initSize.Width * 4} > {data.RowPitch}");
            }
            processed = setPacket(data.DataPointer, 4, _initSize.Width, _initSize.Height, data.RowPitch, null);
            _d3dDevice.ImmediateContext.UnmapSubresource(_screenTexture, 0);
        }
        else
        {
            var dxRes = _dx.CreateCopy(bitmap);
            processed = setPacket(IntPtr.Zero, 0, 0, 0, 0, dxRes);
        }
    }
    return processed;
}
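// Direct3D11Helper.CreateSharpDXTexture2D is used by several snippets in this
// section but never shown. A minimal sketch of the interop it typically wraps,
// modeled on the common Windows.Graphics.Capture sample helpers; treat the
// exact shape (and the assumption of SharpDX) as illustrative, not as the
// original project's code. Requires System.Runtime.InteropServices and
// Windows.Graphics.DirectX.Direct3D11 usings.
[ComImport]
[Guid("A9B3D012-3DF2-4EE3-B8D1-8695F457D3C1")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
interface IDirect3DDxgiInterfaceAccess
{
    IntPtr GetInterface([In] ref Guid iid);
}

static class Direct3D11Helper
{
    // IID of ID3D11Texture2D.
    private static Guid ID3D11Texture2D = new Guid("6f15aaf2-d208-4e89-9ab4-489535d34f9c");

    public static SharpDX.Direct3D11.Texture2D CreateSharpDXTexture2D(IDirect3DSurface surface)
    {
        // Ask the WinRT surface for its underlying D3D11 texture pointer
        // and wrap it in a SharpDX Texture2D.
        var access = (IDirect3DDxgiInterfaceAccess)(object)surface;
        IntPtr d3dPointer = access.GetInterface(ref ID3D11Texture2D);
        return new SharpDX.Direct3D11.Texture2D(d3dPointer);
    }
}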
private async void OnFrameArrived(Direct3D11CaptureFramePool sender, object args)
{
    _currentFrame = sender.TryGetNextFrame();

    BarcodeReader reader = new BarcodeReader();
    reader.AutoRotate = true;
    reader.Options.TryHarder = true;
    reader.Options.PureBarcode = false;
    reader.Options.PossibleFormats = new List<BarcodeFormat>();
    reader.Options.PossibleFormats.Add(BarcodeFormat.QR_CODE);

    var bitmap = await SoftwareBitmap.CreateCopyFromSurfaceAsync(_currentFrame.Surface).AsTask();
    var result = reader.Decode(bitmap);
    if (!string.IsNullOrEmpty(result?.Text) &&
        (result.Text.StartsWith("suavekeys|expression") || result.Text.StartsWith("suavekeys|gesture")))
    {
        Debug.WriteLine("WOOHOO WE FOUND A CODE");
        if (!_isSending)
        {
            _isSending = true;
            var command = result.Text.Split('|')[2];
            await _suaveKeysService.SendCommandAsync(command);
            _isSending = false;
        }
    }
    _frameEvent.Set();
}
private void ProcessFrame(Direct3D11CaptureFrame frame)
{
    // Resize and device-lost leverage the same function on the
    // Direct3D11CaptureFramePool. Refactoring it this way avoids
    // throwing in the catch block below (device creation could always
    // fail) along with ensuring that resize completes successfully and
    // isn't vulnerable to device-lost.
    bool needsReset = false;
    bool recreateDevice = false;

    var scale = DisplayInformation.GetForCurrentView().RawPixelsPerViewPixel;

    if ((frame.ContentSize.Width != _lastSize.Width) ||
        (frame.ContentSize.Height != _lastSize.Height))
    {
        needsReset = true;
        _lastSize = frame.ContentSize;

        var _height = frame.ContentSize.Height - 32;
        bool result = Windows.UI.ViewManagement.ApplicationView.GetForCurrentView()
            .TryResizeView(new Size { Width = frame.ContentSize.Width / scale, Height = _height / scale });
        System.Diagnostics.Debug.WriteLine(result);
        if (!result)
        {
            bool _ = Windows.UI.ViewManagement.ApplicationView.GetForCurrentView().TryEnterFullScreenMode();
            System.Diagnostics.Debug.WriteLine(_);
        }
    }

    try
    {
        // Take the D3D11 surface and draw it into a Composition surface.
        // Convert our D3D11 surface into a Win2D object.
        CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, frame.Surface);
        _currentFrame = canvasBitmap;

        // Helper that handles the drawing for us.
        FillSurfaceWithBitmap(canvasBitmap);
    }
    // This is the device-lost convention for Win2D.
    catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
    {
        // We lost our graphics device. Recreate it and reset
        // our Direct3D11CaptureFramePool.
        needsReset = true;
        recreateDevice = true;
    }

    if (needsReset)
    {
        ResetFramePool(frame.ContentSize, recreateDevice);
    }
}
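// ResetFramePool is called by many of the ProcessFrame variants in this section
// but not shown. A minimal sketch following the Win2D screen-capture sample
// convention these methods appear to be based on; field names (_canvasDevice,
// _framePool) are assumptions:
private void ResetFramePool(SizeInt32 size, bool recreateDevice)
{
    do
    {
        try
        {
            if (recreateDevice)
            {
                _canvasDevice = new CanvasDevice();
            }
            // Recreate the pool on the (possibly new) device at the new content size.
            _framePool.Recreate(_canvasDevice, DirectXPixelFormat.B8G8R8A8UIntNormalized, 2, size);
        }
        // Device recreation can itself hit device-lost; loop until it succeeds.
        catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
        {
            _canvasDevice = null;
            recreateDevice = true;
        }
    } while (_canvasDevice == null);
}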
private void OnFrameArrived(Direct3D11CaptureFramePool sender, object args)
{
    _frameCounter++;
    System.Diagnostics.Debug.WriteLine("Frame Arrived Here 2: " + _frameCounter);

    Direct3D11CaptureFrame frame = sender.TryGetNextFrame();
    DisplayFrame(frame);
    SetResult(frame);
}
private Direct3D11CaptureFrame GetNextFrame()
{
    Direct3D11CaptureFrame frame = null;
    do
    {
        frame = _framePool.TryGetNextFrame();
    } while (frame == null);
    return frame;
}
private void ProcessFrame(Direct3D11CaptureFrame frame)
{
    // Resize and device-lost leverage the same function on the
    // Direct3D11CaptureFramePool. Refactoring it this way avoids
    // throwing in the catch block below (device creation could always
    // fail) along with ensuring that resize completes successfully and
    // isn't vulnerable to device-lost.
    bool needsReset = false;
    bool recreateDevice = false;

    if ((frame.ContentSize.Width != _lastSize.Width) ||
        (frame.ContentSize.Height != _lastSize.Height))
    {
        needsReset = true;
        _lastSize = frame.ContentSize;
    }

    try
    {
        canvasBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, frame.Surface);

        foreach (var clip in ClipImages.Values)
        {
            clip.Resize(_canvasDevice, false);
        }

        using (CanvasDrawingSession ds = renderTarget.CreateDrawingSession())
        {
            ds.Clear(Colors.Black);
            ds.DrawImage(canvasBitmap);
        }

        foreach (var clip in ClipImages.Values)
        {
            clip.Rendar(canvasBitmap);
        }
    }
    // This is the device-lost convention for Win2D.
    catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
    {
        // We lost our graphics device. Recreate it and reset
        // our Direct3D11CaptureFramePool.
        needsReset = true;
        recreateDevice = true;
    }

    if (needsReset)
    {
        ResetFramePool(frame.ContentSize, recreateDevice);
    }
}
public void cleanUpCam()
{
    session?.Dispose();
    framePool?.Dispose();
    currentFrame?.Dispose();
    canvasDevice?.Dispose();

    session = null;
    framePool = null;
    currentFrame = null;
    canvasDevice = null;
    gcitem = null;
}
public void cleanUpCam()
{
    session?.Dispose();
    framePool?.Dispose();
    currentFrame?.Dispose();
    canvasDevice?.Dispose();

    session = null;
    framePool = null;
    currentFrame = null;
    canvasDevice = null;
    //gcitem = null; // cannot set to null until the app has finished encoding
}
public void ProcessFrame(Direct3D11CaptureFrame frame)
{
    bool needsReset = false;
    bool recreateDevice = false;

    if ((frame.ContentSize.Width != _lastSize.Width) ||
        (frame.ContentSize.Height != _lastSize.Height))
    {
        needsReset = true;
        _lastSize = frame.ContentSize;
    }

    try
    {
        IsRunning = true;
        CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, frame.Surface);
        //_currentFrame = canvasBitmap;

        TakeSegments(canvasBitmap);
        Blur();

        if (SendingAction.IsCompleted)
        {
            SendingAction = Task.Run(SendToA);
            Frametime.Text = "Frametime: " + FrameRate();
        }
        //SendToA();

        if (ShowCapture.IsChecked == true)
        {
            Task.Run(() => FillSurfaceWithBitmap(canvasBitmap));
        }
        else
        {
            using (var session = CanvasComposition.CreateDrawingSession(_surface))
            {
                session.Clear(Colors.Transparent);
            }
        }
    }
    catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
    {
        needsReset = true;
        recreateDevice = true;
    }

    if (needsReset)
    {
        ResetFramePool(frame.ContentSize, recreateDevice);
    }
}
private async Task ProcessFrame(Direct3D11CaptureFrame frame)
{
    // Resize and device-lost leverage the same function on the
    // Direct3D11CaptureFramePool. Refactoring it this way avoids
    // throwing in the catch block below (device creation could always
    // fail) along with ensuring that resize completes successfully and
    // isn't vulnerable to device-lost.
    bool needsReset = false;
    bool recreateDevice = false;

    if ((frame.ContentSize.Width != _lastSize.Width) ||
        (frame.ContentSize.Height != _lastSize.Height))
    {
        needsReset = true;
        _lastSize = frame.ContentSize;
    }

    try
    {
        // Convert our D3D11 surface into a Win2D object.
        var canvasBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, frame.Surface);

        var file = await ApplicationData.Current.LocalFolder.CreateFileAsync(fileName, CreationCollisionOption.ReplaceExisting);
        using (var stream = await file.OpenAsync(FileAccessMode.ReadWrite))
        {
            await canvasBitmap.SaveAsync(stream, CanvasBitmapFileFormat.Png);
        }
    }
    // This is the device-lost convention for Win2D.
    catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
    {
        // We lost our graphics device. Recreate it and reset
        // our Direct3D11CaptureFramePool.
        needsReset = true;
        recreateDevice = true;
    }

    if (needsReset)
    {
        ResetFramePool(frame.ContentSize, recreateDevice);
    }
}
private void ProcessFrame(Direct3D11CaptureFrame frame)
{
    // Resize and device-lost leverage the same function on the
    // Direct3D11CaptureFramePool. Refactoring it this way avoids
    // throwing in the catch block below (device creation could always
    // fail) along with ensuring that resize completes successfully and
    // isn't vulnerable to device-lost.
    bool needsReset = false;
    bool recreateDevice = false;

    if ((frame.ContentSize.Width != _lastSize.Width) ||
        (frame.ContentSize.Height != _lastSize.Height))
    {
        needsReset = true;
        _lastSize = frame.ContentSize;
    }

    try
    {
        // Take the D3D11 surface and draw it into a Composition surface.
        // Convert our D3D11 surface into a Win2D object.
        CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, frame.Surface);
        _currentFrame = canvasBitmap;

        // Helper that handles the drawing for us.
        FillSurfaceWithBitmap(canvasBitmap);
    }
    // This is the device-lost convention for Win2D.
    catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
    {
        // We lost our graphics device. Recreate it and reset
        // our Direct3D11CaptureFramePool.
        needsReset = true;
        recreateDevice = true;
    }

    if (needsReset)
    {
        ResetFramePool(frame.ContentSize, recreateDevice);
    }
}
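// FillSurfaceWithBitmap is referenced by several ProcessFrame variants above but
// not shown. A minimal sketch, assuming a Win2D composition drawing surface
// (_surface) as in the original capture sample; the field name is an assumption:
private void FillSurfaceWithBitmap(CanvasBitmap canvasBitmap)
{
    // Match the surface to the frame size, then draw the frame into it.
    CanvasComposition.Resize(_surface, canvasBitmap.Size);
    using (var session = CanvasComposition.CreateDrawingSession(_surface))
    {
        session.Clear(Colors.Transparent);
        session.DrawImage(canvasBitmap);
    }
}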
private void OnFrameArrived(Direct3D11CaptureFramePool sender, object args)
{
    #region Frame pool dynamic recreation (kept for reference)
    //var newSize = false;
    //using (var frame = sender.TryGetNextFrame())
    //{
    //    if (frame.ContentSize.Width != _lastSize.Width ||
    //        frame.ContentSize.Height != _lastSize.Height)
    //    {
    //        // The source has changed, so the capture size must change too:
    //        // resize the swap chain first, then the texture.
    //        newSize = true;
    //        _lastSize = frame.ContentSize;
    //        _swapChain.ResizeBuffers(
    //            2,
    //            _lastSize.Width,
    //            _lastSize.Height,
    //            SharpDX.DXGI.Format.B8G8R8A8_UNorm,
    //            SharpDX.DXGI.SwapChainFlags.None);
    //    }

    //    using (var sourceTexture = Direct3D11Helpers.CreateSharpDXTexture2D(frame.Surface))
    //    using (var backBuffer = _swapChain.GetBackBuffer<SharpDX.Direct3D11.Texture2D>(0))
    //    using (var renderTargetView = new SharpDX.Direct3D11.RenderTargetView(_d3dDevice, backBuffer))
    //    {
    //        _d3dDevice.ImmediateContext.ClearRenderTargetView(renderTargetView, new SharpDX.Mathematics.Interop.RawColor4(0, 0, 0, 1));
    //        _d3dDevice.ImmediateContext.CopyResource(sourceTexture, backBuffer);
    //    }
    //}
    //_swapChain.Present(1, SharpDX.DXGI.PresentFlags.None);

    //if (newSize) // recreate the frame pool
    //{
    //    _framePool.Recreate(
    //        _device,
    //        DirectXPixelFormat.B8G8R8A8UIntNormalized,
    //        2,
    //        _lastSize);
    //}
    #endregion

    _currentFrame = sender.TryGetNextFrame();
    _frameEvent.Set();
}
/// <summary>
/// Frame-arrived event handler.
/// </summary>
/// <param name="sender"></param>
/// <param name="args"></param>
private void OnFrameArrived(Direct3D11CaptureFramePool sender, object args)
{
    var newSize = false;

    using Direct3D11CaptureFrame frame = sender.TryGetNextFrame();
    if (frame == null)
    {
        return;
    }

    if (frame.ContentSize.Width != lastSize.Width ||
        frame.ContentSize.Height != lastSize.Height)
    {
        // The captured content has grown.
        // Resize the swap chain first, then blit the pixels.
        // Once that is done, drop this frame and recreate the frame pool.
        newSize = true;
        lastSize = frame.ContentSize;
        swapChain.ResizeBuffers(
            2,
            lastSize.Width,
            lastSize.Height,
            SharpDX.DXGI.Format.B8G8R8A8_UNorm,
            SharpDX.DXGI.SwapChainFlags.None);
    }

    using Texture2D backBuffer = swapChain.GetBackBuffer<Texture2D>(0);
    using Texture2D tex = Direct3D11Helper.CreateSharpDXTexture2D(frame.Surface);
    d3dDevice.ImmediateContext.CopyResource(tex, backBuffer);

    // Save the current frame to a bitmap.
    if (GetOneFrameFromBitmapEvent != null)
    {
        TryGetOneFrameToBitmap(tex);
    }
    // GetOneFrameToBitmap(tex);

    swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);

    if (newSize)
    {
        framePool.Recreate(device, Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized, 2, lastSize);
    }
}
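// TryGetOneFrameToBitmap is not shown above. A minimal sketch of the usual
// GPU-to-CPU readback it would need (staging texture + MapSubresource); the
// System.Drawing.Bitmap target and the event signature are assumptions, not
// the original project's code:
private void TryGetOneFrameToBitmap(Texture2D tex)
{
    // Describe a CPU-readable staging copy of the captured texture.
    var desc = tex.Description;
    desc.Usage = SharpDX.Direct3D11.ResourceUsage.Staging;
    desc.BindFlags = SharpDX.Direct3D11.BindFlags.None;
    desc.CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.Read;
    desc.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None;

    using (var staging = new Texture2D(d3dDevice, desc))
    {
        d3dDevice.ImmediateContext.CopyResource(tex, staging);
        var data = d3dDevice.ImmediateContext.MapSubresource(staging, 0, MapMode.Read, MapFlags.None);
        try
        {
            var bitmap = new System.Drawing.Bitmap(desc.Width, desc.Height,
                System.Drawing.Imaging.PixelFormat.Format32bppArgb);
            var bits = bitmap.LockBits(
                new System.Drawing.Rectangle(0, 0, desc.Width, desc.Height),
                System.Drawing.Imaging.ImageLockMode.WriteOnly, bitmap.PixelFormat);
            // Copy row by row because RowPitch can be larger than Width * 4.
            for (int y = 0; y < desc.Height; y++)
            {
                SharpDX.Utilities.CopyMemory(
                    bits.Scan0 + y * bits.Stride,
                    data.DataPointer + y * data.RowPitch,
                    desc.Width * 4);
            }
            bitmap.UnlockBits(bits);
            GetOneFrameFromBitmapEvent?.Invoke(bitmap);
        }
        finally
        {
            d3dDevice.ImmediateContext.UnmapSubresource(staging, 0);
        }
    }
}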
public async Task<int> InitializeCam()
{
    if (gcitem == null)
    {
        return 1;
    }

    if (canvasDevice == null)
    {
        canvasDevice = new CanvasDevice();
    }

    if (framePool == null)
    {
        /*
         * // only 2 frames ... depending on the number of buffers
         * framePool = Direct3D11CaptureFramePool.Create(
         *     canvasDevice,                              // D3D device
         *     DirectXPixelFormat.B8G8R8A8UIntNormalized, // Pixel format
         *     2,                                         // Number of frames
         *     gcitem.Size);                              // Size of the buffers
         */
        framePool = Direct3D11CaptureFramePool.CreateFreeThreaded(
            canvasDevice,
            DirectXPixelFormat.B8G8R8A8UIntNormalized,
            1,
            gcitem.Size);
    }

    initialRecordTime = DateTime.Now;
    previousRecordTime = DateTime.Now;

    currentFrame = null;
    unpackList = new List<UnpackItem>();
    _currentVideoStreamPos = 0;
    tempFile = null;
    return 0;
}
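// InitializeCam only builds the frame pool. A minimal sketch of how capture
// would then be started, assuming the same fields (framePool, gcitem, session)
// and a FrameArrived handler like the ones shown elsewhere in this section:
public void StartCapture()
{
    framePool.FrameArrived += OnFrameArrived;
    session = framePool.CreateCaptureSession(gcitem);
    session.StartCapture();
}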
private async void OnFrameArrived(Direct3D11CaptureFramePool sender, object args)
{
    // Need this to make more frames arrive?
    if (parent != null)
    {
        counter++;
        if (counter > 1000000)
        {
            counter = 0;
        }
        parent.msg("Arrived : " + counter.ToString());
    }

    if (isRecording == false)
    {
        return;
    }

    currentFrame = sender.TryGetNextFrame();
}
private void ProcessFrame(Direct3D11CaptureFrame frame)
{
    // Resize and device-lost leverage the same function on the
    // Direct3D11CaptureFramePool. Refactoring it this way avoids
    // throwing in the catch block below (device creation could always
    // fail) along with ensuring that resize completes successfully and
    // isn't vulnerable to device-lost.
    bool needsReset = false;
    bool recreateDevice = false;

    if ((frame.ContentSize.Width != _lastSize.Width) ||
        (frame.ContentSize.Height != _lastSize.Height))
    {
        needsReset = true;
        _lastSize = frame.ContentSize;
    }

    try
    {
        // CreateCopyFromSurfaceAsync is asynchronous; calling GetResults()
        // before it has completed throws, so block on the task to keep this
        // method synchronous.
        bitmap = SoftwareBitmap.CreateCopyFromSurfaceAsync(frame.Surface).AsTask().GetAwaiter().GetResult();

        byte[] imageBytes = new byte[4 * bitmap.PixelWidth * bitmap.PixelHeight];
        bitmap.CopyToBuffer(imageBytes.AsBuffer());
        Image.OnNext(new ImageData { SoftwareBitmap = bitmap, Bytes = imageBytes });
    }
    // This is the device-lost convention for Win2D.
    catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
    {
        // We lost our graphics device. Recreate it and reset
        // our Direct3D11CaptureFramePool.
        needsReset = true;
        recreateDevice = true;
    }

    if (needsReset)
    {
        ResetFramePool(frame.ContentSize, recreateDevice);
    }
}
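// ImageData as published above is not shown; it is assumed to be a simple DTO
// along these lines (hypothetical, inferred from the object initializer usage):
class ImageData
{
    public SoftwareBitmap SoftwareBitmap { get; set; } // the copied capture frame
    public byte[] Bytes { get; set; }                  // its raw BGRA8 pixels
}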
private void ProcessFrame(Direct3D11CaptureFrame frame)
{
    // Resize and device-lost leverage the same function on the
    // Direct3D11CaptureFramePool. Refactoring it this way avoids
    // throwing in the catch block below (device creation could always
    // fail) along with ensuring that resize completes successfully and
    // isn't vulnerable to device-lost.
    bool needsReset = false;

    if ((frame.ContentSize.Width != _lastSize.Width) ||
        (frame.ContentSize.Height != _lastSize.Height))
    {
        needsReset = true;
        _lastSize = frame.ContentSize;
    }

    try
    {
        // Convert our D3D11 surface into a Win2D object.
        var bitmap = CanvasBitmap.CreateFromDirect3D11Surface(this.measureCanvas.Device, frame.Surface);
        measureCanvas.Update(bitmap);
    }
    catch
    {
        needsReset = true;
    }

    if (needsReset)
    {
        _framePool.Recreate(
            this.measureCanvas.Device,
            DirectXPixelFormat.B8G8R8A8UIntNormalized,
            2,
            frame.ContentSize);
    }
}
private void CleanupSessionResources()
{
    if (framePool != null)
    {
        framePool.FrameArrived -= OnFrameArrived;
        framePool.Dispose();
    }
    framePool = null!;

    session?.Dispose();
    session = null!;

    if (item != null)
    {
        item.Closed -= OnClosed;
    }
    item = null!;

    blankTexture?.Dispose();
    blankTexture = null!;

    currentFrame?.Dispose();
    currentFrame = null!;
}
public async void SeparateThreadToSaveVideoStream()
{
    while (isRecording == true)
    {
        // Thread is counting.
        // Sometimes this loop gets stuck at frame 0, depending on the window chosen to record,
        // meaning OnFrameArrived is not being called.
        if (parent != null)
        {
            DateTime currentTimeLocal = DateTime.Now;
            TimeSpan elpasedTimeLocal = currentTimeLocal - initialRecordTime;
            string debugstr = "At frame: " + counter.ToString() + " Threadcounter: " + threadcounter.ToString();
            //debugstr += " StreamSize: " + ((int)(videostream.Size / 1024.0)).ToString() + "KB TimeElapsed: " + ((int)elpasedTimeLocal.TotalSeconds).ToString();
            debugstr += " StreamSize: " + ((int)(totalMemoryUsed / 1024.0)).ToString() + " KB";
            debugstr += " TimeElapsed: " + ((int)elpasedTimeLocal.TotalSeconds).ToString();
            parent.msg(debugstr);
        }

        threadcounter++;
        if (threadcounter > 200000)
        {
            threadcounter = 0;
        }

        if (currentFrame != null)
        {
            CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromDirect3D11Surface(canvasDevice, currentFrame.Surface);

            using (var inputstream = new InMemoryRandomAccessStream())
            {
                CancellationToken ct = new CancellationToken();
                await canvasBitmap.SaveAsync(inputstream, CanvasBitmapFileFormat.Png, 1f).AsTask(ct);

                ulong currentFrameLength = inputstream.Size;
                _currentVideoStreamPos = 0;
                totalMemoryUsed += currentFrameLength;

                DateTime currentTimeLocal = DateTime.Now;
                TimeSpan diff = currentTimeLocal - previousRecordTime;
                previousRecordTime = currentTimeLocal;

                UnpackItem unpackItem = new UnpackItem();
                unpackItem.pos = _currentVideoStreamPos;
                unpackItem.length = currentFrameLength;
                unpackItem.frameTime = diff;
                unpackItem.compressedBuffer = new Windows.Storage.Streams.Buffer((uint)inputstream.Size);

                inputstream.Seek(0);
                await inputstream.ReadAsync(unpackItem.compressedBuffer, (uint)inputstream.Size, InputStreamOptions.None); // read from stream to buffer
                await inputstream.FlushAsync();

                unpackList.Add(unpackItem);
            }

            currentFrame?.Dispose();
            currentFrame = null; // needed so the loop keeps running while the next frame is not yet ready
        }
        else
        {
            Thread.Sleep(10);
        }
    }

    //await CloseVideoStream();
    int len = unpackList.Count;
    DateTime currentTime = DateTime.Now;
    TimeSpan elpasedTime = currentTime - initialRecordTime;
    string debugstrx = "Num frame: " + len.ToString() + " Threadcounter: " + threadcounter.ToString();
    debugstrx += " TimeElapsed: " + ((int)elpasedTime.TotalSeconds).ToString();
    if (elpasedTime.TotalSeconds > 0)
    {
        debugstrx += " Frame Rate (fps) : " + (len / (double)elpasedTime.TotalSeconds).ToString();
    }
    if (parent != null)
    {
        parent.StartWritingReport(debugstrx);
    }
    //await UnpackVideoStream();
}
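// UnpackItem is not shown above; it is assumed to be a simple record of one
// encoded frame. Field types are guesses inferred from how they are assigned
// in the loop above:
class UnpackItem
{
    public ulong pos;                                      // position in the (virtual) video stream
    public ulong length;                                   // encoded PNG size in bytes
    public TimeSpan frameTime;                             // time since the previous frame
    public Windows.Storage.Streams.Buffer compressedBuffer; // the encoded frame data
}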
private void ProcessFrame(Direct3D11CaptureFrame frame)
{
    // Resize and device-lost leverage the same function on the
    // Direct3D11CaptureFramePool. Refactoring it this way avoids
    // throwing in the catch block below (device creation could always
    // fail) along with ensuring that resize completes successfully and
    // isn't vulnerable to device-lost.
    bool needsReset = false;
    bool recreateDevice = false;

    if ((frame.ContentSize.Width != _lastSize.Width) ||
        (frame.ContentSize.Height != _lastSize.Height))
    {
        needsReset = true;
        _lastSize = frame.ContentSize;
        _swapChain.ResizeBuffers(_lastSize.Width, _lastSize.Height);
    }

    Direct3D11CaptureFrame direct = frame;
    try
    {
        // Take the D3D11 surface and draw it into a Composition surface.
        if (direct.SystemRelativeTime - lastFrameTime < TimeSpan.FromSeconds(1))
        {
            //F**k Microsoft🤬
            MediaClip mediaClip = MediaClip.CreateFromSurface(direct.Surface, direct.SystemRelativeTime - lastFrameTime);
            composition.Clips.Add(mediaClip);
        }
        lastFrameTime = direct.SystemRelativeTime;

        // Convert our D3D11 surface into a Win2D object.
        canvasBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, direct.Surface);

        using (var drawingSession = _swapChain.CreateDrawingSession(Colors.Transparent))
        {
            //drawingSession.DrawCircle(400, 300, 100, Colors.Red, 20);
            ScaleEffect effect = new ScaleEffect()
            {
                Source = canvasBitmap,
                Scale = new Vector2((float)swapChain.ActualWidth / _item.Size.Width)
            };
            drawingSession.DrawImage(effect);
        }
        _swapChain.Present();
        //canvasControl.Invalidate();
        // Helper that handles the drawing for us, not shown.
    }
    // This is the device-lost convention for Win2D.
    catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
    {
        // We lost our graphics device. Recreate it and reset
        // our Direct3D11CaptureFramePool.
        needsReset = true;
        recreateDevice = true;
    }

    if (needsReset)
    {
        ResetFramePool(direct.ContentSize, recreateDevice);
    }
}
private void SetResult(Direct3D11CaptureFrame frame)
{
    _currentFrame = frame;
    _frameEvent.Set();
}
private void SetResult(Direct3D11CaptureFrame frame)
{
    _currentFrame = frame;
    //System.Diagnostics.Debug.WriteLine("_currentFrame is set");
    _frameEvent.Set();
}
private void ProcessFrame(Direct3D11CaptureFrame frame)
{
    // Do processing.
    using (frame)
    {
        using (var texture = Direct3D11Helper.CreateSharpDXTexture2D(frame.Surface))
        {
            var hdrMetadata = currentSession.HdrMetadata;
            var vertices = new ShaderInputStructure[]
            {
                // Left-Top
                new ShaderInputStructure
                {
                    Position = new Vector3(currentSession.DestD3DVsTopLeft.X, currentSession.DestD3DVsTopLeft.Y, 0),
                    TextureCoord = new Vector2(currentSession.DestD3DPsSamplerTopLeft.X, currentSession.DestD3DPsSamplerTopLeft.Y),
                },
                // Right-Top
                new ShaderInputStructure
                {
                    Position = new Vector3(currentSession.DestD3DVsBottomRight.X, currentSession.DestD3DVsTopLeft.Y, 0),
                    TextureCoord = new Vector2(currentSession.DestD3DPsSamplerBottomRight.X, currentSession.DestD3DPsSamplerTopLeft.Y)
                },
                // Left-Bottom
                new ShaderInputStructure
                {
                    Position = new Vector3(currentSession.DestD3DVsTopLeft.X, currentSession.DestD3DVsBottomRight.Y, 0),
                    TextureCoord = new Vector2(currentSession.DestD3DPsSamplerTopLeft.X, currentSession.DestD3DPsSamplerBottomRight.Y)
                },
                // Right-Top
                new ShaderInputStructure
                {
                    Position = new Vector3(currentSession.DestD3DVsBottomRight.X, currentSession.DestD3DVsTopLeft.Y, 0),
                    TextureCoord = new Vector2(currentSession.DestD3DPsSamplerBottomRight.X, currentSession.DestD3DPsSamplerTopLeft.Y)
                },
                // Right-Bottom
                new ShaderInputStructure
                {
                    Position = new Vector3(currentSession.DestD3DVsBottomRight.X, currentSession.DestD3DVsBottomRight.Y, 0),
                    TextureCoord = new Vector2(currentSession.DestD3DPsSamplerBottomRight.X, currentSession.DestD3DPsSamplerBottomRight.Y)
                },
                // Left-Bottom
                new ShaderInputStructure
                {
                    Position = new Vector3(currentSession.DestD3DVsTopLeft.X, currentSession.DestD3DVsBottomRight.Y, 0),
                    TextureCoord = new Vector2(currentSession.DestD3DPsSamplerTopLeft.X, currentSession.DestD3DPsSamplerBottomRight.Y)
                },
            };

            var triangleVertexBuffer = D3D11.Buffer.Create(d3dDevice, D3D11.BindFlags.VertexBuffer, vertices);
            var hdrMetadataBuffer = new D3D11.Buffer(d3dDevice,
                Utilities.SizeOf<ShaderHdrMetadata>(),
                D3D11.ResourceUsage.Default,
                D3D11.BindFlags.ConstantBuffer,
                D3D11.CpuAccessFlags.None,
                D3D11.ResourceOptionFlags.None,
                0);
            d3dContext.UpdateSubresource(ref hdrMetadata, hdrMetadataBuffer);

            d3dContext.InputAssembler.PrimitiveTopology = D3D.PrimitiveTopology.TriangleList;
            d3dContext.InputAssembler.InputLayout = inputLayout;
            d3dContext.InputAssembler.SetVertexBuffers(0,
                new D3D11.VertexBufferBinding(triangleVertexBuffer, Utilities.SizeOf<ShaderInputStructure>(), 0));
            d3dContext.VertexShader.Set(vsQuad);
            d3dContext.PixelShader.SetConstantBuffer(0, hdrMetadataBuffer);
            d3dContext.PixelShader.SetSampler(0, samplerState);

            var canvasTexture = new D3D11.Texture2D(d3dDevice, new D3D11.Texture2DDescription
            {
                Width = texture.Description.Width,
                Height = texture.Description.Height,
                MipLevels = 1,
                ArraySize = 1,
                Format = currentSession.HdrMetadata.EnableHdrProcessing
                    ? DXGI.Format.R16G16B16A16_Float
                    : DXGI.Format.B8G8R8A8_UNorm_SRgb,
                Usage = D3D11.ResourceUsage.Default,
                SampleDescription = new DXGI.SampleDescription(1, 0),
                BindFlags = D3D11.BindFlags.ShaderResource,
                CpuAccessFlags = D3D11.CpuAccessFlags.None,
                OptionFlags = D3D11.ResourceOptionFlags.None,
            });

            using (canvasTexture)
            using (var shaderResView = new D3D11.ShaderResourceView(d3dDevice, canvasTexture))
            {
                d3dContext.CopyResource(texture, canvasTexture);
                d3dContext.PixelShader.SetShaderResource(0, shaderResView);
                d3dContext.PixelShader.Set(psToneMapping);
                d3dContext.Draw(vertices.Length, 0);
            }

            triangleVertexBuffer.Dispose();
            hdrMetadataBuffer.Dispose();
        }
    }

    // Cleanup and signal event to proceed.
    currentSession.Session.Dispose();
}
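// ShaderInputStructure is referenced above but not shown. A hypothetical layout,
// assuming a simple position + texture-coordinate vertex consistent with the
// SizeOf<ShaderInputStructure>() stride used above; requires
// System.Runtime.InteropServices and the SharpDX vector types:
[StructLayout(LayoutKind.Sequential)]
struct ShaderInputStructure
{
    public Vector3 Position;     // clip-space position fed to the vertex shader
    public Vector2 TextureCoord; // sampler coordinates into the captured texture
}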
private void OnFrameArrived(Direct3D11CaptureFramePool sender, object args)
{
    currentFrame = sender.TryGetNextFrame();
    frameEvent.Set();
}
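// A hypothetical consumer for the handler above, assuming frameEvent is an
// AutoResetEvent and currentFrame is only published by OnFrameArrived. This is
// a sketch of how the event/field pair is typically drained, not code from the
// original project:
public Direct3D11CaptureFrame WaitForNextFrame(TimeSpan timeout)
{
    if (!frameEvent.WaitOne(timeout))
    {
        return null; // no frame arrived within the timeout
    }
    return currentFrame;
}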