/// <summary>
/// Proxy handler that forwards a screenshot request to any subscribers of the
/// <see cref="ScreenshotRequested"/> event.
/// </summary>
/// <param name="request">The screenshot request to forward.</param>
public void ScreenshotRequestedProxyHandler(ScreenshotRequest request)
{
    // Copy the delegate to a local first: the original check-then-invoke pattern
    // could throw NullReferenceException if the last subscriber detached between
    // the null check and the invocation.
    var handler = this.ScreenshotRequested;
    if (handler != null)
    {
        handler(request);
    }
}
/// <summary>
/// Implementation of capturing from the render target of the Direct3D9 Device (or DeviceEx).
/// Called once per frame: first harvests any completed GPU copy from a previous request,
/// then services a newly pending <c>Request</c>, and finally draws the overlay if enabled.
/// </summary>
/// <param name="device">The hooked Direct3D9 device.</param>
/// <param name="hook">Name of the calling hook, used only for debug messages.</param>
void DoCaptureRenderTarget(Device device, string hook)
{
    this.Frame();
    try
    {
        #region Screenshot Request
        // If we have issued the command to copy data to our render target, check if it is complete
        bool qryResult;
        if (_queryIssued && _requestCopy != null && _query.GetData(out qryResult, false))
        {
            // The GPU has finished copying data to _renderTargetCopy, we can now lock
            // the data and access it on another thread.
            _queryIssued = false;

            // Lock the render target
            SharpDX.Rectangle rect;
            SharpDX.DataRectangle lockedRect = LockRenderTarget(_renderTargetCopy, out rect);
            _renderTargetCopyLocked = true;

            // Copy the data from the render target on a background task so we do not
            // stall the render thread while the pixels are processed/serialized.
            System.Threading.Tasks.Task.Factory.StartNew(() =>
            {
                lock (_lockRenderTarget)
                {
                    ProcessCapture(rect.Width, rect.Height, lockedRect.Pitch, _renderTargetCopy.Description.Format.ToPixelFormat(), lockedRect.DataPointer, _requestCopy);
                }
            });
        }

        // Single frame capture request
        if (this.Request != null)
        {
            DateTime start = DateTime.Now;
            try
            {
                using (Surface renderTarget = device.GetRenderTarget(0))
                {
                    int width, height;

                    // If resizing of the captured image, determine correct dimensions
                    // (aspect ratio is preserved by scaling the other axis to match).
                    if (Request.Resize != null && (renderTarget.Description.Width > Request.Resize.Value.Width || renderTarget.Description.Height > Request.Resize.Value.Height))
                    {
                        if (renderTarget.Description.Width > Request.Resize.Value.Width)
                        {
                            width = Request.Resize.Value.Width;
                            height = (int)Math.Round((renderTarget.Description.Height * ((double)Request.Resize.Value.Width / (double)renderTarget.Description.Width)));
                        }
                        else
                        {
                            height = Request.Resize.Value.Height;
                            width = (int)Math.Round((renderTarget.Description.Width * ((double)Request.Resize.Value.Height / (double)renderTarget.Description.Height)));
                        }
                    }
                    else
                    {
                        width = renderTarget.Description.Width;
                        height = renderTarget.Description.Height;
                    }

                    // If existing _renderTargetCopy, ensure that it is the correct size and format
                    if (_renderTargetCopy != null && (_renderTargetCopy.Description.Width != width || _renderTargetCopy.Description.Height != height || _renderTargetCopy.Description.Format != renderTarget.Description.Format))
                    {
                        // Cleanup resources
                        Cleanup();
                    }

                    // Ensure that we have something to put the render target data into
                    if (!_resourcesInitialised || _renderTargetCopy == null)
                    {
                        CreateResources(device, width, height, renderTarget.Description.Format);
                    }

                    // Resize from render target Surface to resolvedSurface (also deals with resolving multi-sampling)
                    device.StretchRectangle(renderTarget, _resolvedTarget, TextureFilter.None);
                }

                // If the render target is locked from a previous request unlock it
                if (_renderTargetCopyLocked)
                {
                    // Wait for the ProcessCapture thread to finish with it
                    lock (_lockRenderTarget)
                    {
                        if (_renderTargetCopyLocked)
                        {
                            _renderTargetCopy.UnlockRectangle();
                            _renderTargetCopyLocked = false;
                        }
                    }
                }

                // Copy data from resolved target to our render target copy
                device.GetRenderTargetData(_resolvedTarget, _renderTargetCopy);

                // Snapshot the request for the async harvest above, then ask the GPU to
                // signal (via the event query) when the copy has completed.
                _requestCopy = Request.Clone();
                _query.Issue(Issue.End);
                _queryIssued = true;
            }
            finally
            {
                // We have completed the request - mark it as null so we do not continue to try to capture the same request
                // Note: If you are after high frame rates, consider implementing buffers here to capture more frequently
                //       and send back to the host application as needed. The IPC overhead significantly slows down
                //       the whole process if sending frame by frame.
                Request = null;
            }
            DateTime end = DateTime.Now;
            this.DebugMessage(hook + ": Capture time: " + (end - start).ToString());
        }
        #endregion

        if (this.Config.ShowOverlay)
        {
            #region Draw Overlay
            // Check if overlay needs to be initialised (first call, or the device changed)
            if (_overlayEngine == null || _overlayEngine.Device.NativePointer != device.NativePointer)
            {
                // Cleanup if necessary
                if (_overlayEngine != null)
                    RemoveAndDispose(ref _overlayEngine);

                _overlayEngine = ToDispose(new DX9.DXOverlayEngine());
                // Create Overlay
                _overlayEngine.Overlays.Add(new Capture.Hook.Common.Overlay
                {
                    Elements =
                    {
                        // Add frame rate
                        new Capture.Hook.Common.FramesPerSecond(new System.Drawing.Font("Arial", 14, FontStyle.Bold)) { Location = new System.Drawing.Point(5,5), Color = System.Drawing.Color.Red, AntiAliased = true }/*,
                        new Capture.Hook.Common.TextElement(new System.Drawing.Font("Arial", 16, FontStyle.Bold)) { Location = new Point(20, 50), Color = Color.Yellow, Text = "Test" }*/
                        // Example of adding an image to overlay (can implement semi transparency with Tint, e.g. Tint = Color.FromArgb(127, 255, 255, 255))
                        //new Capture.Hook.Common.ImageElement(@"C:\Temp\test.bmp") { Location = new System.Drawing.Point(20, 20) }
                    }
                });
                _overlayEngine.Initialise(device);
            }
            // Draw Overlay(s)
            else if (_overlayEngine != null)
            {
                foreach (var overlay in _overlayEngine.Overlays)
                    overlay.Frame();
                _overlayEngine.Draw();
            }
            #endregion
        }
    }
    catch (Exception e)
    {
        // Never let a capture/overlay failure crash the hooked application.
        DebugMessage(e.ToString());
    }
}
/// <summary>
/// Raises <see cref="ScreenshotRequested"/> one subscriber at a time, dropping
/// any subscriber that throws (e.g. an unreachable remoting client) so it is
/// not invoked again on subsequent raises.
/// </summary>
/// <param name="eventArgs">The screenshot request passed to each listener.</param>
private void SafeInvokeScreenshotRequested(ScreenshotRequest eventArgs)
{
    if (this.ScreenshotRequested == null)
    {
        return; //No Listeners
    }

    ScreenshotRequestedEvent target = null;
    foreach (Delegate subscriber in this.ScreenshotRequested.GetInvocationList())
    {
        try
        {
            target = (ScreenshotRequestedEvent)subscriber;
            target.Invoke(eventArgs);
        }
        catch (Exception)
        {
            // Could not reach the destination, so remove it from the list
            this.ScreenshotRequested -= target;
        }
    }
}
/// <summary>
/// Accepts an incoming screenshot request, but only when no capture is
/// already pending - a request in flight is never overwritten.
/// </summary>
/// <param name="request">The screenshot request to queue for the next frame.</param>
protected virtual void InterfaceEventProxy_ScreenshotRequested(ScreenshotRequest request)
{
    if (this.Request == null)
    {
        this.Request = request;
    }
}
/// <summary>
/// Handles a screenshot request by recording its id and signalling the copy
/// thread; the request object itself is never retained.
/// </summary>
/// <param name="request">The incoming request; disposed before returning.</param>
protected override void InterfaceEventProxy_ScreenshotRequested(ScreenshotRequest request)
{
    // Only act once the capture surfaces have been created; otherwise the
    // request is silently dropped.
    if (surfacesSetup)
    {
        this.lastRequestId = request.RequestId;
        this.copyReadySignal.Set();
    }

    // Release the request immediately - only the id is kept.
    request.Dispose();
}
/// <summary>
/// Screenshot requests are not supported by this hook implementation: the
/// incoming request is discarded (disposed) without being serviced.
/// </summary>
/// <param name="request">The request to discard.</param>
protected override void InterfaceEventProxy_ScreenshotRequested(ScreenshotRequest request)
{
    request.Dispose();
}
/// <summary>
/// Implementation of capturing from the render target of the Direct3D9 Device (or DeviceEx).
/// Called once per frame: first harvests any completed GPU copy from a previous request,
/// then services a newly pending <c>Request</c>, and finally draws the FPS/text overlay
/// with a D3D9 font if enabled.
/// </summary>
/// <param name="device">The hooked Direct3D9 device.</param>
/// <param name="hook">Name of the calling hook, used only for debug messages.</param>
void DoCaptureRenderTarget(Device device, string hook)
{
    this.Frame();
    try
    {
        #region Screenshot Request
        // If we have issued the command to copy data to our render target, check if it is complete
        bool qryResult;
        if (_queryIssued && _requestCopy != null && _query.GetData(out qryResult, false))
        {
            // The GPU has finished copying data to _renderTargetCopy, we can now lock
            // the data and access it on another thread.
            _queryIssued = false;

            // Lock the render target
            SharpDX.Rectangle rect;
            SharpDX.DataRectangle lockedRect = LockRenderTarget(_renderTargetCopy, out rect);
            _renderTargetCopyLocked = true;

            // Copy the data from the render target on a background task so we do not
            // stall the render thread while the pixels are processed/serialized.
            System.Threading.Tasks.Task.Factory.StartNew(() =>
            {
                lock (_lockRenderTarget)
                {
                    ProcessCapture(rect.Width, rect.Height, lockedRect.Pitch, _renderTargetCopy.Description.Format.ToPixelFormat(), lockedRect.DataPointer, _requestCopy);
                }
            });
        }

        // Single frame capture request
        if (this.Request != null)
        {
            DateTime start = DateTime.Now;
            try
            {
                using (Surface renderTarget = device.GetRenderTarget(0))
                {
                    int width, height;

                    // If resizing of the captured image, determine correct dimensions
                    // (aspect ratio is preserved by scaling the other axis to match).
                    if (Request.Resize != null && (renderTarget.Description.Width > Request.Resize.Value.Width || renderTarget.Description.Height > Request.Resize.Value.Height))
                    {
                        if (renderTarget.Description.Width > Request.Resize.Value.Width)
                        {
                            width = Request.Resize.Value.Width;
                            height = (int)Math.Round((renderTarget.Description.Height * ((double)Request.Resize.Value.Width / (double)renderTarget.Description.Width)));
                        }
                        else
                        {
                            height = Request.Resize.Value.Height;
                            width = (int)Math.Round((renderTarget.Description.Width * ((double)Request.Resize.Value.Height / (double)renderTarget.Description.Height)));
                        }
                    }
                    else
                    {
                        width = renderTarget.Description.Width;
                        height = renderTarget.Description.Height;
                    }

                    // If existing _renderTargetCopy, ensure that it is the correct size and format
                    if (_renderTargetCopy != null && (_renderTargetCopy.Description.Width != width || _renderTargetCopy.Description.Height != height || _renderTargetCopy.Description.Format != renderTarget.Description.Format))
                    {
                        // Cleanup resources
                        Cleanup();
                    }

                    // Ensure that we have something to put the render target data into
                    if (!_resourcesInitialised || _renderTargetCopy == null)
                    {
                        CreateResources(device, width, height, renderTarget.Description.Format);
                    }

                    // Resize from render target Surface to resolvedSurface (also deals with resolving multi-sampling)
                    device.StretchRectangle(renderTarget, _resolvedTarget, TextureFilter.None);
                }

                // If the render target is locked from a previous request unlock it
                if (_renderTargetCopyLocked)
                {
                    // Wait for the ProcessCapture thread to finish with it
                    lock (_lockRenderTarget)
                    {
                        if (_renderTargetCopyLocked)
                        {
                            _renderTargetCopy.UnlockRectangle();
                            _renderTargetCopyLocked = false;
                        }
                    }
                }

                // Copy data from resolved target to our render target copy
                device.GetRenderTargetData(_resolvedTarget, _renderTargetCopy);

                // Snapshot the request for the async harvest above, then ask the GPU to
                // signal (via the event query) when the copy has completed.
                _requestCopy = Request.Clone();
                _query.Issue(Issue.End);
                _queryIssued = true;
            }
            finally
            {
                // We have completed the request - mark it as null so we do not continue to try to capture the same request
                // Note: If you are after high frame rates, consider implementing buffers here to capture more frequently
                //       and send back to the host application as needed. The IPC overhead significantly slows down
                //       the whole process if sending frame by frame.
                Request = null;
            }
            DateTime end = DateTime.Now;
            this.DebugMessage(hook + ": Capture time: " + (end - start).ToString());
        }
        #endregion

        if (this.Config.ShowOverlay)
        {
            #region Draw frame rate
            // Recreate the font if this is the first draw or the device instance changed
            if (_font == null || _font.Device.NativePointer != device.NativePointer)
                CreateFont(device);
            if (this.FPS.GetFPS() >= 1)
            {
                _font.DrawText(null, String.Format("{0:N0} fps", this.FPS.GetFPS()), 5, 5, SharpDX.Color.Red);
            }

            if (this.TextDisplay != null && this.TextDisplay.Display)
            {
                // Alpha is derived from the display's remaining time so the text fades out
                _font.DrawText(null, this.TextDisplay.Text, 5, 25, new SharpDX.ColorBGRA(255, 0, 0, (byte)Math.Round((Math.Abs(1.0f - TextDisplay.Remaining) * 255f))));
            }
            #endregion
        }
    }
    catch (Exception e)
    {
        // Never let a capture/overlay failure crash the hooked application.
        DebugMessage(e.ToString());
    }
}
/// <summary>
/// Stores the incoming screenshot request so the next rendered frame services it.
/// Note: unlike the guarded variant, any previously pending request is overwritten.
/// </summary>
/// <param name="request">The screenshot request to service on the next frame.</param>
protected virtual void InterfaceEventProxy_ScreenshotRequested(ScreenshotRequest request)
{
    this.Request = request;
}
/// <summary>
/// Our present hook that will grab a copy of the backbuffer when requested. Note: this supports multi-sampling (anti-aliasing)
/// </summary>
/// <param name="swapChainPtr">Native pointer to the swap chain being presented.</param>
/// <param name="syncInterval">VSync interval passed through to the original Present.</param>
/// <param name="flags">Present flags passed through to the original Present.</param>
/// <returns>The HRESULT of the original method</returns>
int PresentHook(IntPtr swapChainPtr, int syncInterval, SharpDX.DXGI.PresentFlags flags)
{
    this.Frame();
    // NOTE(review): relies on SharpDX's implicit IntPtr conversion to wrap the native swap chain pointer
    SwapChain swapChain = (SharpDX.DXGI.SwapChain)swapChainPtr;
    try
    {
        #region Screenshot Request
        if (this.Request != null)
        {
            this.DebugMessage("PresentHook: Request Start");
            DateTime startTime = DateTime.Now;
            using (Texture2D currentRT = Texture2D.FromSwapChain<Texture2D>(swapChain, 0))
            {
                #region Determine region to capture
                // Default to the full backbuffer; an explicit region or a resize request narrows it
                Rectangle captureRegion = new Rectangle(0, 0, currentRT.Description.Width, currentRT.Description.Height);

                if (this.Request.RegionToCapture.Width > 0)
                {
                    captureRegion = new Rectangle(this.Request.RegionToCapture.Left, this.Request.RegionToCapture.Top, this.Request.RegionToCapture.Right, this.Request.RegionToCapture.Bottom);
                }
                else if (this.Request.Resize.HasValue)
                {
                    captureRegion = new Rectangle(0, 0, this.Request.Resize.Value.Width, this.Request.Resize.Value.Height);
                }
                #endregion

                // Create / Recreate resources as necessary
                EnsureResources(currentRT.Device, currentRT.Description, captureRegion, Request);

                Texture2D sourceTexture = null;

                // If texture is multisampled, then we can use ResolveSubresource to copy it into a non-multisampled texture
                if (currentRT.Description.SampleDescription.Count > 1 || Request.Resize.HasValue)
                {
                    if (Request.Resize.HasValue)
                        this.DebugMessage("PresentHook: resizing texture");
                    else
                        this.DebugMessage("PresentHook: resolving multi-sampled texture");

                    // Resolve into _resolvedRT
                    if (_resolvedRTKeyedMutex != null)
                        _resolvedRTKeyedMutex.Acquire(0, int.MaxValue);

                    currentRT.Device.ImmediateContext.ResolveSubresource(currentRT, 0, _resolvedRT, 0, _resolvedRT.Description.Format);

                    if (_resolvedRTKeyedMutex != null)
                        _resolvedRTKeyedMutex.Release(1);

                    if (Request.Resize.HasValue)
                    {
                        // Scale the resolved texture to the requested size on the second device
                        // using the screen-aligned quad renderer.
                        lock (_lock)
                        {
                            if (_resolvedRTKeyedMutex_Dev2 != null)
                                _resolvedRTKeyedMutex_Dev2.Acquire(1, int.MaxValue);
                            _saQuad.ShaderResource = _resolvedSharedSRV;
                            _saQuad.RenderTargetView = _resizedRTV;
                            _saQuad.RenderTarget = _resizedRT;
                            _saQuad.Render();
                            if (_resolvedRTKeyedMutex_Dev2 != null)
                                _resolvedRTKeyedMutex_Dev2.Release(0);
                        }

                        // set sourceTexture to the resized RT
                        sourceTexture = _resizedRT;
                    }
                    else
                    {
                        // Make sourceTexture be the resolved texture
                        sourceTexture = _resolvedRTShared;
                    }
                }
                else
                {
                    // Copy the resource into the shared texture
                    if (_resolvedRTKeyedMutex != null)
                        _resolvedRTKeyedMutex.Acquire(0, int.MaxValue);

                    currentRT.Device.ImmediateContext.CopySubresourceRegion(currentRT, 0, null, _resolvedRT, 0);

                    if (_resolvedRTKeyedMutex != null)
                        _resolvedRTKeyedMutex.Release(1);

                    sourceTexture = _resolvedRTShared;
                }

                // Copy to memory and send back to host process on a background thread so that we do not cause any delay in the rendering pipeline
                _requestCopy = this.Request.Clone(); // this.Request gets set to null, so copy the Request for use in the thread

                // Prevent the request from being processed a second time
                this.Request = null;

                // Only the shared (second-device) texture needs the keyed mutex held during the copy
                bool acquireLock = sourceTexture == _resolvedRTShared;

                ThreadPool.QueueUserWorkItem(new WaitCallback((o) =>
                {
                    // Acquire lock on second device
                    if (acquireLock && _resolvedRTKeyedMutex_Dev2 != null)
                        _resolvedRTKeyedMutex_Dev2.Acquire(1, int.MaxValue);

                    lock (_lock)
                    {
                        // Copy the subresource region, we are dealing with a flat 2D texture with no MipMapping, so 0 is the subresource index
                        sourceTexture.Device.ImmediateContext.CopySubresourceRegion(sourceTexture, 0, new ResourceRegion()
                        {
                            Top = captureRegion.Top,
                            Bottom = captureRegion.Bottom,
                            Left = captureRegion.Left,
                            Right = captureRegion.Right,
                            Front = 0,
                            Back = 1 // Must be 1 or only black will be copied
                        }, _finalRT, 0, 0, 0, 0);

                        // Release lock upon shared surface on second device
                        if (acquireLock && _resolvedRTKeyedMutex_Dev2 != null)
                            _resolvedRTKeyedMutex_Dev2.Release(0);

                        _finalRT.Device.ImmediateContext.End(_query);
                        _queryIssued = true;
                        // Busy-wait until the GPU signals that the staging copy is complete
                        while (!_finalRT.Device.ImmediateContext.GetData(_query).ReadBoolean())
                        {
                            // Spin (usually no spin takes place)
                        }

                        DateTime startCopyToSystemMemory = DateTime.Now;
                        try
                        {
                            DataBox db = default(DataBox);
                            if (_requestCopy.Format == ImageFormat.PixelData)
                            {
                                db = _finalRT.Device.ImmediateContext.MapSubresource(_finalRT, 0, MapMode.Read, SharpDX.Direct3D11.MapFlags.DoNotWait);
                                _finalRTMapped = true;
                            }
                            _queryIssued = false;

                            try
                            {
                                using (MemoryStream ms = new MemoryStream())
                                {
                                    switch (_requestCopy.Format)
                                    {
                                        case ImageFormat.Bitmap:
                                            Texture2D.ToStream(_finalRT.Device.ImmediateContext, _finalRT, ImageFileFormat.Bmp, ms);
                                            break;
                                        case ImageFormat.Jpeg:
                                            Texture2D.ToStream(_finalRT.Device.ImmediateContext, _finalRT, ImageFileFormat.Jpg, ms);
                                            break;
                                        case ImageFormat.Png:
                                            Texture2D.ToStream(_finalRT.Device.ImmediateContext, _finalRT, ImageFileFormat.Png, ms);
                                            break;
                                        case ImageFormat.PixelData:
                                            if (db.DataPointer != IntPtr.Zero)
                                            {
                                                ProcessCapture(_finalRT.Description.Width, _finalRT.Description.Height, db.RowPitch, System.Drawing.Imaging.PixelFormat.Format32bppArgb, db.DataPointer, _requestCopy);
                                            }
                                            return;
                                    }
                                    ms.Position = 0;
                                    ProcessCapture(ms, _requestCopy);
                                }
                            }
                            finally
                            {
                                this.DebugMessage("PresentHook: Copy to System Memory time: " + (DateTime.Now - startCopyToSystemMemory).ToString());
                            }

                            if (_finalRTMapped)
                            {
                                lock (_lock)
                                {
                                    _finalRT.Device.ImmediateContext.UnmapSubresource(_finalRT, 0);
                                    _finalRTMapped = false;
                                }
                            }
                        }
                        catch (SharpDX.SharpDXException exc)
                        {
                            // Catch DXGI_ERROR_WAS_STILL_DRAWING and ignore - the data isn't available yet
                            // NOTE(review): this swallows ALL SharpDXExceptions, not just WAS_STILL_DRAWING -
                            // consider checking exc.ResultCode before ignoring.
                        }
                    }
                }));

                // Note: it would be possible to capture multiple frames and process them in a background thread
            }
            this.DebugMessage("PresentHook: Copy BackBuffer time: " + (DateTime.Now - startTime).ToString());
            this.DebugMessage("PresentHook: Request End");
        }
        #endregion

        #region Draw overlay (after screenshot so we don't capture overlay as well)
        if (this.Config.ShowOverlay)
        {
            // Initialise Overlay Engine (first call, or the swap chain changed)
            if (_swapChainPointer != swapChain.NativePointer || _overlayEngine == null)
            {
                if (_overlayEngine != null)
                    _overlayEngine.Dispose();

                _overlayEngine = new DX11.DXOverlayEngine();
                _overlayEngine.Overlays.Add(new Capture.Hook.Common.Overlay
                {
                    Elements =
                    {
                        //new Capture.Hook.Common.TextElement(new System.Drawing.Font("Times New Roman", 22)) { Text = "Test", Location = new System.Drawing.Point(200, 200), Color = System.Drawing.Color.Yellow, AntiAliased = false},
                        new Capture.Hook.Common.FramesPerSecond(new System.Drawing.Font("Arial", 16)) { Location = new System.Drawing.Point(5,5), Color = System.Drawing.Color.Red, AntiAliased = true },
                        //new Capture.Hook.Common.ImageElement(@"C:\Temp\test.bmp") { Location = new System.Drawing.Point(20, 20) }
                    }
                });
                _overlayEngine.Initialise(swapChain);

                _swapChainPointer = swapChain.NativePointer;
            }
            // Draw Overlay(s)
            else if (_overlayEngine != null)
            {
                foreach (var overlay in _overlayEngine.Overlays)
                    overlay.Frame();
                _overlayEngine.Draw();
            }
        }
        #endregion
    }
    catch (Exception e)
    {
        // If there is an error we do not want to crash the hooked application, so swallow the exception
        this.DebugMessage("PresentHook: Exeception: " + e.GetType().FullName + ": " + e.ToString());
        //return unchecked((int)0x8000FFFF); //E_UNEXPECTED
    }

    // As always we need to call the original method, note that EasyHook will automatically skip the hook and call the original method
    // i.e. calling it here will not cause a stack overflow into this function
    return DXGISwapChain_PresentHook.Original(swapChainPtr, syncInterval, flags);
}
/// <summary>
/// Creates or recreates the GPU resources needed to service a capture: the optional
/// resize chain (_resizedRT / _resizedRTV / _saQuad), the shared resolve texture
/// (_resolvedRT / _resolvedRTShared with its keyed mutexes), and the CPU-readable
/// staging texture (_finalRT). No-ops when existing resources already match the
/// device, backbuffer description and capture region.
/// </summary>
/// <param name="device">The device that owns the swap chain backbuffer.</param>
/// <param name="description">Description of the backbuffer being captured.</param>
/// <param name="captureRegion">Region of the source copied into the staging texture.</param>
/// <param name="request">The current screenshot request (supplies resize dimensions).</param>
void EnsureResources(SharpDX.Direct3D11.Device device, Texture2DDescription description, Rectangle captureRegion, ScreenshotRequest request)
{
    // (Re)create resize resources when a resize is requested and the current
    // resized target is missing, on the wrong device, or the wrong size.
    if (_device != null && request.Resize != null && (_resizedRT == null || (_resizedRT.Device.NativePointer != _device.NativePointer || _resizedRT.Description.Width != request.Resize.Value.Width || _resizedRT.Description.Height != request.Resize.Value.Height)))
    {
        // Create/Recreate resources for resizing
        RemoveAndDispose(ref _resizedRT);
        RemoveAndDispose(ref _resizedRTV);
        RemoveAndDispose(ref _saQuad);

        _resizedRT = ToDispose(new Texture2D(_device, new Texture2DDescription()
        {
            Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm, // Supports BMP/PNG/etc
            Height = request.Resize.Value.Height,
            Width = request.Resize.Value.Width,
            ArraySize = 1,
            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
            BindFlags = BindFlags.RenderTarget,
            MipLevels = 1,
            Usage = ResourceUsage.Default,
            OptionFlags = ResourceOptionFlags.None
        }));

        _resizedRTV = ToDispose(new RenderTargetView(_device, _resizedRT));

        _saQuad = ToDispose(new DX11.ScreenAlignedQuadRenderer());
        _saQuad.Initialize(new DX11.DeviceManager(_device));
    }

    // Check if _resolvedRT or _finalRT require creation
    if (_finalRT != null && _finalRT.Device.NativePointer == _device.NativePointer &&
        _finalRT.Description.Height == captureRegion.Height && _finalRT.Description.Width == captureRegion.Width &&
        _resolvedRT != null && _resolvedRT.Description.Height == description.Height && _resolvedRT.Description.Width == description.Width &&
        _resolvedRT.Device.NativePointer == device.NativePointer && _resolvedRT.Description.Format == description.Format
        )
    {
        // Everything already matches - nothing to do.
        return;
    }

    RemoveAndDispose(ref _query);
    RemoveAndDispose(ref _resolvedRT);
    RemoveAndDispose(ref _resolvedSharedSRV);
    RemoveAndDispose(ref _finalRT);
    RemoveAndDispose(ref _resolvedRTShared);

    // Event query used to detect when GPU copies into _finalRT have completed
    _query = new Query(_device, new QueryDescription()
    {
        Flags = QueryFlags.None,
        Type = QueryType.Event
    });
    _queryIssued = false;

    // Created on the game's device; shared (via keyed mutex) with the second device
    _resolvedRT = ToDispose(new Texture2D(device, new Texture2DDescription()
    {
        CpuAccessFlags = CpuAccessFlags.None,
        Format = description.Format, // for multisampled backbuffer, this must be same format
        Height = description.Height,
        Usage = ResourceUsage.Default,
        Width = description.Width,
        ArraySize = 1,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0), // Ensure single sample
        BindFlags = BindFlags.ShaderResource,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.SharedKeyedmutex
    }));

    // Retrieve reference to the keyed mutex
    _resolvedRTKeyedMutex = ToDispose(_resolvedRT.QueryInterfaceOrNull<SharpDX.DXGI.KeyedMutex>());

    // Open the shared texture on the second device so it can be read without
    // stalling the game's immediate context.
    using (var resource = _resolvedRT.QueryInterface<SharpDX.DXGI.Resource>())
    {
        _resolvedRTShared = ToDispose(_device.OpenSharedResource<Texture2D>(resource.SharedHandle));
        _resolvedRTKeyedMutex_Dev2 = ToDispose(_resolvedRTShared.QueryInterfaceOrNull<SharpDX.DXGI.KeyedMutex>());
    }

    // SRV for use if resizing
    _resolvedSharedSRV = ToDispose(new ShaderResourceView(_device, _resolvedRTShared));

    // CPU-readable staging texture sized to the capture region
    _finalRT = ToDispose(new Texture2D(_device, new Texture2DDescription()
    {
        CpuAccessFlags = CpuAccessFlags.Read,
        Format = description.Format,
        Height = captureRegion.Height,
        Usage = ResourceUsage.Staging,
        Width = captureRegion.Width,
        ArraySize = 1,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        BindFlags = BindFlags.None,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.None
    }));
    _finalRTMapped = false;
}
protected void ProcessCapture(byte[] bitmapData, ScreenshotRequest request) { try { if (request != null) { Interface.SendScreenshotResponse(new Screenshot(request.RequestId, bitmapData) { Format = request.Format, }); } LastCaptureTime = Timer.Elapsed; } catch (RemotingException) { // Ignore remoting exceptions // .NET Remoting will throw an exception if the host application is unreachable } catch (Exception e) { DebugMessage(e.ToString()); } }
/// <summary>
/// Convenience overload: drains <paramref name="stream"/> into a byte array and
/// forwards it to the byte[] overload for transmission to the host process.
/// </summary>
/// <param name="stream">Stream containing the captured image data.</param>
/// <param name="request">The originating screenshot request.</param>
protected void ProcessCapture(Stream stream, ScreenshotRequest request)
{
    ProcessCapture(ReadFullStream(stream), request);
}
/// <summary>
/// Process the capture based on the requested format: packages the raw image
/// bits either as raw pixel data or as a GDI+-encoded image, then sends the
/// resulting <see cref="Screenshot"/> back to the host.
/// </summary>
/// <param name="width">image width</param>
/// <param name="height">image height</param>
/// <param name="pitch">data pitch (bytes per row)</param>
/// <param name="format">target format</param>
/// <param name="pBits">IntPtr to the image data</param>
/// <param name="request">The original request (ignored when null)</param>
protected void ProcessCapture(int width, int height, int pitch, PixelFormat format, IntPtr pBits, ScreenshotRequest request)
{
    if (request == null)
        return;

    if (format == PixelFormat.Undefined)
    {
        DebugMessage("Unsupported render target format");
        return;
    }

    // Snapshot the unmanaged buffer into managed memory. The pitch already
    // includes any row padding, so height * pitch covers the whole image.
    int byteCount = height * pitch;
    byte[] pixels = new byte[byteCount];
    Marshal.Copy(pBits, pixels, 0, byteCount);

    Screenshot screenshot;
    if (request.Format == Capture.Interface.ImageFormat.PixelData)
    {
        // Raw pixels requested - no encoding, just attach the layout metadata.
        screenshot = new Screenshot(request.RequestId, pixels)
        {
            Format = request.Format,
            PixelFormat = format,
            Height = height,
            Width = width,
            Stride = pitch
        };
    }
    else
    {
        // Encode through a GDI+ bitmap; BMP is the default unless JPEG/PNG was requested.
        using (var bitmap = pixels.ToBitmap(width, height, pitch, format))
        {
            System.Drawing.Imaging.ImageFormat encoder;
            switch (request.Format)
            {
                case Capture.Interface.ImageFormat.Jpeg:
                    encoder = System.Drawing.Imaging.ImageFormat.Jpeg;
                    break;
                case Capture.Interface.ImageFormat.Png:
                    encoder = System.Drawing.Imaging.ImageFormat.Png;
                    break;
                default:
                    encoder = System.Drawing.Imaging.ImageFormat.Bmp;
                    break;
            }

            screenshot = new Screenshot(request.RequestId, bitmap.ToByteArray(encoder))
            {
                Format = request.Format,
                Height = bitmap.Height,
                Width = bitmap.Width
            };
        }
    }

    // Send the response
    SendResponse(screenshot);
}