/// <summary>
/// Handles an incoming screenshot request from the interface event proxy.
/// A new request is ignored while a previous one is still pending.
/// </summary>
/// <param name="request">The screenshot request to store for processing.</param>
protected virtual void InterfaceEventProxy_ScreenshotRequested(ScreenshotRequest request)
{
    // Only accept the request when no other request is currently queued.
    if (Request == null)
    {
        this.Request = request;
    }
}
/// <summary>
/// Verifies that the full_page flag is appended to the query string
/// after the encoded url parameter.
/// </summary>
public void Serialize2()
{
    var target = new Uri("https://google.com/");
    var request = new ScreenshotRequest(target)
    {
        FullPage = true,
    };

    var queryString = request.ToQueryString();

    Assert.Equal("?url=https%3A%2F%2Fgoogle.com%2F&full_page=true", queryString);
}
/// <summary>
/// Records the id of the requested screenshot and signals the copy thread,
/// then releases the request object. Requests arriving before the capture
/// surfaces are set up are disposed without being recorded.
/// </summary>
/// <param name="request">The incoming request; always disposed by this handler.</param>
protected override void InterfaceEventProxy_ScreenshotRequested(ScreenshotRequest request)
{
    if (surfacesSetup)
    {
        lastRequestId = request.RequestId;
        copyReadySignal.Set();
    }

    // The request is consumed here in all cases.
    request.Dispose();
}
/// <summary>
/// Dispatches an incoming capture/stream control request. Pause, stop,
/// resume, stream and capture requests adjust the paused flag and the
/// current frame target under the request lock. Any failure stops streaming,
/// tears down the target and reports the error.
/// </summary>
/// <param name="request">The control request to process.</param>
public void newRequest(ScreenshotRequest request)
{
    try
    {
        lock (requestLock)
        {
            if (request is PauseRequest)
            {
                paused = true;
            }
            else if (request is StopRequest)
            {
                paused = true;
                if (frameTarget != null)
                {
                    frameTarget.close();
                    frameTarget = null;
                }
            }
            else if (request is ResumeRequest)
            {
                // Only resume when there is an active target to stream to.
                if (frameTarget != null)
                {
                    paused = false;
                }
            }
            else if (request is StreamRequest streamRequest)
            {
                frameTarget = new FrameServer(streamRequest.Host, streamRequest.Port);
                this.captureRect = streamRequest.Region;
                // NOTE(review): if Fps is an integer type this division truncates
                // to 0 for any Fps > 1 — confirm Fps/freq are floating point.
                this.freq = 1 / streamRequest.Fps;
                paused = false;
            }
            else if (request is CaptureRequest captureRequest)
            {
                frameTarget = new IPCHostSink(ProcessId, request.RequestId, Interface);
                this.captureRect = captureRequest.Region;
                this.freq = 1 / captureRequest.Fps;
                paused = false;
            }
        }
    }
    catch (Exception e)
    {
        // On any failure, stop streaming and tear down the target.
        // (Fixed: the original set paused = true twice in this handler.)
        paused = true;
        if (frameTarget != null)
        {
            frameTarget.close();
            frameTarget = null;
        }
        ErrorMessage("Exception when processing request " + request + "\n\r" + e);
    }
}
/// <summary>
/// Click handler that requests a web screenshot of a fixed demo URL
/// via the Cloudmersive conversion client.
/// </summary>
private void btnScreenshot_Click(object sender, RoutedEventArgs e)
{
    var client = new CloudmersiveConvertClient();
    var req = new ScreenshotRequest
    {
        Url = "http://kaycircle.com",
    };
    client.WebScreenshot(req);
}
/// <summary>
/// Verifies that a request with only a target URL serializes to a query
/// string containing a single encoded url parameter.
/// </summary>
public void Serialize1()
{
    var request = new ScreenshotRequest(new Uri("https://google.com/"));

    Assert.Equal("?url=https%3A%2F%2Fgoogle.com%2F", request.ToQueryString());

    var parameters = request.GetParameters();
    Assert.Single(parameters);
}
/// <summary>
/// Verifies serialization of multiple optional parameters: full_page,
/// user_agent and timeout (in milliseconds) are appended after the url.
/// </summary>
public void Serialize3()
{
    var request = new ScreenshotRequest(new Uri("https://google.com/"))
    {
        UserAgent = "a",
        FullPage = false,
        Timeout = TimeSpan.FromSeconds(30),
    };

    Assert.Equal(
        "?url=https%3A%2F%2Fgoogle.com%2F&full_page=false&user_agent=a&timeout=30000",
        request.ToQueryString());
}
/// <summary>
/// Sends a screenshot request for the given topic/discussion over the peer.
/// Silently does nothing when the peer is missing or not connected.
/// </summary>
/// <param name="topicId">Identifier of the topic.</param>
/// <param name="discussionId">Identifier of the discussion.</param>
public void SendScreenshotRequest(int topicId, int discussionId)
{
    var connected = peer != null && peer.PeerState == PeerStateValue.Connected;
    if (!connected)
    {
        return;
    }

    var payload = ScreenshotRequest.Write(topicId, discussionId);
    peer.OpCustom((byte)DiscussionOpCode.ScreenshotRequest, payload, true);
}
/// <summary>
/// Converts the URL described by <paramref name="req"/> to a PDF by POSTing
/// the request as JSON to the Cloudmersive web-to-pdf endpoint and returns
/// the raw response bytes.
/// </summary>
/// <param name="req">The request to serialize as the JSON request body.</param>
/// <returns>The PDF document as a byte array.</returns>
public byte[] Web_UrlToPdf(ScreenshotRequest req)
{
    // Dispose the client and response even when the request fails.
    // (Fixed: the original leaked the client on a non-success status and
    // read the response body after disposing the client; it also created an
    // unused MultipartFormDataContent.)
    // NOTE(review): creating an HttpClient per call can exhaust sockets under
    // load — consider a shared/static instance if this is called frequently.
    using (HttpClient httpClient = new HttpClient())
    {
        httpClient.DefaultRequestHeaders.Add("Apikey", Apikey);

        string stringPayload = JsonConvert.SerializeObject(req);
        var httpContent = new StringContent(stringPayload, Encoding.UTF8, "application/json");

        using (HttpResponseMessage response = httpClient.PostAsync("https://api.cloudmersive.com/convert/web/url/to/pdf", httpContent).Result)
        {
            response.EnsureSuccessStatusCode();
            // Read the body before the client/response are disposed.
            return response.Content.ReadAsByteArrayAsync().Result;
        }
    }
}
/// <summary>
/// Packages raw bitmap bytes into a Screenshot response for the given request
/// and sends it back over the interface, then records the capture time.
/// A null request only updates the capture time.
/// </summary>
/// <param name="bitmapData">Raw image bytes to return to the host.</param>
/// <param name="request">The originating request, or null.</param>
protected void ProcessCapture(byte[] bitmapData, ScreenshotRequest request)
{
    try
    {
        if (request != null)
        {
            var screenshot = new Screenshot(request.RequestId, bitmapData)
            {
                Format = request.Format,
            };
            Interface.SendScreenshotResponse(screenshot);
        }
        LastCaptureTime = Timer.Elapsed;
    }
    catch (RemotingException)
    {
        // .NET Remoting throws when the host application is unreachable; ignore.
    }
    catch (Exception e)
    {
        DebugMessage(e.ToString());
    }
}
/// <summary>
/// Creates or recreates the GPU resources needed to copy (and optionally
/// resize) the backbuffer: a resize render target when request.Resize is set,
/// a shared single-sample resolved texture, and a CPU-readable staging texture
/// sized to <paramref name="captureRegion"/>. Safe to call every frame; it
/// returns early when the cached resources still match.
/// </summary>
/// <param name="device">The device that owns the swap chain backbuffer.</param>
/// <param name="description">Description of the backbuffer texture.</param>
/// <param name="captureRegion">Region of the backbuffer to capture.</param>
/// <param name="request">The current screenshot request (only Resize is read).</param>
void EnsureResources(SharpDX.Direct3D11.Device device, Texture2DDescription description, Rectangle captureRegion, ScreenshotRequest request)
{
    // Recreate the resize pipeline only when a resize is requested and the
    // cached target is missing, belongs to another device, or has the wrong size.
    if (_device != null && request.Resize != null && (_resizedRT == null || (_resizedRT.Device.NativePointer != _device.NativePointer || _resizedRT.Description.Width != request.Resize.Value.Width || _resizedRT.Description.Height != request.Resize.Value.Height)))
    {
        // Create/Recreate resources for resizing
        RemoveAndDispose(ref _resizedRT);
        RemoveAndDispose(ref _resizedRTV);
        RemoveAndDispose(ref _saQuad);

        _resizedRT = ToDispose(new Texture2D(_device, new Texture2DDescription()
        {
            Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm, // Supports BMP/PNG/etc
            Height = request.Resize.Value.Height,
            Width = request.Resize.Value.Width,
            ArraySize = 1,
            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
            BindFlags = BindFlags.RenderTarget,
            MipLevels = 1,
            Usage = ResourceUsage.Default,
            OptionFlags = ResourceOptionFlags.None
        }));
        _resizedRTV = ToDispose(new RenderTargetView(_device, _resizedRT));

        // Screen-aligned quad renderer used to draw the resolved texture into
        // the resize target.
        _saQuad = ToDispose(new DX11.ScreenAlignedQuadRenderer());
        _saQuad.Initialize(new DX11.DeviceManager(_device));
    }

    // Check if _resolvedRT or _finalRT require creation
    if (_finalRT != null && _finalRT.Device.NativePointer == _device.NativePointer &&
        _finalRT.Description.Height == captureRegion.Height && _finalRT.Description.Width == captureRegion.Width &&
        _resolvedRT != null && _resolvedRT.Description.Height == description.Height && _resolvedRT.Description.Width == description.Width &&
        _resolvedRT.Device.NativePointer == device.NativePointer &&
        _resolvedRT.Description.Format == description.Format
        )
    {
        // Existing resources still match the current backbuffer/region; nothing to do.
        return;
    }

    RemoveAndDispose(ref _query);
    RemoveAndDispose(ref _resolvedRT);
    RemoveAndDispose(ref _resolvedSharedSRV);
    RemoveAndDispose(ref _finalRT);
    RemoveAndDispose(ref _resolvedRTShared);

    // Event query used later to detect when the GPU copy has completed.
    _query = new Query(_device, new QueryDescription()
    {
        Flags = QueryFlags.None,
        Type = QueryType.Event
    });
    _queryIssued = false;

    // Single-sample shared texture the (possibly multisampled) backbuffer is
    // resolved/copied into; created on the swap chain's device.
    _resolvedRT = ToDispose(new Texture2D(device, new Texture2DDescription()
    {
        CpuAccessFlags = CpuAccessFlags.None,
        Format = description.Format, // for multisampled backbuffer, this must be same format
        Height = description.Height,
        Usage = ResourceUsage.Default,
        Width = description.Width,
        ArraySize = 1,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0), // Ensure single sample
        BindFlags = BindFlags.ShaderResource,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.SharedKeyedmutex
    }));

    // Retrieve reference to the keyed mutex
    _resolvedRTKeyedMutex = ToDispose(_resolvedRT.QueryInterfaceOrNull<SharpDX.DXGI.KeyedMutex>());

    // Open the shared texture on the second device (_device) so it can be
    // read/resized there; grab its keyed mutex as well.
    using (var resource = _resolvedRT.QueryInterface<SharpDX.DXGI.Resource>())
    {
        _resolvedRTShared = ToDispose(_device.OpenSharedResource<Texture2D>(resource.SharedHandle));
        _resolvedRTKeyedMutex_Dev2 = ToDispose(_resolvedRTShared.QueryInterfaceOrNull<SharpDX.DXGI.KeyedMutex>());
    }

    // SRV for use if resizing
    _resolvedSharedSRV = ToDispose(new ShaderResourceView(_device, _resolvedRTShared));

    // CPU-readable staging texture that the capture region is copied into.
    _finalRT = ToDispose(new Texture2D(_device, new Texture2DDescription()
    {
        CpuAccessFlags = CpuAccessFlags.Read,
        Format = description.Format,
        Height = captureRegion.Height,
        Usage = ResourceUsage.Staging,
        Width = captureRegion.Width,
        ArraySize = 1,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        BindFlags = BindFlags.None,
        MipLevels = 1,
        OptionFlags = ResourceOptionFlags.None
    }));
    _finalRTMapped = false;
}
/// <summary>
/// Called by EasyHook to begin any hooking etc in the target process
/// </summary>
/// <param name="InContext">EasyHook remoting context for the injected process.</param>
/// <param name="channelName">Name of the IPC channel used to reach the host process.</param>
/// <param name="strVersion">Direct3DVersion passed as a string so that GAC registration is not required</param>
/// <param name="showOverlay">Whether or not to show an overlay</param>
public void Run(
    RemoteHooking.IContext InContext,
    String channelName,
    String strVersion,
    bool showOverlay)
{
    Direct3DVersion version = (Direct3DVersion)Enum.Parse(typeof(Direct3DVersion), strVersion);

    // NOTE: We are now already running within the target process
    try
    {
        _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "DLL Injection succeeded");
        bool isX64Process = RemoteHooking.IsX64Process(RemoteHooking.GetCurrentProcessId());
        _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "64-bit Process: " + isX64Process.ToString());

        if (version == Direct3DVersion.AutoDetect)
        {
            // Attempt to determine the correct version based on loaded module.
            // In most cases this will work fine, however it is perfectly ok for an application to use a D3D10 device along with D3D11 devices
            // so the version might matched might not be the one you want to use
            IntPtr d3D9Loaded = IntPtr.Zero;
            IntPtr d3D10Loaded = IntPtr.Zero;
            IntPtr d3D10_1Loaded = IntPtr.Zero;
            IntPtr d3D11Loaded = IntPtr.Zero;
            IntPtr d3D11_1Loaded = IntPtr.Zero;

            // Poll for the Direct3D module every 100ms, giving up after 5 seconds.
            int delayTime = 100;
            int retryCount = 0;
            while (d3D9Loaded == IntPtr.Zero && d3D10Loaded == IntPtr.Zero && d3D10_1Loaded == IntPtr.Zero && d3D11Loaded == IntPtr.Zero && d3D11_1Loaded == IntPtr.Zero)
            {
                retryCount++;
                d3D9Loaded = GetModuleHandle("d3d9.dll");
                d3D10Loaded = GetModuleHandle("d3d10.dll");
                d3D10_1Loaded = GetModuleHandle("d3d10_1.dll");
                d3D11Loaded = GetModuleHandle("d3d11.dll");
                d3D11_1Loaded = GetModuleHandle("d3d11_1.dll");
                Thread.Sleep(delayTime);

                if (retryCount * delayTime > 5000)
                {
                    _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Unsupported Direct3DVersion, or Direct3D DLL not loaded within 5 seconds.");
                    return;
                }
            }

            // Prefer the newest API when several D3D DLLs are loaded.
            version = Direct3DVersion.Unknown;
            if (d3D11_1Loaded != IntPtr.Zero)
            {
                _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Autodetect found Direct3D 11.1");
                version = Direct3DVersion.Direct3D11_1;
            }
            else if (d3D11Loaded != IntPtr.Zero)
            {
                _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Autodetect found Direct3D 11");
                version = Direct3DVersion.Direct3D11;
            }
            else if (d3D10_1Loaded != IntPtr.Zero)
            {
                _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Autodetect found Direct3D 10.1");
                version = Direct3DVersion.Direct3D10_1;
            }
            else if (d3D10Loaded != IntPtr.Zero)
            {
                _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Autodetect found Direct3D 10");
                version = Direct3DVersion.Direct3D10;
            }
            else if (d3D9Loaded != IntPtr.Zero)
            {
                _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Autodetect found Direct3D 9");
                version = Direct3DVersion.Direct3D9;
            }
        }

        // NOTE(review): Direct3D11_1 (set by autodetect above) and Unknown have
        // no case here, so _directXHook stays null and the ShowOverlay access
        // below would throw — confirm whether those paths are reachable/intended.
        switch (version)
        {
            case Direct3DVersion.Direct3D9:
                _directXHook = new DXHookD3D9(_interface);
                break;
            case Direct3DVersion.Direct3D10:
                _directXHook = new DXHookD3D10(_interface);
                break;
            case Direct3DVersion.Direct3D10_1:
                _directXHook = new DXHookD3D10_1(_interface);
                break;
            case Direct3DVersion.Direct3D11:
                _directXHook = new DXHookD3D11(_interface);
                break;
            default:
                _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Unsupported Direct3DVersion");
                break;
        }

        _directXHook.ShowOverlay = showOverlay;
        _directXHook.Hook();
    }
    catch (Exception e)
    {
        /*
         * We should notify our host process about this error...
         */
        //_interface.ReportError(RemoteHooking.GetCurrentProcessId(), e);
        _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Exception during device creation and hooking: \r\n" + e.Message);
        // Stay alive (without hooking) until the host stops responding.
        while (_interface.Ping(RemoteHooking.GetCurrentProcessId()))
        {
            Thread.Sleep(100);
        }
        return;
    }

    // Wait for host process termination...
    try
    {
        // When not using GAC there can be issues with remoting assemblies resolving correctly
        // this is a workaround that ensures that the current assembly is correctly associated
        AppDomain currentDomain = AppDomain.CurrentDomain;
        currentDomain.AssemblyResolve += (sender, args) =>
        {
            return (this.GetType().Assembly.FullName == args.Name ? this.GetType().Assembly : null);
        };

        // Keep the injected DLL alive while the host responds to pings, and
        // forward any pending screenshot request to the hook each iteration.
        while (_interface.Ping(RemoteHooking.GetCurrentProcessId()))
        {
            Thread.Sleep(10);
            ScreenshotRequest request = _interface.GetScreenshotRequest(RemoteHooking.GetCurrentProcessId());
            if (request != null)
            {
                _directXHook.Request = request;
            }
        }
    }
    catch
    {
        // .NET Remoting will raise an exception if host is unreachable
    }
    finally
    {
        try
        {
            _directXHook.Cleanup();
        }
        catch
        {
        }
    }
}
/// <summary>
/// Accepts a screenshot request from the interface event proxy and stores it
/// for later processing; any previously pending request is replaced.
/// </summary>
/// <param name="request">The request to queue.</param>
protected virtual void InterfaceEventProxy_ScreenshotRequested(ScreenshotRequest request)
{
    Request = request;
}
/// <summary>
/// Convenience overload: drains <paramref name="stream"/> into a byte array
/// and forwards it to the byte-array capture processor.
/// </summary>
/// <param name="stream">Stream containing the captured image data.</param>
/// <param name="request">The originating screenshot request.</param>
protected void ProcessCapture(Stream stream, ScreenshotRequest request)
{
    var bytes = ReadFullStream(stream);
    ProcessCapture(bytes, request);
}
/// <summary>
/// Process the capture based on the requested format.
/// </summary>
/// <param name="width">image width</param>
/// <param name="height">image height</param>
/// <param name="pitch">data pitch (bytes per row)</param>
/// <param name="format">target format</param>
/// <param name="pBits">IntPtr to the image data</param>
/// <param name="request">The original request; null requests are ignored</param>
protected void ProcessCapture(int width, int height, int pitch, PixelFormat format, IntPtr pBits, ScreenshotRequest request)
{
    if (request == null)
    {
        return;
    }
    if (format == PixelFormat.Undefined)
    {
        DebugMessage("Unsupported render target format");
        return;
    }

    // Copy the image data from the buffer
    int size = height * pitch;
    var data = new byte[size];
    Marshal.Copy(pBits, data, 0, size);

    // Prepare the response
    Screenshot response = null;
    if (request.Format == Capture.Interface.ImageFormat.PixelData)
    {
        // Return the raw data
        response = new Screenshot(request.RequestId, data)
        {
            Format = request.Format,
            PixelFormat = format,
            Height = height,
            Width = width,
            Stride = pitch
        };
    }
    else
    {
        // Return an image
        using (var bm = data.ToBitmap(width, height, pitch, format))
        {
            // Default to BMP unless JPEG or PNG was explicitly requested.
            System.Drawing.Imaging.ImageFormat imgFormat = System.Drawing.Imaging.ImageFormat.Bmp;
            switch (request.Format)
            {
                case Capture.Interface.ImageFormat.Jpeg:
                    imgFormat = System.Drawing.Imaging.ImageFormat.Jpeg;
                    break;
                case Capture.Interface.ImageFormat.Png:
                    imgFormat = System.Drawing.Imaging.ImageFormat.Png;
                    break;
            }
            response = new Screenshot(request.RequestId, bm.ToByteArray(imgFormat))
            {
                Format = request.Format,
                Height = bm.Height,
                Width = bm.Width
            };
        }
    }

    // Send the response
    SendResponse(response);
}
/// <summary>
/// Process the capture based on the requested format.
/// </summary>
/// <param name="width">image width</param>
/// <param name="height">image height</param>
/// <param name="pitch">data pitch (bytes per row)</param>
/// <param name="format">target format</param>
/// <param name="pBits">IntPtr to the image data</param>
/// <param name="request">The original request; null requests are ignored</param>
protected void ProcessCapture(int width, int height, int pitch, PixelFormat format, IntPtr pBits, ScreenshotRequest request)
{
    if (request == null)
    {
        return;
    }
    if (format == PixelFormat.Undefined)
    {
        DebugMessage("Unsupported render target format");
        return;
    }

    // Copy the image data from the buffer
    int size = height * pitch;
    var data = new byte[size];
    Marshal.Copy(pBits, data, 0, size);

    // Prepare the response
    Screenshot response = null;
    if (request.Format == Capture.Interface.ImageFormat.AverageColor)
    {
        DebugMessage(String.Format("AVG: Request {0} w:{1} h:{2}", data.Length, width, height));
        try
        {
            // Average the channels over a sparse grid: sample every `step`-th
            // pixel in each direction, reading bytes at offsets +2/+1/+0
            // (assumes 4 bytes per pixel — TODO confirm channel order matches
            // the actual render target format).
            uint r = 0;
            uint g = 0;
            uint b = 0;
            const int Bpp = 4; //Bytes per pixel
            int count = 0;
            int step = 4;
            for (int j = 0; j < height / step; j++)
            {
                for (int i = 0; i < width / step; i++)
                {
                    int position = (j * step * width + i * step) * Bpp;
                    r += data[position + 2];
                    g += data[position + 1];
                    b += data[position + 0];
                    count++;
                }
            }
            // NOTE(review): when width or height is smaller than step, count
            // stays 0 and these divisions throw (caught and logged below).
            response = new Screenshot(request.RequestId, (byte)(r / count), (byte)(g / count), (byte)(b / count))
            {
                Format = request.Format,
                PixelFormat = format,
                Height = height,
                Width = width,
                Stride = pitch
            };
            DebugMessage(String.Format("AVG: {0},{1},{2}", response.R, response.G, response.B));
        }
        catch (Exception e)
        {
            // NOTE(review): on failure `response` stays null and is still
            // passed to SendResponse below — confirm SendResponse tolerates null.
            DebugMessage(String.Format("AVG: Error {0}", e.Message));
        }
    }
    else if (request.Format == Capture.Interface.ImageFormat.PixelData)
    {
        // Return the raw data
        response = new Screenshot(request.RequestId, data)
        {
            Format = request.Format,
            PixelFormat = format,
            Height = height,
            Width = width,
            Stride = pitch
        };
    }
    else
    {
        // Return an image; default to BMP unless JPEG or PNG was requested.
        using (var bm = data.ToBitmap(width, height, pitch, format))
        {
            System.Drawing.Imaging.ImageFormat imgFormat = System.Drawing.Imaging.ImageFormat.Bmp;
            switch (request.Format)
            {
                case Capture.Interface.ImageFormat.Jpeg:
                    imgFormat = System.Drawing.Imaging.ImageFormat.Jpeg;
                    break;
                case Capture.Interface.ImageFormat.Png:
                    imgFormat = System.Drawing.Imaging.ImageFormat.Png;
                    break;
            }
            response = new Screenshot(request.RequestId, bm.ToByteArray(imgFormat))
            {
                Format = request.Format,
                Height = bm.Height,
                Width = bm.Width
            };
        }
    }

    // Send the response
    SendResponse(response);
}
/// <summary>
/// Discards the incoming screenshot request by disposing it immediately;
/// no capture is performed by this override.
/// </summary>
/// <param name="request">The request to discard.</param>
protected override void InterfaceEventProxy_ScreenshotRequested(ScreenshotRequest request) => request.Dispose();
/// <summary>
/// Take screenshot of URL Fully renders a website and returns a PNG screenshot of the full page image. Javascript, HTML5, CSS and other advanced features are all supported.
/// </summary>
/// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
/// <param name="input">Screenshot request parameters</param>
/// <returns>Object</returns>
public Object ConvertWebUrlToScreenshot(ScreenshotRequest input)
{
    // Delegate to the overload returning full HTTP info and unwrap the payload.
    var response = ConvertWebUrlToScreenshotWithHttpInfo(input);
    return response.Data;
}
/// <summary>
/// Take screenshot of URL Fully renders a website and returns a PNG screenshot of the full page image. Javascript, HTML5, CSS and other advanced features are all supported.
/// </summary>
/// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
/// <param name="input">Screenshot request parameters</param>
/// <returns>Task of Object</returns>
public async System.Threading.Tasks.Task<Object> ConvertWebUrlToScreenshotAsync(ScreenshotRequest input)
{
    // Delegate to the overload returning full HTTP info and unwrap the payload.
    var response = await ConvertWebUrlToScreenshotAsyncWithHttpInfo(input);
    return response.Data;
}
/// <summary>
/// Take screenshot of URL Fully renders a website and returns a PNG screenshot of the full page image. Javascript, HTML5, CSS and other advanced features are all supported.
/// </summary>
/// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
/// <param name="input">Screenshot request parameters</param>
/// <returns>Task of ApiResponse (Object)</returns>
public async System.Threading.Tasks.Task<ApiResponse<Object>> ConvertWebUrlToScreenshotAsyncWithHttpInfo(ScreenshotRequest input)
{
    // verify the required parameter 'input' is set
    if (input == null)
    {
        throw new ApiException(400, "Missing required parameter 'input' when calling ConvertWebApi->ConvertWebUrlToScreenshot");
    }

    var localVarPath = "/convert/web/url/to/screenshot";
    var localVarPathParams = new Dictionary<String, String>();
    var localVarQueryParams = new List<KeyValuePair<String, String>>();
    var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
    var localVarFormParams = new Dictionary<String, String>();
    var localVarFileParams = new Dictionary<String, FileParameter>();
    Object localVarPostBody = null;

    // to determine the Content-Type header
    String[] localVarHttpContentTypes = new String[] {
        "application/json",
        "text/json",
        "application/xml",
        "text/xml",
        "application/x-www-form-urlencoded"
    };
    String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);

    // to determine the Accept header
    String[] localVarHttpHeaderAccepts = new String[] {
        "application/json",
        "text/json",
        "application/xml",
        "text/xml"
    };
    String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
    if (localVarHttpHeaderAccept != null)
    {
        localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);
    }

    // Models are serialized to the configured body format; raw byte arrays
    // are posted as-is.
    if (input != null && input.GetType() != typeof(byte[]))
    {
        localVarPostBody = Configuration.ApiClient.Serialize(input); // http body (model) parameter
    }
    else
    {
        localVarPostBody = input; // byte array
    }

    // authentication (Apikey) required
    if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("Apikey")))
    {
        localVarHeaderParams["Apikey"] = Configuration.GetApiKeyWithPrefix("Apikey");
    }

    // make the HTTP request
    IRestResponse localVarResponse = (IRestResponse)await Configuration.ApiClient.CallApiAsync(localVarPath,
        Method.POST, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams,
        localVarPathParams, localVarHttpContentType);

    int localVarStatusCode = (int)localVarResponse.StatusCode;

    // Allow a configured factory to translate HTTP failures into exceptions.
    if (ExceptionFactory != null)
    {
        Exception exception = ExceptionFactory("ConvertWebUrlToScreenshot", localVarResponse);
        if (exception != null)
        {
            throw exception;
        }
    }

    return (new ApiResponse<Object>(localVarStatusCode,
        localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()),
        (Object)Configuration.ApiClient.Deserialize(localVarResponse, typeof(Object))));
}
/// <summary>
/// Forwards a screenshot request to subscribers of the ScreenshotRequested
/// event; does nothing when there are no subscribers.
/// </summary>
/// <param name="request">The request to forward.</param>
public void ScreenshotRequestedProxyHandler(ScreenshotRequest request)
{
    // Capture the delegate once so the null check and invocation are atomic.
    var handler = ScreenshotRequested;
    if (handler != null)
    {
        handler.Invoke(request);
    }
}
/// <summary>
/// Called by EasyHook to begin any hooking etc in the target process
/// </summary>
/// <param name="InContext">EasyHook remoting context for the injected process.</param>
/// <param name="channelName">Name of the IPC channel used to reach the host process.</param>
/// <param name="version">Which Direct3D API version to hook.</param>
/// <param name="VertexMode">Vertex mode passed through to the D3D9 hook.</param>
public void Run(
    RemoteHooking.IContext InContext,
    String channelName,
    Direct3DVersion version,
    int VertexMode)
{
    // NOTE: We are now already running within the target process
    try
    {
        _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "DLL Injection succeeded");
        bool isX64Process = RemoteHooking.IsX64Process(RemoteHooking.GetCurrentProcessId());
        _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "64-bit Process: " + isX64Process.ToString());

        switch (version)
        {
            case Direct3DVersion.Direct3D9:
                _directXHook = new DXHookD3D9(_interface, VertexMode);
                break;
            case Direct3DVersion.Direct3D10:
                _directXHook = new DXHookD3D10(_interface);
                break;
            //case Direct3DVersion.Direct3D10_1:
            //    _directXHook = new DXHookD3D10(_interface);
            //    break;
            case Direct3DVersion.Direct3D11:
                _directXHook = new DXHookD3D11(_interface);
                break;
            default:
                // NOTE(review): _directXHook remains null on this path, so the
                // Hook() call below will throw for unsupported versions (the
                // exception is caught and reported) — confirm this is intended.
                _interface.OnDebugMessage(RemoteHooking.GetCurrentProcessId(), "Unsupported Direct3DVersion");
                break;
        }
        _directXHook.Hook();
    }
    catch (Exception e)
    {
        /*
         * We should notify our host process about this error...
         */
        _interface.ReportError(RemoteHooking.GetCurrentProcessId(), e);
        return;
    }

    // Wait for host process termination...
    try
    {
        // Keep the injected DLL alive while the host responds to pings, and
        // forward any pending screenshot request to the hook each iteration.
        while (_interface.Ping(RemoteHooking.GetCurrentProcessId()))
        {
            Thread.Sleep(10);
            ScreenshotRequest request = _interface.GetScreenshotRequest(RemoteHooking.GetCurrentProcessId());
            if (request != null)
            {
                _directXHook.Request = request;
            }
        }
    }
    catch
    {
        // .NET Remoting will raise an exception if host is unreachable
    }
    finally
    {
        try
        {
            _directXHook.Cleanup();
        }
        catch
        {
        }
    }
}
/// <summary>
/// Queues a screenshot request for the hooked process, registering the
/// configured notification callback with the screenshot manager.
/// </summary>
/// <param name="request">The request to enqueue.</param>
public void sendRequest(ScreenshotRequest request)
{
    var processId = hookedProcess.Id;
    ScreenshotManager.AddScreenshotRequest(processId, request, notify);
}
/// <summary>
/// Our present hook that will grab a copy of the backbuffer when requested. Note: this supports multi-sampling (anti-aliasing)
/// </summary>
/// <param name="swapChainPtr">Pointer to the swap chain being presented.</param>
/// <param name="syncInterval">Sync interval forwarded to the original Present.</param>
/// <param name="flags">Present flags forwarded to the original Present.</param>
/// <returns>The HRESULT of the original method</returns>
int PresentHook(IntPtr swapChainPtr, int syncInterval, SharpDX.DXGI.PresentFlags flags)
{
    this.Frame();
    SwapChain swapChain = (SharpDX.DXGI.SwapChain)swapChainPtr;
    try
    {
        #region Screenshot Request
        if (this.Request != null)
        {
            this.DebugMessage("PresentHook: Request Start");
            DateTime startTime = DateTime.Now;
            using (Texture2D currentRT = Texture2D.FromSwapChain<Texture2D>(swapChain, 0))
            {
                #region Determine region to capture
                // Default: capture the entire backbuffer.
                Rectangle captureRegion = new Rectangle(0, 0, currentRT.Description.Width, currentRT.Description.Height);

                if (this.Request.RegionToCapture.Width > 0)
                {
                    // NOTE(review): Right/Bottom are passed where this Rectangle
                    // constructor may expect width/height — confirm which
                    // Rectangle type this is and whether that is intended.
                    captureRegion = new Rectangle(this.Request.RegionToCapture.Left, this.Request.RegionToCapture.Top, this.Request.RegionToCapture.Right, this.Request.RegionToCapture.Bottom);
                }
                else if (this.Request.Resize.HasValue)
                {
                    captureRegion = new Rectangle(0, 0, this.Request.Resize.Value.Width, this.Request.Resize.Value.Height);
                }
                #endregion

                // Create / Recreate resources as necessary
                EnsureResources(currentRT.Device, currentRT.Description, captureRegion, Request);

                Texture2D sourceTexture = null;

                // If texture is multisampled, then we can use ResolveSubresource to copy it into a non-multisampled texture
                if (currentRT.Description.SampleDescription.Count > 1 || Request.Resize.HasValue)
                {
                    if (Request.Resize.HasValue)
                    {
                        this.DebugMessage("PresentHook: resizing texture");
                    }
                    else
                    {
                        this.DebugMessage("PresentHook: resolving multi-sampled texture");
                    }

                    // Resolve into _resolvedRT
                    if (_resolvedRTKeyedMutex != null)
                    {
                        _resolvedRTKeyedMutex.Acquire(0, int.MaxValue);
                    }
                    currentRT.Device.ImmediateContext.ResolveSubresource(currentRT, 0, _resolvedRT, 0, _resolvedRT.Description.Format);
                    if (_resolvedRTKeyedMutex != null)
                    {
                        _resolvedRTKeyedMutex.Release(1);
                    }

                    if (Request.Resize.HasValue)
                    {
                        // Draw the resolved texture into the smaller resize
                        // target via the screen-aligned quad on the second device.
                        lock (_lock)
                        {
                            if (_resolvedRTKeyedMutex_Dev2 != null)
                            {
                                _resolvedRTKeyedMutex_Dev2.Acquire(1, int.MaxValue);
                            }
                            _saQuad.ShaderResource = _resolvedSharedSRV;
                            _saQuad.RenderTargetView = _resizedRTV;
                            _saQuad.RenderTarget = _resizedRT;
                            _saQuad.Render();
                            if (_resolvedRTKeyedMutex_Dev2 != null)
                            {
                                _resolvedRTKeyedMutex_Dev2.Release(0);
                            }
                        }

                        // set sourceTexture to the resized RT
                        sourceTexture = _resizedRT;
                    }
                    else
                    {
                        // Make sourceTexture be the resolved texture
                        sourceTexture = _resolvedRTShared;
                    }
                }
                else
                {
                    // Copy the resource into the shared texture
                    if (_resolvedRTKeyedMutex != null)
                    {
                        _resolvedRTKeyedMutex.Acquire(0, int.MaxValue);
                    }
                    currentRT.Device.ImmediateContext.CopySubresourceRegion(currentRT, 0, null, _resolvedRT, 0);
                    if (_resolvedRTKeyedMutex != null)
                    {
                        _resolvedRTKeyedMutex.Release(1);
                    }

                    sourceTexture = _resolvedRTShared;
                }

                // Copy to memory and send back to host process on a background thread so that we do not cause any delay in the rendering pipeline
                _requestCopy = this.Request.Clone(); // this.Request gets set to null, so copy the Request for use in the thread

                // Prevent the request from being processed a second time
                this.Request = null;

                // Only the shared resolved texture is protected by the keyed
                // mutex; the resized RT is device-local.
                bool acquireLock = sourceTexture == _resolvedRTShared;

                ThreadPool.QueueUserWorkItem(new WaitCallback((o) =>
                {
                    // Acquire lock on second device
                    if (acquireLock && _resolvedRTKeyedMutex_Dev2 != null)
                    {
                        _resolvedRTKeyedMutex_Dev2.Acquire(1, int.MaxValue);
                    }

                    lock (_lock)
                    {
                        // Copy the subresource region, we are dealing with a flat 2D texture with no MipMapping, so 0 is the subresource index
                        sourceTexture.Device.ImmediateContext.CopySubresourceRegion(sourceTexture, 0, new ResourceRegion()
                        {
                            Top = captureRegion.Top,
                            Bottom = captureRegion.Bottom,
                            Left = captureRegion.Left,
                            Right = captureRegion.Right,
                            Front = 0,
                            Back = 1 // Must be 1 or only black will be copied
                        }, _finalRT, 0, 0, 0, 0);

                        // Release lock upon shared surface on second device
                        if (acquireLock && _resolvedRTKeyedMutex_Dev2 != null)
                        {
                            _resolvedRTKeyedMutex_Dev2.Release(0);
                        }

                        // Issue an event query and spin until the GPU signals
                        // that the copy into the staging texture has completed.
                        _finalRT.Device.ImmediateContext.End(_query);
                        _queryIssued = true;
                        while (!_finalRT.Device.ImmediateContext.GetData(_query).ReadBoolean())
                        {
                            // Spin (usually no spin takes place)
                        }

                        DateTime startCopyToSystemMemory = DateTime.Now;
                        try
                        {
                            DataBox db = default(DataBox);
                            //Arvy added to support features required by average color capture
                            if (_requestCopy.Format == ImageFormat.PixelData || _requestCopy.Format == ImageFormat.AverageColor)
                            {
                                db = _finalRT.Device.ImmediateContext.MapSubresource(_finalRT, 0, MapMode.Read, SharpDX.Direct3D11.MapFlags.DoNotWait);
                                _finalRTMapped = true;
                            }
                            _queryIssued = false;

                            try
                            {
                                using (MemoryStream ms = new MemoryStream())
                                {
                                    switch (_requestCopy.Format)
                                    {
                                        case ImageFormat.Bitmap:
                                            Texture2D.ToStream(_finalRT.Device.ImmediateContext, _finalRT, ImageFileFormat.Bmp, ms);
                                            break;
                                        case ImageFormat.Jpeg:
                                            Texture2D.ToStream(_finalRT.Device.ImmediateContext, _finalRT, ImageFileFormat.Jpg, ms);
                                            break;
                                        case ImageFormat.Png:
                                            Texture2D.ToStream(_finalRT.Device.ImmediateContext, _finalRT, ImageFileFormat.Png, ms);
                                            break;
                                        case ImageFormat.PixelData:
                                        //Arvy added to support features required by average color capture
                                        case ImageFormat.AverageColor:
                                            // Process the mapped raw pixels directly.
                                            if (db.DataPointer != IntPtr.Zero)
                                            {
                                                ProcessCapture(_finalRT.Description.Width, _finalRT.Description.Height, db.RowPitch, System.Drawing.Imaging.PixelFormat.Format32bppArgb, db.DataPointer, _requestCopy);
                                            }
                                            return;
                                    }
                                    ms.Position = 0;
                                    ProcessCapture(ms, _requestCopy);
                                }
                            }
                            finally
                            {
                                this.DebugMessage("PresentHook: Copy to System Memory time: " + (DateTime.Now - startCopyToSystemMemory).ToString());
                            }

                            // NOTE(review): the PixelData/AverageColor path
                            // returns above and skips this unmap — confirm the
                            // subresource is unmapped elsewhere in that case.
                            if (_finalRTMapped)
                            {
                                lock (_lock)
                                {
                                    _finalRT.Device.ImmediateContext.UnmapSubresource(_finalRT, 0);
                                    _finalRTMapped = false;
                                }
                            }
                        }
                        catch (SharpDX.SharpDXException exc)
                        {
                            // Catch DXGI_ERROR_WAS_STILL_DRAWING and ignore - the data isn't available yet
                        }
                    }
                }));

                // Note: it would be possible to capture multiple frames and process them in a background thread
            }
            this.DebugMessage("PresentHook: Copy BackBuffer time: " + (DateTime.Now - startTime).ToString());
            this.DebugMessage("PresentHook: Request End");
        }
        #endregion

        #region Draw overlay (after screenshot so we don't capture overlay as well)
        if (this.Config.ShowOverlay)
        {
            // Initialise Overlay Engine
            if (_swapChainPointer != swapChain.NativePointer || _overlayEngine == null)
            {
                if (_overlayEngine != null)
                {
                    _overlayEngine.Dispose();
                }

                _overlayEngine = new DX11.DXOverlayEngine();
                _overlayEngine.Overlays.Add(new Capture.Hook.Common.Overlay
                {
                    Elements =
                    {
                        //new Capture.Hook.Common.TextElement(new System.Drawing.Font("Times New Roman", 22)) { Text = "Test", Location = new System.Drawing.Point(200, 200), Color = System.Drawing.Color.Yellow, AntiAliased = false},
                        new Capture.Hook.Common.FramesPerSecond(new System.Drawing.Font("Arial", 16)) { Location = new System.Drawing.Point(5, 5), Color = System.Drawing.Color.Red, AntiAliased = true },
                        //new Capture.Hook.Common.ImageElement(@"C:\Temp\test.bmp") { Location = new System.Drawing.Point(20, 20) }
                    }
                });
                _overlayEngine.Initialise(swapChain);

                _swapChainPointer = swapChain.NativePointer;
            }
            // Draw Overlay(s)
            else if (_overlayEngine != null)
            {
                foreach (var overlay in _overlayEngine.Overlays)
                {
                    overlay.Frame();
                }
                _overlayEngine.Draw();
            }
        }
        #endregion
    }
    catch (Exception e)
    {
        // If there is an error we do not want to crash the hooked application, so swallow the exception
        this.DebugMessage("PresentHook: Exeception: " + e.GetType().FullName + ": " + e.ToString());
        //return unchecked((int)0x8000FFFF); //E_UNEXPECTED
    }

    // As always we need to call the original method, note that EasyHook will automatically skip the hook and call the original method
    // i.e. calling it here will not cause a stack overflow into this function
    return (DXGISwapChain_PresentHook.Original(swapChainPtr, syncInterval, flags));
}
/// <summary>
/// Implementation of capturing from the render target of the Direct3D9 Device (or DeviceEx).
/// Handles both completing a previously issued asynchronous GPU copy (via the event query)
/// and starting a new single-frame capture when <c>this.Request</c> is set, then optionally
/// draws the in-game overlay.
/// </summary>
/// <param name="device">The hooked Direct3D9 device to capture from.</param>
/// <param name="hook">Name of the calling hook (e.g. Present/EndScene); used only for debug log messages.</param>
void DoCaptureRenderTarget(Device device, string hook)
{
    this.Frame();
    try
    {
        #region Screenshot Request
        // If we have issued the command to copy data to our render target, check if it is complete.
        // GetData with flush=false polls the event query without stalling the render thread.
        bool qryResult;
        if (_queryIssued && _requestCopy != null && _query.GetData(out qryResult, false))
        {
            // The GPU has finished copying data to _renderTargetCopy, we can now lock
            // the data and access it on another thread.
            _queryIssued = false;

            // Lock the render target copy so its bits are CPU-readable.
            SharpDX.Rectangle rect;
            SharpDX.DataRectangle lockedRect = LockRenderTarget(_renderTargetCopy, out rect);
            _renderTargetCopyLocked = true;

            // Process the captured pixels off the render thread; _lockRenderTarget guards
            // the locked surface against being unlocked by the next capture while in use.
            System.Threading.Tasks.Task.Factory.StartNew(() =>
            {
                lock (_lockRenderTarget)
                {
                    ProcessCapture(rect.Width, rect.Height, lockedRect.Pitch, _renderTargetCopy.Description.Format.ToPixelFormat(), lockedRect.DataPointer, _requestCopy);
                }
            });
        }

        // Single frame capture request
        if (this.Request != null)
        {
            DateTime start = DateTime.Now;
            try
            {
                using (Surface renderTarget = device.GetRenderTarget(0))
                {
                    int width, height;

                    // If resizing of the captured image, determine correct dimensions
                    // (only downscale; aspect ratio is preserved by scaling the other axis).
                    if (Request.Resize != null && (renderTarget.Description.Width > Request.Resize.Value.Width || renderTarget.Description.Height > Request.Resize.Value.Height))
                    {
                        if (renderTarget.Description.Width > Request.Resize.Value.Width)
                        {
                            width = Request.Resize.Value.Width;
                            height = (int)Math.Round((renderTarget.Description.Height * ((double)Request.Resize.Value.Width / (double)renderTarget.Description.Width)));
                        }
                        else
                        {
                            height = Request.Resize.Value.Height;
                            width = (int)Math.Round((renderTarget.Description.Width * ((double)Request.Resize.Value.Height / (double)renderTarget.Description.Height)));
                        }
                    }
                    else
                    {
                        width = renderTarget.Description.Width;
                        height = renderTarget.Description.Height;
                    }

                    // If existing _renderTargetCopy, ensure that it is the correct size and format;
                    // otherwise tear everything down so it is recreated below.
                    if (_renderTargetCopy != null && (_renderTargetCopy.Description.Width != width || _renderTargetCopy.Description.Height != height || _renderTargetCopy.Description.Format != renderTarget.Description.Format))
                    {
                        // Cleanup resources
                        Cleanup();
                    }

                    // Ensure that we have something to put the render target data into
                    if (!_resourcesInitialised || _renderTargetCopy == null)
                    {
                        CreateResources(device, width, height, renderTarget.Description.Format);
                    }

                    // Resize from render target Surface to resolvedSurface (also deals with resolving multi-sampling)
                    device.StretchRectangle(renderTarget, _resolvedTarget, TextureFilter.None);
                }

                // If the render target is locked from a previous request unlock it
                if (_renderTargetCopyLocked)
                {
                    // Wait for the ProcessCapture thread to finish with it
                    // (double-check under the lock: it may have been unlocked while we waited).
                    lock (_lockRenderTarget)
                    {
                        if (_renderTargetCopyLocked)
                        {
                            _renderTargetCopy.UnlockRectangle();
                            _renderTargetCopyLocked = false;
                        }
                    }
                }

                // Copy data from resolved target to our render target copy, then issue the
                // event query so the polling branch above can detect when the GPU copy is done.
                device.GetRenderTargetData(_resolvedTarget, _renderTargetCopy);
                _requestCopy = Request.Clone();
                _query.Issue(Issue.End);
                _queryIssued = true;
            }
            finally
            {
                // We have completed the request - mark it as null so we do not continue to try to capture the same request
                // Note: If you are after high frame rates, consider implementing buffers here to capture more frequently
                //       and send back to the host application as needed. The IPC overhead significantly slows down
                //       the whole process if sending frame by frame.
                Request = null;
            }
            DateTime end = DateTime.Now;
            this.DebugMessage(hook + ": Capture time: " + (end - start).ToString());
        }
        #endregion

        if (this.Config.ShowOverlay)
        {
            #region Draw Overlay
            // Check if overlay needs to be initialised (first use, or the device changed)
            if (_overlayEngine == null || _overlayEngine.Device.NativePointer != device.NativePointer)
            {
                // Cleanup if necessary
                if (_overlayEngine != null)
                {
                    _overlayEngine.Dispose();
                }
                _overlayEngine = ToDispose(new DX9.DXOverlayEngine());
                // Create Overlay
                _overlayEngine.Overlays.Add(new Capture.Hook.Common.Overlay
                {
                    Elements =
                    {
                        // Add frame rate counter
                        new Capture.Hook.Common.FramesPerSecond(new System.Drawing.Font("Arial", 16, FontStyle.Bold)) { Location = new System.Drawing.Point(5, 5), Color= System.Drawing.Color.Red, AntiAliased = true },
                        // Example of adding an image to overlay (can implement semi transparency with Tint, e.g. Tint = Color.FromArgb(127, 255, 255, 255))
                        //new Capture.Hook.Common.ImageElement(@"C:\Temp\test.bmp") { Location = new System.Drawing.Point(20, 20) }
                    }
                });
                _overlayEngine.Initialise(device);
            }
            // Draw Overlay(s)
            else if (_overlayEngine != null)
            {
                foreach (var overlay in _overlayEngine.Overlays)
                {
                    overlay.Frame();
                }
                _overlayEngine.Draw();
            }
            #endregion
        }
    }
    catch (Exception e)
    {
        // Never let a capture failure crash the hooked application - log and continue.
        DebugMessage(e.ToString());
    }
}
/// <summary>
/// Ensures the Direct3D11 resources needed for a capture exist and match the current
/// back buffer description and capture region, (re)creating them only when something changed.
/// Creates: an event query, a single-sampled resolve texture (<c>_resolvedRT</c>, optionally
/// shared across devices via a keyed mutex), a CPU-readable staging texture (<c>_finalRT</c>),
/// and - when <paramref name="request"/> asks for a resize - a render target plus
/// screen-aligned quad renderer for GPU-side downscaling.
/// </summary>
/// <param name="device">The game's device (owner of the back buffer being captured).</param>
/// <param name="description">Description of the back buffer texture (format/size to resolve into).</param>
/// <param name="captureRegion">Region of the back buffer to capture; sizes the staging texture.</param>
/// <param name="request">The current screenshot request; its <c>Resize</c> drives resize-resource creation.</param>
/// <param name="useSameDeviceForResize">When true, perform resizing on the game's device instead of
/// our own <c>_device</c> (fallback used when creating a shared resource fails).</param>
void EnsureResources(global::SharpDX.Direct3D11.Device device, Texture2DDescription description, Rectangle captureRegion, ScreenshotRequest request, bool useSameDeviceForResize = false)
{
    var resizeDevice = useSameDeviceForResize ? device : _device;

    // Check if _resolvedRT or _finalRT require creation:
    // if both exist, live on a known device, and match the requested sizes/format, keep them as-is.
    if (_finalRT != null && (_finalRT.Device.NativePointer == device.NativePointer || _finalRT.Device.NativePointer == _device.NativePointer) &&
        _finalRT.Description.Height == captureRegion.Height && _finalRT.Description.Width == captureRegion.Width &&
        _resolvedRT != null && _resolvedRT.Description.Height == description.Height && _resolvedRT.Description.Width == description.Width &&
        (_resolvedRT.Device.NativePointer == device.NativePointer || _resolvedRT.Device.NativePointer == _device.NativePointer) &&
        _resolvedRT.Description.Format == description.Format
        )
    {
        // Existing resources are still valid - nothing to do in this branch.
    }
    else
    {
        // Something changed: dispose everything before recreating.
        RemoveAndDispose(ref _query);
        RemoveAndDispose(ref _resolvedRT);
        RemoveAndDispose(ref _resolvedSRV);
        RemoveAndDispose(ref _finalRT);
        RemoveAndDispose(ref _resolvedRTShared);
        RemoveAndDispose(ref _resolvedRTKeyedMutex);
        RemoveAndDispose(ref _resolvedRTKeyedMutex_Dev2);

        // Event query used elsewhere to poll for completion of the GPU copy into _finalRT.
        _query = new Query(resizeDevice, new QueryDescription()
        {
            Flags = QueryFlags.None,
            Type = QueryType.Event
        });
        _queryIssued = false;

        try
        {
            // If resizing happens on a different device, the resolve texture must be shareable.
            ResourceOptionFlags resolvedRTOptionFlags = ResourceOptionFlags.None;

            if (device != resizeDevice)
            {
                resolvedRTOptionFlags |= ResourceOptionFlags.SharedKeyedmutex;
            }

            _resolvedRT = ToDispose(new Texture2D(device, new Texture2DDescription()
            {
                CpuAccessFlags = CpuAccessFlags.None,
                Format = description.Format, // for multisampled backbuffer, this must be same format
                Height = description.Height,
                Usage = ResourceUsage.Default,
                Width = description.Width,
                ArraySize = 1,
                SampleDescription = new global::SharpDX.DXGI.SampleDescription(1, 0), // Ensure single sample
                BindFlags = BindFlags.ShaderResource,
                MipLevels = 1,
                OptionFlags = resolvedRTOptionFlags
            }));
        }
        catch
        {
            // Failed to create the shared resource, try again using the same device as game for resize
            // (recursive retry with useSameDeviceForResize=true, which avoids the shared-resource path).
            EnsureResources(device, description, captureRegion, request, true);
            return;
        }

        // Retrieve reference to the keyed mutex
        _resolvedRTKeyedMutex = ToDispose(_resolvedRT.QueryInterfaceOrNull<global::SharpDX.DXGI.KeyedMutex>());

        // If the resolvedRT is a shared resource _resolvedRTKeyedMutex will not be null
        if (_resolvedRTKeyedMutex != null)
        {
            // Open the shared texture on the resize device and grab its keyed mutex too.
            using (var resource = _resolvedRT.QueryInterface<global::SharpDX.DXGI.Resource>())
            {
                _resolvedRTShared = ToDispose(resizeDevice.OpenSharedResource<Texture2D>(resource.SharedHandle));
                _resolvedRTKeyedMutex_Dev2 = ToDispose(_resolvedRTShared.QueryInterfaceOrNull<global::SharpDX.DXGI.KeyedMutex>());
            }
            // SRV for use if resizing
            _resolvedSRV = ToDispose(new ShaderResourceView(resizeDevice, _resolvedRTShared));
        }
        else
        {
            _resolvedSRV = ToDispose(new ShaderResourceView(resizeDevice, _resolvedRT));
        }

        // CPU-readable staging texture that the capture is ultimately copied into.
        _finalRT = ToDispose(new Texture2D(resizeDevice, new Texture2DDescription()
        {
            CpuAccessFlags = CpuAccessFlags.Read,
            Format = description.Format,
            Height = captureRegion.Height,
            Usage = ResourceUsage.Staging,
            Width = captureRegion.Width,
            ArraySize = 1,
            SampleDescription = new global::SharpDX.DXGI.SampleDescription(1, 0),
            BindFlags = BindFlags.None,
            MipLevels = 1,
            OptionFlags = ResourceOptionFlags.None
        }));
        _finalRTMapped = false;
    }

    // If sharing did not end up active (no second-device mutex), fall back to the game's device
    // for the resize resources below.
    if (_resolvedRT != null && _resolvedRTKeyedMutex_Dev2 == null && resizeDevice == _device)
    {
        resizeDevice = device;
    }

    // (Re)create resize resources only when a resize is requested and the existing ones
    // are missing, on the wrong device, or the wrong size.
    if (resizeDevice != null && request.Resize != null && (_resizedRT == null || (_resizedRT.Device.NativePointer != resizeDevice.NativePointer || _resizedRT.Description.Width != request.Resize.Value.Width || _resizedRT.Description.Height != request.Resize.Value.Height)))
    {
        // Create/Recreate resources for resizing
        RemoveAndDispose(ref _resizedRT);
        RemoveAndDispose(ref _resizedRTV);
        RemoveAndDispose(ref _saQuad);

        _resizedRT = ToDispose(new Texture2D(resizeDevice, new Texture2DDescription()
        {
            Format = global::SharpDX.DXGI.Format.R8G8B8A8_UNorm, // Supports BMP/PNG/etc
            Height = request.Resize.Value.Height,
            Width = request.Resize.Value.Width,
            ArraySize = 1,
            SampleDescription = new global::SharpDX.DXGI.SampleDescription(1, 0),
            BindFlags = BindFlags.RenderTarget,
            MipLevels = 1,
            Usage = ResourceUsage.Default,
            OptionFlags = ResourceOptionFlags.None
        }));
        _resizedRTV = ToDispose(new RenderTargetView(resizeDevice, _resizedRT));

        _saQuad = ToDispose(new ScreenAlignedQuadRenderer());
        _saQuad.Initialize(new DeviceManager(resizeDevice));
    }
}