/// <summary>
/// Perform async read back from the provided source texture.
/// </summary>
/// <param name="src">Texture source to be used for the read back.</param>
/// <param name="mipIndex">Index of the mipmap to be fetched.</param>
/// <param name="functor">Functor that will be invoked after the async read back request is complete.</param>
/// <typeparam name="T">Type for the destination data buffer.</typeparam>
/// <returns>Returns an AsyncRequest.</returns>
public static AsyncRequest<object> Capture<T>(Texture src, int mipIndex = 0, Func<AsyncRequest<object>, AsyncRequest<object>.Result> functor = null) where T : struct
{
    var req = Manager.Instance.CreateRequest<AsyncRequest<object>>();

    if (GraphicsUtilities.SupportsAsyncReadback())
    {
        AsyncGPUReadback.Request(src, mipIndex, (AsyncGPUReadbackRequest request) =>
        {
            req.error = request.hasError;
            if (!request.hasError)
            {
                req.data = request.GetData<T>().ToArray();
                req.Enqueue(functor);
                req.Execute();
            }
        });
    }
    else
    {
        req.data = GraphicsUtilities.GetPixelsSlow(src as RenderTexture);
        req.Enqueue(functor);
        req.Execute();
    }

    return req;
}
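A minimal usage sketch, assuming `rt` is a RenderTexture that has already been rendered to (the name is illustrative); the functor is invoked once the readback data is available:

// Illustrative only: read a RenderTexture back as raw bytes and log the size.
var request = Capture<byte>(rt, 0, r =>
{
    var bytes = (byte[])r.data;
    Debug.Log($"Read back {bytes.Length} bytes.");
    return AsyncRequest<object>.Result.Completed;
});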
/// <summary>
/// Perform async read back from the provided render texture.
/// </summary>
/// <param name="src">RenderTexture to capture.</param>
/// <param name="functor">Completion functor for handling the captured data. The object passed is a byte[] of the captured data.</param>
/// <returns>AsyncRequest&lt;object&gt; for the captured data.</returns>
public static AsyncRequest<object> Capture(RenderTexture src, Func<AsyncRequest<object>, AsyncRequest<object>.Result> functor = null)
{
    var req = Manager.Instance.CreateRequest<AsyncRequest<object>>();

#if !UNITY_2019_2_OR_NEWER && (PLATFORM_STANDALONE_OSX || UNITY_EDITOR)
    req.data = GraphicsUtilities.GetPixelsSlow(src);
    req.Enqueue(functor);
    req.Execute();
#else
    if (GraphicsUtilities.SupportsAsyncReadback())
    {
        AsyncGPUReadback.Request(src, 0, (AsyncGPUReadbackRequest request) =>
        {
            if (request.hasError)
            {
                req.error = true;
            }
            else
            {
                req.data = request.GetData<byte>().ToArray();
                req.Enqueue(functor);
                req.Execute();
            }
        });
    }
    else
    {
        req.data = GraphicsUtilities.GetPixelsSlow(src);
        req.Enqueue(functor);
        req.Execute();
    }
#endif
    return req;
}
/// <summary>
/// Check whether, for the given rendering pipeline and graphics API, Y needs to be flipped during readback from the backbuffer.
/// </summary>
/// <param name="camera">Camera from which the readback is being performed.</param>
/// <returns>True if the flip is required.</returns>
public static bool ShouldFlipY(Camera camera)
{
#if UNITY_2019_3_OR_NEWER
    if (SRPSupport != null)
    {
        switch (SRPSupport.GetCurrentPipelineRenderingType())
        {
#if URP_ENABLED
            case RenderingPipelineType.URP:
            {
                return (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Direct3D11 || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal) &&
                       (camera.targetTexture == null && camera.cameraType == CameraType.Game);
            }
#endif
#if HDRP_ENABLED
            case RenderingPipelineType.HDRP:
            {
                var hdAdditionalCameraData = camera.gameObject.GetComponent<UnityEngine.Rendering.HighDefinition.HDAdditionalCameraData>();
                // Based on logic in HDRenderPipeline.PrepareFinalBlitParameters
                return camera.targetTexture != null ||
                       hdAdditionalCameraData.flipYMode == UnityEngine.Rendering.HighDefinition.HDAdditionalCameraData.FlipYMode.ForceFlipY ||
                       camera.cameraType == CameraType.Game;
            }
#endif
            default:
            {
                return camera.targetTexture == null && GraphicsUtilities.SupportsAsyncReadback();
            }
        }
    }
#endif
    return camera.targetTexture == null && GraphicsUtilities.SupportsAsyncReadback();
}
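When ShouldFlipY returns true and the flip cannot be folded into a GPU blit, the rows of the readback buffer can be swapped on the CPU instead. A hedged sketch with an illustrative helper name, assuming `System` is imported for Buffer.BlockCopy:

// Illustrative CPU-side row flip for a readback buffer.
static void FlipRowsInPlace(byte[] pixels, int height, int bytesPerRow)
{
    var row = new byte[bytesPerRow];
    // Swap row y with row (height - 1 - y), working toward the middle.
    for (int y = 0; y < height / 2; ++y)
    {
        int top = y * bytesPerRow;
        int bottom = (height - 1 - y) * bytesPerRow;
        Buffer.BlockCopy(pixels, top, row, 0, bytesPerRow);
        Buffer.BlockCopy(pixels, bottom, pixels, top, bytesPerRow);
        Buffer.BlockCopy(row, 0, pixels, bottom, bytesPerRow);
    }
}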
/// <summary>
/// Perform async read back from the provided compute buffer with size and offset.
/// </summary>
/// <param name="src">Compute buffer source to be used for the read back.</param>
/// <param name="size">Size in bytes of the data to be retrieved from the ComputeBuffer.</param>
/// <param name="offset">Offset in bytes in the ComputeBuffer.</param>
/// <param name="functor">Functor that will be invoked after the async read back request is complete.</param>
/// <typeparam name="T">Type for the destination data buffer.</typeparam>
/// <returns>Returns an AsyncRequest.</returns>
public static AsyncRequest<object> Capture<T>(ComputeBuffer src, int size, int offset, Func<AsyncRequest<object>, AsyncRequest<object>.Result> functor = null) where T : struct
{
    var req = Manager.Instance.CreateRequest<AsyncRequest<object>>();

    if (GraphicsUtilities.SupportsAsyncReadback())
    {
        AsyncGPUReadback.Request(src, size, offset, (AsyncGPUReadbackRequest request) =>
        {
            req.error = request.hasError;
            if (!request.hasError)
            {
                req.data = request.GetData<T>().ToArray();
                req.Enqueue(functor);
                req.Execute();
            }
        });
    }
    else
    {
        // ComputeBuffer.GetData indexes and counts in elements, whereas size and
        // offset are specified in bytes, so convert using the element stride.
        // UnsafeUtility lives in Unity.Collections.LowLevel.Unsafe.
        var stride = UnsafeUtility.SizeOf<T>();
        var dst = new T[size / stride];
        src.GetData(dst, 0, offset / stride, size / stride);
        req.data = dst;
        req.Enqueue(functor);
        req.Execute();
    }

    return req;
}
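A minimal usage sketch, assuming a compute shader has already written 1024 floats into `buffer` (an illustrative name); note that size and offset are passed in bytes, matching AsyncGPUReadback.Request:

var buffer = new ComputeBuffer(1024, sizeof(float));
// ... dispatch a compute shader that writes into 'buffer' ...
var request = Capture<float>(buffer, 1024 * sizeof(float), 0, r =>
{
    var values = r.data as float[];
    // Release the buffer only after the readback has completed.
    buffer.Release();
    return values != null ? AsyncRequest<object>.Result.Completed : AsyncRequest<object>.Result.Error;
});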
/// <summary>
/// Encode the input data in the provided image format.
/// </summary>
/// <param name="data">An array of data to be encoded.</param>
/// <param name="width">Image width.</param>
/// <param name="height">Image height.</param>
/// <param name="format">Graphics format used by the render texture.</param>
/// <param name="imageFormat">Format for encoding the data.</param>
/// <param name="additionalParam">Additional flags to be passed for the encoding.</param>
/// <returns>The encoded data as an Array.</returns>
/// <exception cref="NotSupportedException">Thrown when the requested image format is not supported.</exception>
public static Array EncodeArray(Array data, int width, int height, GraphicsFormat format, ImageFormat imageFormat, int additionalParam = 0)
{
    using (s_Encode.Auto())
    {
        switch (imageFormat)
        {
            case ImageFormat.Raw:
                return data;
#if UNITY_2019_3_OR_NEWER
            case ImageFormat.Png:
#if USIM_USE_BUILTIN_PNG_ENCODER
                return ImageConversion.EncodeArrayToPNG(data, format, (uint)width, (uint)height, 0);
#else
                int bitDepth = 8;
                PngEncoder.ColorType colorType = PngEncoder.GetTypeAndDepth(GraphicsUtilities.GetBlockSize(format), GraphicsUtilities.GetComponentCount(format), ref bitDepth);
                return PngEncoder.Encode(ArrayUtilities.Cast<byte>(data), width, height, colorType, bitDepth, (PngEncoder.PngParam)additionalParam);
#endif
            case ImageFormat.Exr:
                return ImageConversion.EncodeArrayToEXR(data, format, (uint)width, (uint)height, 0, /*EXRFlags*/ (Texture2D.EXRFlags)additionalParam);
            case ImageFormat.Tga:
                return ImageConversion.EncodeArrayToTGA(data, format, (uint)width, (uint)height, 0);
#endif
            case ImageFormat.Jpg:
#if USIM_USE_BUILTIN_JPG_ENCODER && UNITY_2019_3_OR_NEWER
                return ImageConversion.EncodeArrayToJPG(data, format, (uint)width, (uint)height, 0, /*quality*/ additionalParam > 0 ? additionalParam : 75);
#else
                return JpegEncoder.Encode(ArrayUtilities.Cast<byte>(data), width, height, GraphicsUtilities.GetBlockSize(format), format, /*quality*/ additionalParam > 0 ? additionalParam : 75);
#endif
            default:
                throw new NotSupportedException("ImageFormat is not supported");
        }
    }
}
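A minimal usage sketch, assuming `pixels` holds an RGBA32 readback of a 640x480 target and `System.IO` is imported; both PNG branches return byte[] data, and the output path is illustrative:

var png = EncodeArray(pixels, 640, 480, GraphicsFormat.R8G8B8A8_UNorm, ImageFormat.Png);
File.WriteAllBytes("capture.png", (byte[])png);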
/// <summary>
/// Setup a capture request for a channel. Once completed, the functor will be called with the channel data, in the format requested.
/// </summary>
/// <param name="request"> AsyncRequest to enqueue readbacks to. When all are completed, the request is marked completed. </param>
/// <param name="channel"> The channel to capture data from (color, depth etc.) </param>
/// <param name="camera"> The Camera to capture data from. </param>
/// <param name="format"> The graphics format you want the data to be in. </param>
/// <param name="functor"> The completion functor to call with the data. </param>
/// <param name="forceFlipY"> Flags allowing you to force flipY for arbitrary channels. </param>
/// <param name="readWrite"> Specifies the desired color space conversion. If Default, it will be set to sRGB for the SRP color channel. </param>
public static void SetupCaptureRequest
(
    AsyncRequest<CaptureState> request,
    Channel channel,
    Camera camera,
    GraphicsFormat format,
    Func<AsyncRequest<CaptureState>, AsyncRequest.Result> functor,
    ForceFlip forceFlipY,
    RenderTextureReadWrite readWrite = RenderTextureReadWrite.Default
)
{
    request.data.SetFunctor(channel, functor);

    Debug.Assert(request.data.camera == camera, "Capture: Camera must match the camera in request.data.camera");
    Debug.Assert(GraphicsUtilities.SupportsRenderTextureFormat(format), $"Capture: GraphicsFormat {format} not supported for {channel} channel");

    var material = SelectShaderVariantForChannel(channel, format);

    if (scriptableRenderPipeline)
    {
        request.data.SetTrigger(channel, (cb, rtid) => SetupCaptureRequestCommandBufferForChannel(request, channel, camera, cb, rtid, material, format, forceFlipY, readWrite, HandleReadbackCompletion));
    }
    else
    {
        SetupCaptureRequestCommandBufferForChannel(request, channel, camera, null, default, material, format, forceFlipY, readWrite, HandleReadbackCompletion);
    }
}
void ProcessBatch()
{
    // Synchronous fallback: drain the queued requests with a slow CPU readback,
    // invoke each callback with the pixel data, then recycle the request.
    while (_requestsBatch.Count > 0)
    {
        var request = _requestsBatch.Dequeue();
        request.InvokeCallback(GraphicsUtilities.GetPixelsSlow(request.renderTexture));
        _requestsPool.Enqueue(request);
    }
}
void Flush()
{
    if (GraphicsUtilities.SupportsAsyncReadback())
    {
        ProcessBatchAsync();
    }
    else
    {
        ProcessBatch();
    }
}
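The batched path can also be fed directly. A hedged sketch mirroring how SetupCaptureRequest queues readbacks later in this section; `rt` is illustrative, and the callback is assumed to receive the raw byte buffer:

BatchReadback.Instance().QueueReadback(rt, data =>
{
    Debug.Log($"Batched readback returned {(data as byte[])?.Length} bytes.");
    return AsyncRequest.Result.Completed;
});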
/// <summary>
/// Capture a screenshot asynchronously from a given source camera.
/// </summary>
/// <param name="sourceCamera">Source camera for which the screen capture is to be performed.</param>
/// <param name="renderTextureFormat">Render texture format for the screen capture.</param>
/// <param name="path">Path where the image is to be saved.</param>
/// <param name="format">Image format in which the file is to be saved. Defaults to RAW.</param>
public void ScreenCaptureAsync<T>(Camera sourceCamera, GraphicsFormat renderTextureFormat, string path, CaptureImageEncoder.ImageFormat format = CaptureImageEncoder.ImageFormat.Raw) where T : struct
{
    Debug.Assert(sourceCamera != null, "Source camera cannot be null");
    Debug.Assert(GraphicsUtilities.SupportsRenderTextureFormat(renderTextureFormat));

    Func<AsyncRequest<CaptureCamera.CaptureState>, AsyncRequest<CaptureCamera.CaptureState>.Result> functor = (AsyncRequest<CaptureCamera.CaptureState> r) =>
    {
        r.data.colorBuffer = CaptureImageEncoder.EncodeArray(r.data.colorBuffer as Array, sourceCamera.pixelWidth, sourceCamera.pixelHeight, GraphicsFormat.R8G8B8A8_UNorm, format);
        var result = Unity.Simulation.FileProducer.Write(path, r.data.colorBuffer as Array);
        return result ? AsyncRequest<CaptureCamera.CaptureState>.Result.Completed : AsyncRequest<CaptureCamera.CaptureState>.Result.Error;
    };

    CaptureCamera.Capture(sourceCamera, functor, forceFlip: ForceFlip.None);
}
/// <summary>
/// Main capture entrypoint.
/// </summary>
/// <param name="camera"> The Camera to capture data from. </param>
/// <param name="colorFunctor"> Completion functor for the color channel. </param>
/// <param name="colorFormat"> The pixel format to capture in. </param>
/// <param name="depthFunctor"> Completion functor for the depth channel. </param>
/// <param name="depthFormat"> The pixel format to capture in. </param>
/// <param name="motionVectorsFunctor"> Completion functor for the motion vectors channel. </param>
/// <param name="motionFormat"> The pixel format to capture in. </param>
/// <param name="flipY"> Whether or not to flip the image vertically. </param>
/// <returns>AsyncRequest&lt;CaptureState&gt;</returns>
public static AsyncRequest<CaptureState> Capture
(
    Camera camera,
    Func<AsyncRequest<CaptureState>, AsyncRequest<CaptureState>.Result> colorFunctor = null,
    GraphicsFormat colorFormat = GraphicsFormat.R8G8B8A8_UNorm,
    Func<AsyncRequest<CaptureState>, AsyncRequest<CaptureState>.Result> depthFunctor = null,
    GraphicsFormat depthFormat = GraphicsFormat.R16_UNorm,
    Func<AsyncRequest<CaptureState>, AsyncRequest<CaptureState>.Result> motionVectorsFunctor = null,
    GraphicsFormat motionFormat = GraphicsFormat.R16_UNorm,
    bool flipY = false
)
{
#if UNITY_EDITOR
    Debug.Assert(camera != null, "Capture camera cannot be null.");
    Debug.Assert(colorFunctor != null || depthFunctor != null || motionVectorsFunctor != null, "Capture: at least one functor must be valid.");

    if (colorFunctor != null)
    {
        Debug.Assert(GraphicsUtilities.SupportsRenderTextureFormat(colorFormat), "GraphicsFormat not supported");
    }

    if (depthFunctor != null)
    {
        Debug.Assert((camera.depthTextureMode & (DepthTextureMode.Depth | DepthTextureMode.DepthNormals)) != 0, "Depth not specified for camera");
        Debug.Assert(GraphicsUtilities.SupportsRenderTextureFormat(depthFormat), "GraphicsFormat not supported");
    }

    if (motionVectorsFunctor != null)
    {
        Debug.Assert((camera.depthTextureMode & DepthTextureMode.MotionVectors) != 0, "Motion vectors not enabled in depthTextureMode");
        Debug.Assert(SystemInfo.supportsMotionVectors, "Motion vectors are not supported");
        Debug.Assert(GraphicsUtilities.SupportsRenderTextureFormat(motionFormat), "GraphicsFormat not supported");
    }
#endif // UNITY_EDITOR

    var req = Manager.Instance.CreateRequest<AsyncRequest<CaptureState>>();

    SetupCaptureRequest(req, Channel.Color,  camera, CameraEvent.AfterEverything,    BuiltinRenderTextureType.CameraTarget,  colorFormat,  colorFunctor,         flipY);
    SetupCaptureRequest(req, Channel.Depth,  camera, CameraEvent.AfterDepthTexture,  BuiltinRenderTextureType.Depth,         depthFormat,  depthFunctor,         flipY);
    SetupCaptureRequest(req, Channel.Motion, camera, CameraEvent.BeforeImageEffects, BuiltinRenderTextureType.MotionVectors, motionFormat, motionVectorsFunctor, flipY);

#if UNITY_2019_3_OR_NEWER
    SRPSupport?.QueueCameraRequest(camera, req);
#endif

    return req;
}
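A hedged end-to-end sketch: capture only the color channel of `mainCamera` (an illustrative reference), encode it with CaptureImageEncoder as in ScreenCaptureAsync above, and write it to an illustrative path; assumes `System.IO` is imported:

var request = Capture(mainCamera, colorFunctor: r =>
{
    var encoded = CaptureImageEncoder.EncodeArray(r.data.colorBuffer as Array,
        mainCamera.pixelWidth, mainCamera.pixelHeight,
        GraphicsFormat.R8G8B8A8_UNorm, CaptureImageEncoder.ImageFormat.Png);
    File.WriteAllBytes("color.png", (byte[])encoded);
    return AsyncRequest<CaptureState>.Result.Completed;
});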
static Material SelectDepthShaderVariant(GraphicsFormat format)
{
    if (_depthCopyMaterials == null)
    {
        _depthCopyMaterials = new Material[4];
#if HDRP_ENABLED
        if (SRPSupport.GetCurrentPipelineRenderingType() == RenderingPipelineType.HDRP)
        {
            _depthCopyMaterials[0] = new Material(Shader.Find("usim/BlitCopyDepthHDRP"));
            _depthCopyMaterials[0].EnableKeyword("HDRP_ENABLED");
        }
        else
#endif // HDRP_ENABLED
        {
            for (var i = 0; i < _depthCopyMaterials.Length; ++i)
            {
                _depthCopyMaterials[i] = new Material(Shader.Find("usim/BlitCopyDepth"));
                _depthCopyMaterials[i].EnableKeyword($"CHANNELS{i + 1}");
            }
        }
    }

#if HDRP_ENABLED
    if (SRPSupport.GetCurrentPipelineRenderingType() == RenderingPipelineType.HDRP)
    {
        return _depthCopyMaterials[0];
    }
    else
#endif // HDRP_ENABLED
    {
        var componentCount = GraphicsUtilities.GetComponentCount(format);
        Debug.Assert(componentCount >= 1 && componentCount <= 4);
        return _depthCopyMaterials[componentCount - 1];
    }
}
/// <summary>
/// Perform the readback from the provided render texture using the ReadPixels API.
/// </summary>
/// <param name="renderTexture">Input source render texture for the readback.</param>
/// <returns>Returns a byte array of the pixel data retrieved from the readback.</returns>
/// <exception cref="NotSupportedException">Thrown when the single-channel size is not 2 or 4 bytes.</exception>
public static byte[] GetPixelsSlow(RenderTexture renderTexture)
{
    var graphicsFormat = GraphicsFormatUtility.GetGraphicsFormat(renderTexture.format, false);
    var pixelSize = GraphicsUtilities.GetBlockSize(graphicsFormat);
    var channels = GraphicsFormatUtility.GetComponentCount(graphicsFormat);
    var channelSize = pixelSize / channels;
    var rect = new Rect(0, 0, renderTexture.width, renderTexture.height);

    // For RGB(A) we can just return the raw data.
    if (channels >= 3 && channels <= 4)
    {
        var texture = new Texture2D(renderTexture.width, renderTexture.height);
        RenderTexture.active = renderTexture;
        texture.ReadPixels(rect, 0, 0);
        RenderTexture.active = null;
        var data = texture.GetRawTextureData();
        UnityEngine.Object.Destroy(texture);
        return data;
    }
    else
    {
        Debug.Assert(channels == 1, "Can only handle a single channel RT.");

        // ReadPixels must target one of RGBA32, ARGB32, RGB24, RGBAFloat or RGBAHalf,
        // so R16 and RFloat are read back via RGBAFloat and repacked below.
        var texture = new Texture2D(renderTexture.width, renderTexture.height, TextureFormat.RGBAFloat, false);
        RenderTexture.active = renderTexture;
        texture.ReadPixels(rect, 0, 0);
        RenderTexture.active = null;

        var length = renderTexture.width * renderTexture.height;
        var input = ArrayUtilities.Cast<float>(texture.GetRawTextureData());
        UnityEngine.Object.Destroy(texture);

        Array output = null;
        int index = 0;
        switch (channelSize)
        {
            // 16 bits per channel: rescale the normalized float to the 16 bit range,
            // keeping only the R component of each RGBA pixel.
            case 2:
                short[] shorts = ArrayUtilities.Allocate<short>(length);
                var si = 0;
                var numerator = (1 << 16) - 1;
                while (index < length)
                {
                    shorts[index++] = (short)(numerator * input[si]);
                    si += 4;
                }
                output = shorts;
                break;

            // 32 bits per channel: copy the R component of each RGBA pixel as-is.
            case 4:
                float[] floats = ArrayUtilities.Allocate<float>(length);
                var fi = 0;
                while (index < length)
                {
                    floats[index++] = input[fi];
                    fi += 4;
                }
                output = floats;
                break;

            default:
                throw new NotSupportedException();
        }

        return ArrayUtilities.Cast<byte>(output);
    }
}
static void SetupCaptureRequest
(
    AsyncRequest<CaptureState> req,
    Channel channel,
    Camera camera,
    CameraEvent cameraEvent,
    BuiltinRenderTextureType source,
    GraphicsFormat format,
    Func<AsyncRequest<CaptureState>, AsyncRequest<CaptureState>.Result> functor,
    bool flipY
)
{
    if (functor != null)
    {
        // Declared for possible capture, to avoid use from other threads.
        var cameraTargetTexture = camera.targetTexture;

        RenderTexture target1 = null;
        RenderTexture target2 = null;

        Action ReleaseTargets = () =>
        {
            if (target1 != null && target1 != cameraTargetTexture)
            {
                RenderTexture.ReleaseTemporary(target1);
                target1 = null;
            }
            if (target2 != null)
            {
                Debug.Assert(target2 != cameraTargetTexture);
                RenderTexture.ReleaseTemporary(target2);
                target2 = null;
            }
        };

        Material depthMaterial = null;
        if (source == BuiltinRenderTextureType.Depth)
        {
            depthMaterial = SelectDepthShaderVariant(format);
        }

#if UNITY_2019_3_OR_NEWER
        if (scriptableRenderPipeline)
        {
            if (CaptureOptions.useBatchReadback)
            {
                QueueForAsyncBatchReadback(req, channel, functor, SetupRenderTargets(ref target1, ref target2, camera, null, format, cameraTargetTexture, depthMaterial, flipY));
            }
            else
            {
                req.data.SetFunctor(channel, (AsyncRequest<CaptureState> r) =>
                {
                    var target = SetupRenderTargets(ref target1, ref target2, camera, null, format, cameraTargetTexture, depthMaterial, flipY);
                    if (GraphicsUtilities.SupportsAsyncReadback())
                    {
                        AsyncGPUReadback.Request(target, 0, (AsyncGPUReadbackRequest request) =>
                        {
                            ReleaseTargets();
                            if (request.hasError)
                            {
                                req.error = true;
                            }
                            else
                            {
                                if (functor != null)
                                {
                                    req.data.SetBuffer(channel, request.GetData<byte>().ToArray());
                                    req.Enqueue(functor);
                                    req.Execute();
                                }
                            }
                        });
                    }
                    else
                    {
                        r.data.SetBuffer(channel, GraphicsUtilities.GetPixelsSlow(target));
                        ReleaseTargets();
                        req.Enqueue(functor);
                        req.Execute();
                    }
                    return AsyncRequest.Result.None;
                });
            }
        }
        else
#endif // UNITY_2019_3_OR_NEWER
        {
            req.data.SetFunctor(channel, functor);

            CommandBuffer commandBuffer = GetCommandBufferForCamera(cameraEvent, camera);
            commandBuffer.name = $"CaptureCamera.{channel.ToString()}";

            var target = SetupRenderTargets(ref target1, ref target2, camera, commandBuffer, format, cameraTargetTexture, depthMaterial, flipY);

            if (GraphicsUtilities.SupportsAsyncReadback())
            {
#if UNITY_2019_3_OR_NEWER
                if (CaptureOptions.useBatchReadback)
                {
                    QueueForAsyncBatchReadback(req, channel, functor, target);
                    ReleaseTargets();
                }
                else
#endif
                {
                    commandBuffer.RequestAsyncReadback(target, (AsyncGPUReadbackRequest request) =>
                    {
                        commandBuffer.Clear();
                        if (request.hasError)
                        {
                            req.error = true;
                        }
                        else
                        {
                            functor = req.data.SetFunctor(channel, null);
                            if (functor != null)
                            {
                                req.data.SetBuffer(channel, request.GetData<byte>().ToArray());
                                req.Enqueue(functor);
                                req.Execute();
                            }
                        }
                        ReleaseTargets();
                    });
                }
            }
            else
            {
                Func<AsyncRequest<CaptureState>, AsyncRequest<CaptureState>.Result> wrapper;
#if UNITY_2019_3_OR_NEWER
                if (CaptureOptions.useBatchReadback)
                {
                    wrapper = (AsyncRequest<CaptureState> r) =>
                    {
                        BatchReadback.Instance().QueueReadback(target, data =>
                        {
                            r.data.SetBuffer(channel, data);
                            ReleaseTargets();
                            r.Enqueue(functor);
                            r.Execute();
                            return AsyncRequest.Result.Completed;
                        });
                        return AsyncRequest.Result.Completed;
                    };
                }
                else
#endif // UNITY_2019_3_OR_NEWER
                {
                    wrapper = (AsyncRequest<CaptureState> r) =>
                    {
                        r.data.SetBuffer(channel, GraphicsUtilities.GetPixelsSlow(target));
                        ReleaseTargets();
                        r.Enqueue(functor);
                        r.Execute();
                        return AsyncRequest.Result.Completed;
                    };
                }
                req.Enqueue(wrapper);
                req.Execute(AsyncRequest.ExecutionContext.EndOfFrame);
            }
        }
    }
}