void RenderMainLightCascadeShadowmap(ref ScriptableRenderContext context, ref CullingResults cullResults, ref LightData lightData, ref ShadowData shadowData)
{
    int shadowLightIndex = lightData.mainLightIndex;
    if (shadowLightIndex == -1)
    {
        return;
    }

    VisibleLight shadowLight = lightData.visibleLights[shadowLightIndex];

    // NOTE: Do NOT mix ProfilingScope with named CommandBuffers, i.e. CommandBufferPool.Get("name").
    // Currently there's an issue which results in mismatched markers.
    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MainLightShadow)))
    {
        var settings = new ShadowDrawingSettings(cullResults, shadowLightIndex);

        for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
        {
            //settings.splitData = m_CascadeSlices[cascadeIndex].splitData;
            // NOTE: DrawShadows currently culls more casters if no ShadowSplitData.cullingPlanes are set (version cds 8652678b),
            // so it is currently better not to pass the m_CascadeSlices[cascadeIndex].splitData object returned by
            // CullingResults.ComputeDirectionalShadowMatricesAndCullingPrimitives (change introduced in 8bf71cf).
            // Culling is then based only on the ShadowSplitData.cullingSphere distances.
            var splitData = settings.splitData;
            splitData.cullingSphere = m_CascadeSplitDistances[cascadeIndex];
            settings.splitData = splitData;

            Vector4 shadowBias = ShadowUtils.GetShadowBias(ref shadowLight, shadowLightIndex, ref shadowData, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].resolution);
            ShadowUtils.SetupShadowCasterConstantBuffer(cmd, ref shadowLight, shadowBias);
            CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.CastingPunctualLightShadow, false);
            ShadowUtils.RenderShadowSlice(cmd, ref context, ref m_CascadeSlices[cascadeIndex], ref settings, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].viewMatrix);
        }

        bool softShadows = shadowLight.light.shadows == LightShadows.Soft && shadowData.supportsSoftShadows;
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadows, shadowData.mainLightShadowCascadesCount == 1);
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadowCascades, shadowData.mainLightShadowCascadesCount > 1);
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.SoftShadows, softShadows);
        SetupMainLightShadowReceiverConstants(cmd, shadowLight, shadowData.supportsSoftShadows);
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
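The NOTE above recurs throughout these samples: ProfilingScope emits its own named markers, so the command buffer should come from the parameterless CommandBufferPool.Get() overload. A minimal sketch of the pattern these passes share (the profile id and surrounding context are taken from the example above, nothing new is introduced):

// Unnamed buffer: the ProfilingScope supplies the marker name.
CommandBuffer cmd = CommandBufferPool.Get();
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MainLightShadow)))
{
    // ... record rendering commands into cmd ...
}
// Markers are only emitted once the buffer is executed; then the buffer is returned to the pool.
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);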
void RaytracingRecursiveRender(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, CullingResults cull)
{
    // If ray tracing is disabled in the frame settings or the effect is not enabled
    RecursiveRendering recursiveSettings = hdCamera.volumeStack.GetComponent<RecursiveRendering>();
    if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) || !recursiveSettings.enable.value)
    {
        return;
    }

    // Recursive rendering works as follows:
    // - Shaders have a _RayTracing property.
    //   When this property is set to true, a RayTracingPrepass pass on the material is enabled (otherwise it is disabled).
    // - Before the prepass we render all objects with a RayTracingPrepass pass enabled into the depth buffer, to save performance.
    //   Note that ray-traced objects are excluded from the DepthPrepass, GBuffer and Forward passes, but not from the
    //   motion vector pass, so we can still benefit from motion vectors. This is handled in VertMesh.hlsl (see below).
    //   However, rendering motion vectors currently tags the stencil for deferred lighting, and thus could produce overshading.
    // - After the Transparent Depth pass we render all objects with a RayTracingPrepass pass enabled into a mask buffer
    //   (this needs to depth test, but not to write depth).
    //   Note: we render twice: once to save performance and once to write the mask. If we wrote the mask in the first pass it
    //   would not account for objects that render on top of the ray-traced ones (to handle that we would need to run the pass
    //   after the depth buffer is ready, i.e. after the GBuffer pass, so we could not save performance).
    // - During RaytracingRecursiveRender we perform a RayTracingRendering.raytrace call on all pixels tagged in the mask.
    //   Meshes must be excluded from the regular passes to save performance (for opaque) and to get correct results (for transparent).
    //   For this we cull the meshes by setting their position to NaN when _RayTracing is true and _EnableRecursiveRayTracing is true.
    //   We use this method to avoid dealing with the RenderQueue, and it lets us dynamically disable recursive rendering
    //   and fall back to classic rasterized transparency. The code for the culling is in VertMesh().
    //   If ray tracing is disabled, _EnableRecursiveRayTracing is set to false and no culling happens.
    //   Objects are still rendered in the shadow and motion vector passes to keep their properties.
    // We render recursive rendering objects before transparents, so transparent objects can be overlaid on top,
    // like a lens flare on top of a headlight. We write depth, so objects behind are correctly z-tested, since recursive rendering
    // re-renders everything (meaning we should also support fog and sky in it).
    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RayTracingRecursiveRendering)))
    {
        RTHandle debugBuffer = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA0);
        RecursiveRendererParameters rrParams = PrepareRecursiveRendererParameters(hdCamera, recursiveSettings);
        RecursiveRendererResources rrResources = PrepareRecursiveRendererResources(debugBuffer);
        ExecuteRecursiveRendering(cmd, rrParams, rrResources);
        PushFullScreenDebugTexture(hdCamera, cmd, debugBuffer, FullScreenDebugMode.RecursiveRayTracing);
    }
}
public override void PreRenderSky(BuiltinSkyParameters builtinParams)
{
    var hdriSky = builtinParams.skySettings as HDRISky;
    float intensity, phi, backplatePhi;
    GetParameters(out intensity, out phi, out backplatePhi, builtinParams, hdriSky);

    using (new ProfilingScope(builtinParams.commandBuffer, ProfilingSampler.Get(HDProfileId.PreRenderSky)))
    {
        m_SkyHDRIMaterial.SetTexture(HDShaderIDs._Cubemap, hdriSky.hdriSky.value);
        m_SkyHDRIMaterial.SetVector(HDShaderIDs._SkyParam, new Vector4(intensity, 0.0f, Mathf.Cos(phi), Mathf.Sin(phi)));
        m_SkyHDRIMaterial.SetVector(HDShaderIDs._BackplateParameters0, GetBackplateParameters0(hdriSky));
        m_PropertyBlock.SetMatrix(HDShaderIDs._PixelCoordToViewDirWS, builtinParams.pixelCoordToViewDirMatrix);
        CoreUtils.DrawFullScreen(builtinParams.commandBuffer, m_SkyHDRIMaterial, m_PropertyBlock, m_RenderDepthOnlyFullscreenSkyWithBackplateID);
    }
}
void FilterCubemapCommon(CommandBuffer cmd, Texture source, RenderTexture target, Matrix4x4[] worldToViewMatrices)
{
    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.FilterCubemapCharlie)))
    {
        int mipCount = 1 + (int)Mathf.Log(source.width, 2.0f);
        if (mipCount < (int)EnvConstants.ConvolutionMipCount)
        {
            Debug.LogWarning("RenderCubemapCharlieConvolution: Cubemap size is too small for Charlie convolution, needs at least " + (int)EnvConstants.ConvolutionMipCount + " mip levels");
            return;
        }

        // Inverse of the solid angle associated with a texel of the cubemap (i.e. texels per steradian).
        float invOmegaP = (6.0f * source.width * source.width) / (4.0f * Mathf.PI);

        // Copy the first mip
        for (int f = 0; f < 6; f++)
        {
            cmd.CopyTexture(source, f, 0, target, f, 0);
        }

        var props = new MaterialPropertyBlock();
        props.SetTexture("_MainTex", source);
        props.SetFloat("_InvOmegaP", invOmegaP);

        for (int mip = 0; mip < (int)EnvConstants.ConvolutionMipCount; ++mip)
        {
            props.SetFloat("_Level", mip);
            for (int face = 0; face < 6; ++face)
            {
                var faceSize = new Vector4(source.width >> mip, source.height >> mip, 1.0f / (source.width >> mip), 1.0f / (source.height >> mip));
                var transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(0.5f * Mathf.PI, Vector2.zero, faceSize, worldToViewMatrices[face], true);
                props.SetMatrix(HDShaderIDs._PixelCoordToViewDirWS, transform);

                CoreUtils.SetRenderTarget(cmd, target, ClearFlag.None, mip, (CubemapFace)face);
                CoreUtils.DrawFullScreen(cmd, m_convolveMaterial, props);
            }
        }
    }
}
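As a sanity check on the invOmegaP factor above (this derivation is an addition, not part of the original snippet): a cubemap with face width w has 6 * w * w texels covering the full sphere of 4π steradians, so, ignoring per-texel distortion:

// omegaP    = 4 * PI / (6 * w * w)    // approximate steradians per texel
// invOmegaP = (6 * w * w) / (4 * PI)  // texels per steradian
// e.g. for a 128x128 face: 6 * 128 * 128 / (4 * PI) ≈ 7823 texels per steradian.
static float ComputeInvOmegaP(int faceWidth) => (6.0f * faceWidth * faceWidth) / (4.0f * Mathf.PI);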
void RenderIndirectDiffuseQuality(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, int frameCount)
{
    // First thing to check is: Do we have a valid ray-tracing environment?
    GlobalIllumination giSettings = hdCamera.volumeStack.GetComponent<GlobalIllumination>();

    // Evaluate the signal
    QualityRTIndirectDiffuseParameters qrtidParameters = PrepareQualityRTIndirectDiffuseParameters(hdCamera, giSettings);
    QualityRTIndirectDiffuseResources qrtidResources = PrepareQualityRTIndirectDiffuseResources(m_IndirectDiffuseBuffer0);
    RenderQualityRayTracedIndirectDiffuse(cmd, qrtidParameters, qrtidResources);

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingFilterIndirectDiffuse)))
    {
        if (giSettings.denoise)
        {
            DenoiseIndirectDiffuseBuffer(hdCamera, cmd, giSettings);
        }
    }
}
void RenderGizmo(Camera RenderCamera, GizmoSubset gizmoSubset)
{
#if UNITY_EDITOR
    if (Handles.ShouldRenderGizmos())
    {
        // Add GizmosPass
        GraphBuilder.AddPass<GizmosPassData>("Gizmos", ProfilingSampler.Get(CustomSamplerId.Gizmos),
            (ref GizmosPassData PassData, ref RDGPassBuilder PassBuilder) =>
            {
                PassData.RenderCamera = RenderCamera;
                PassData.GizmoSubset = gizmoSubset;
            },
            (ref GizmosPassData PassData, RDGContext GraphContext) =>
            {
                GraphContext.RenderContext.DrawGizmos(PassData.RenderCamera, PassData.GizmoSubset);
            });
    }
#endif
}
/// <inheritdoc/>
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (m_SamplingMaterial == null)
    {
        Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_SamplingMaterial, GetType().Name);
        return;
    }

    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.CopyColor)))
    {
        RenderTargetIdentifier opaqueColorRT = destination.Identifier();
        ScriptableRenderer.SetRenderTarget(cmd, opaqueColorRT, BuiltinRenderTextureType.CameraTarget, clearFlag, clearColor);

        bool useDrawProceduralBlit = renderingData.cameraData.xr.enabled;
        switch (m_DownsamplingMethod)
        {
            case Downsampling.None:
                RenderingUtils.Blit(cmd, source, opaqueColorRT, m_CopyColorMaterial, 0, useDrawProceduralBlit);
                break;
            case Downsampling._2xBilinear:
                RenderingUtils.Blit(cmd, source, opaqueColorRT, m_CopyColorMaterial, 0, useDrawProceduralBlit);
                break;
            case Downsampling._4xBox:
                m_SamplingMaterial.SetFloat(m_SampleOffsetShaderHandle, 2);
                RenderingUtils.Blit(cmd, source, opaqueColorRT, m_SamplingMaterial, 0, useDrawProceduralBlit);
                break;
            case Downsampling._4xBilinear:
                RenderingUtils.Blit(cmd, source, opaqueColorRT, m_CopyColorMaterial, 0, useDrawProceduralBlit);
                break;
        }
    }

    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
void RenderIndirectDiffusePerformance(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, int frameCount)
{
    // Fetch the required resources
    var settings = hdCamera.volumeStack.GetComponent<GlobalIllumination>();

    // Request the intermediate textures we will be using
    RTHandle directionBuffer = GetRayTracingBuffer(InternalRayTracingBuffers.Direction);
    RTHandle intermediateBuffer1 = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA1);

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingIndirectDiffuseDirectionGeneration)))
    {
        // Prepare the components for the direction generation
        RTIndirectDiffuseDirGenParameters rtidDirGenParameters = PrepareRTIndirectDiffuseDirGenParameters(hdCamera, settings);
        RTIndirectDiffuseDirGenResources rtidDirGenResources = PrepareRTIndirectDiffuseDirGenResources(hdCamera, directionBuffer);
        RTIndirectDiffuseDirGen(cmd, rtidDirGenParameters, rtidDirGenResources);
    }

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingIndirectDiffuseEvaluation)))
    {
        // Prepare the components for the deferred lighting
        DeferredLightingRTParameters deferredParameters = PrepareIndirectDiffuseDeferredLightingRTParameters(hdCamera);
        DeferredLightingRTResources deferredResources = PrepareDeferredLightingRTResources(hdCamera, directionBuffer, intermediateBuffer1);
        RenderRaytracingDeferredLighting(cmd, deferredParameters, deferredResources);
    }

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingIndirectDiffuseUpscale)))
    {
        // Upscale the indirect diffuse buffer
        RTIndirectDiffuseUpscaleParameters rtidUpscaleParameters = PrepareRTIndirectDiffuseUpscaleParameters(hdCamera, settings);
        RTIndirectDiffuseUpscaleResources rtidUpscaleResources = PrepareRTIndirectDiffuseUpscaleResources(hdCamera, intermediateBuffer1, directionBuffer, m_IndirectDiffuseBuffer0);
        RTIndirectDiffuseUpscale(cmd, rtidUpscaleParameters, rtidUpscaleResources);
    }

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingFilterIndirectDiffuse)))
    {
        // Denoise if required
        if (settings.denoise)
        {
            DenoiseIndirectDiffuseBuffer(hdCamera, cmd, settings);
        }
    }
}
void RenderMainLightCascadeShadowmap(ref ScriptableRenderContext context, ref CullingResults cullResults, ref LightData lightData, ref ShadowData shadowData)
{
    int shadowLightIndex = lightData.mainLightIndex;
    if (shadowLightIndex == -1)
    {
        return;
    }

    VisibleLight shadowLight = lightData.visibleLights[shadowLightIndex];

    // NOTE: Do NOT mix ProfilingScope with named CommandBuffers, i.e. CommandBufferPool.Get("name").
    // Currently there's an issue which results in mismatched markers.
    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MainLightShadow)))
    {
        var settings = new ShadowDrawingSettings(cullResults, shadowLightIndex);
        settings.useRenderingLayerMaskTest = UniversalRenderPipeline.asset.supportsLightLayers;

        for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
        {
            settings.splitData = m_CascadeSlices[cascadeIndex].splitData;

            Vector4 shadowBias = ShadowUtils.GetShadowBias(ref shadowLight, shadowLightIndex, ref shadowData, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].resolution);
            ShadowUtils.SetupShadowCasterConstantBuffer(cmd, ref shadowLight, shadowBias);
            CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.CastingPunctualLightShadow, false);
            ShadowUtils.RenderShadowSlice(cmd, ref context, ref m_CascadeSlices[cascadeIndex], ref settings, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].viewMatrix);
        }

        bool softShadows = shadowLight.light.shadows == LightShadows.Soft && shadowData.supportsSoftShadows;
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadows, shadowData.mainLightShadowCascadesCount == 1);
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadowCascades, shadowData.mainLightShadowCascadesCount > 1);
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.SoftShadows, softShadows);
        SetupMainLightShadowReceiverConstants(cmd, shadowLight, shadowData.supportsSoftShadows);
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
/// <inheritdoc/>
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    // NOTE: Do NOT mix ProfilingScope with named CommandBuffers, i.e. CommandBufferPool.Get("name").
    // Currently there's an issue which results in mismatched markers.
    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.DepthNormalPrepass)))
    {
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();

        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawingSettings(this.shaderTagId, ref renderingData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;

        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;

        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
    }

    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Recursive rendering works as follows:
// - Shaders have a _RayTracing property.
//   When this property is set to true, a RayTracingPrepass pass on the material is enabled (otherwise it is disabled).
// - Before the prepass we render all objects with a RayTracingPrepass pass enabled into the depth buffer, to save performance.
//   Note that ray-traced objects are excluded from the DepthPrepass, GBuffer and Forward passes, but not from the
//   motion vector pass, so we can still benefit from motion vectors. This is handled in VertMesh.hlsl (see below).
//   However, rendering motion vectors currently tags the stencil for deferred lighting, and thus could produce overshading.
// - After the Transparent Depth pass we render all objects with a RayTracingPrepass pass enabled into a mask buffer
//   (this needs to depth test, but not to write depth).
//   Note: we render twice: once to save performance and once to write the mask. If we wrote the mask in the first pass it
//   would not account for objects that render on top of the ray-traced ones (to handle that we would need to run the pass
//   after the depth buffer is ready, i.e. after the GBuffer pass, so we could not save performance).
// - During RaytracingRecursiveRender we perform a RayTracingRendering.raytrace call on all pixels tagged in the mask.
//   Meshes must be excluded from the regular passes to save performance (for opaque) and to get correct results (for transparent).
//   For this we cull the meshes by setting their position to NaN when _RayTracing is true and _EnableRecursiveRayTracing is true.
//   We use this method to avoid dealing with the RenderQueue, and it lets us dynamically disable recursive rendering
//   and fall back to classic rasterized transparency. The code for the culling is in VertMesh().
//   If ray tracing is disabled, _EnableRecursiveRayTracing is set to false and no culling happens.
//   Objects are still rendered in the shadow and motion vector passes to keep their properties.
// We render recursive rendering objects before transparents, so transparent objects can be overlaid on top,
// like a lens flare on top of a headlight. We write depth, so objects behind are correctly z-tested, since recursive rendering
// re-renders everything (meaning we should also support fog and sky in it).
void RaytracingRecursiveRender(HDCamera hdCamera, CommandBuffer cmd)
{
    // If ray tracing is disabled in the frame settings or the effect is not enabled
    RecursiveRendering recursiveSettings = hdCamera.volumeStack.GetComponent<RecursiveRendering>();
    if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) || !recursiveSettings.enable.value)
    {
        return;
    }

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RayTracingRecursiveRendering)))
    {
        RTHandle debugBuffer = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA0);
        RecursiveRendererParameters rrParams = PrepareRecursiveRendererParameters(hdCamera, recursiveSettings);
        RecursiveRendererResources rrResources = PrepareRecursiveRendererResources(debugBuffer);
        ExecuteRecursiveRendering(cmd, rrParams, rrResources);
        PushFullScreenDebugTexture(hdCamera, cmd, debugBuffer, FullScreenDebugMode.RecursiveRayTracing);
    }
}
TextureHandle RenderSubsurfaceScatteringRT(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthStencilBuffer, TextureHandle normalBuffer, TextureHandle colorBuffer, TextureHandle sssColor, TextureHandle diffuseBuffer, TextureHandle motionVectorsBuffer, TextureHandle historyValidationTexture, TextureHandle ssgiBuffer)
{
    using (new RenderGraphProfilingScope(renderGraph, ProfilingSampler.Get(HDProfileId.RaytracingSSS)))
    {
        // Trace the signal
        TextureHandle rtsssResult = TraceRTSSS(renderGraph, hdCamera, depthStencilBuffer, normalBuffer, sssColor, ssgiBuffer, colorBuffer);

        // Denoise the result
        rtsssResult = DenoiseRTSSS(renderGraph, hdCamera, rtsssResult, depthStencilBuffer, normalBuffer, motionVectorsBuffer, historyValidationTexture);

        // Compose it
        rtsssResult = CombineRTSSS(renderGraph, hdCamera, rtsssResult, depthStencilBuffer, sssColor, ssgiBuffer, diffuseBuffer, colorBuffer);

        // Push this version of the texture for debug
        PushFullScreenDebugTexture(renderGraph, rtsssResult, FullScreenDebugMode.RayTracedSubSurface);

        // Return the result
        return rtsssResult;
    }
}
void BlitOctahedralTexturePadding(CommandBuffer cmd, Vector4 scaleOffset, Texture texture, Vector4 sourceScaleOffset, bool blitMips = true)
{
    int mipCount = GetTextureMipmapCount(texture.width, texture.height);
    int pixelPadding = GetTexturePadding();
    Vector2 textureSize = GetPowerOfTwoTextureSize(texture);
    bool bilinear = texture.filterMode != FilterMode.Point;

    if (!blitMips)
    {
        mipCount = 1;
    }

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.BlitTextureInPotAtlas)))
    {
        for (int mipLevel = 0; mipLevel < mipCount; mipLevel++)
        {
            cmd.SetRenderTarget(m_AtlasTexture, mipLevel);
            HDUtils.BlitOctahedralWithPadding(cmd, texture, textureSize, sourceScaleOffset, scaleOffset, mipLevel, bilinear, pixelPadding);
        }
    }
}
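GetTextureMipmapCount is a helper defined elsewhere in this class; as a rough sketch (the exact rounding is an assumption, not taken from this snippet), the full mip chain length of a width x height texture is one plus the base-2 logarithm of its largest dimension:

// Hypothetical sketch of a mip-count helper: a 512x256 texture yields 1 + log2(512) = 10 mips.
static int GetTextureMipmapCount(int width, int height)
{
    int maxSize = Mathf.Max(width, height);
    return 1 + Mathf.FloorToInt(Mathf.Log(maxSize, 2.0f));
}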
bool RenderLightScreenSpaceShadows(HDCamera hdCamera, CommandBuffer cmd)
{
    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingLightShadow)))
    {
        // Loop through all the potential screen space light shadows
        for (int lightIdx = 0; lightIdx < m_ScreenSpaceShadowIndex; ++lightIdx)
        {
            // This matches the directional light
            if (!m_CurrentScreenSpaceShadowData[lightIdx].valid)
            {
                continue;
            }

            // Fetch the light data and additional light data
            LightData currentLight = m_lightList.lights[m_CurrentScreenSpaceShadowData[lightIdx].lightDataIndex];
            HDAdditionalLightData currentAdditionalLightData = m_CurrentScreenSpaceShadowData[lightIdx].additionalLightData;

            // Trigger the right algorithm based on the light type
            switch (currentLight.lightType)
            {
                case GPULightType.Rectangle:
                {
                    RenderAreaScreenSpaceShadow(cmd, hdCamera, currentLight, currentAdditionalLightData, m_CurrentScreenSpaceShadowData[lightIdx].lightDataIndex);
                }
                break;
                case GPULightType.Point:
                case GPULightType.Spot:
                {
                    RenderPunctualScreenSpaceShadow(cmd, hdCamera, currentLight, currentAdditionalLightData, m_CurrentScreenSpaceShadowData[lightIdx].lightDataIndex);
                }
                break;
            }
        }
        return true;
    }
}
void RenderOpaqueMotion(Camera RenderCamera, CullingResults CullingData)
{
    RenderCamera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;

    // Request Resource
    RendererList RenderList = RendererList.Create(CreateRendererListDesc(CullingData, RenderCamera, InfinityPassIDs.OpaqueMotion));

    RDGTextureRef DepthTexture = GraphBuilder.ScopeTexture(InfinityShaderIDs.RT_DepthBuffer);
    RDGTextureDesc MotionDesc = new RDGTextureDesc(RenderCamera.pixelWidth, RenderCamera.pixelHeight)
    {
        clearBuffer = true,
        dimension = TextureDimension.Tex2D,
        clearColor = Color.clear,
        enableMSAA = false,
        bindTextureMS = false,
        name = "MotionBufferTexture",
        colorFormat = GraphicsFormat.R16G16_SFloat
    };
    RDGTextureRef MotionTexture = GraphBuilder.CreateTexture(MotionDesc, InfinityShaderIDs.RT_MotionBuffer);
    GraphBuilder.ScopeTexture(InfinityShaderIDs.RT_MotionBuffer, MotionTexture);

    // Add RenderPass
    GraphBuilder.AddRenderPass<FOpaqueMotionData>("OpaqueMotion", ProfilingSampler.Get(CustomSamplerId.OpaqueMotion),
        (ref FOpaqueMotionData PassData, ref RDGPassBuilder PassBuilder) =>
        {
            PassData.RendererList = RenderList;
            PassData.MotionBuffer = PassBuilder.UseColorBuffer(MotionTexture, 0);
            PassData.DepthBuffer = PassBuilder.UseDepthBuffer(DepthTexture, EDepthAccess.Read);
        },
        (ref FOpaqueMotionData PassData, RDGContext GraphContext) =>
        {
            RendererList MotionRenderList = PassData.RendererList;
            MotionRenderList.drawSettings.sortingSettings = new SortingSettings(RenderCamera) { criteria = SortingCriteria.CommonOpaque };
            MotionRenderList.drawSettings.perObjectData = PerObjectData.MotionVectors;
            MotionRenderList.drawSettings.enableInstancing = RenderPipelineAsset.EnableInstanceBatch;
            MotionRenderList.drawSettings.enableDynamicBatching = RenderPipelineAsset.EnableDynamicBatch;
            MotionRenderList.filteringSettings.renderQueueRange = RenderQueueRange.opaque;
            MotionRenderList.filteringSettings.excludeMotionVectorObjects = false;
            GraphContext.RenderContext.DrawRenderers(MotionRenderList.cullingResult, ref MotionRenderList.drawSettings, ref MotionRenderList.filteringSettings);
        });
}
void RenderIndirectDiffuseQuality(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, int frameCount)
{
    // First thing to check is: Do we have a valid ray-tracing environment?
    GlobalIllumination giSettings = hdCamera.volumeStack.GetComponent<GlobalIllumination>();
    LightCluster lightClusterSettings = hdCamera.volumeStack.GetComponent<LightCluster>();
    RayTracingSettings rtSettings = hdCamera.volumeStack.GetComponent<RayTracingSettings>();

    // Shaders that are used
    RayTracingShader indirectDiffuseRT = m_Asset.renderPipelineRayTracingResources.indirectDiffuseRaytracingRT;

    // Request the intermediate texture we will be using
    RTHandle intermediateBuffer1 = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA1);

    // Bind all the parameters for ray tracing
    BindRayTracedIndirectDiffuseData(cmd, hdCamera, indirectDiffuseRT, giSettings, lightClusterSettings, rtSettings, m_IndirectDiffuseBuffer, intermediateBuffer1);

    // Compute the actual resolution that is needed based on the quality
    int widthResolution = hdCamera.actualWidth;
    int heightResolution = hdCamera.actualHeight;

    // Only use the shader variant that has multi bounce if the bounce count > 1
    CoreUtils.SetKeyword(cmd, "MULTI_BOUNCE_INDIRECT", giSettings.bounceCount.value > 1);

    // Run the computation
    CoreUtils.SetKeyword(cmd, "DIFFUSE_LIGHTING_ONLY", true);
    cmd.DispatchRays(indirectDiffuseRT, m_RayGenIndirectDiffuseIntegrationName, (uint)widthResolution, (uint)heightResolution, (uint)hdCamera.viewCount);

    // Disable the keywords we do not need anymore
    CoreUtils.SetKeyword(cmd, "DIFFUSE_LIGHTING_ONLY", false);
    CoreUtils.SetKeyword(cmd, "MULTI_BOUNCE_INDIRECT", false);

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingFilterIndirectDiffuse)))
    {
        if (giSettings.denoise.value)
        {
            DenoiseIndirectDiffuseBuffer(hdCamera, cmd, giSettings);
        }
    }
}
public int FetchSlice(CommandBuffer cmd, Texture texture)
{
    bool needUpdate;
    var sliceIndex = m_TextureCache.ReserveSlice(texture, out needUpdate);
    if (sliceIndex != -1)
    {
        if (needUpdate || m_ProbeBakingState[sliceIndex] != ProbeFilteringState.Ready)
        {
            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.ConvolveReflectionProbe)))
            {
                // For now baking is done directly but will be time sliced in the future. Just preparing the code here.
                m_ProbeBakingState[sliceIndex] = ProbeFilteringState.Convolving;

                Texture[] result = ConvolveProbeTexture(cmd, texture);
                if (result == null)
                {
                    return -1;
                }

                if (m_PerformBC6HCompression)
                {
                    cmd.BC6HEncodeFastCubemap(result[0], m_ProbeSize, m_TextureCache.GetTexCache(), 0, int.MaxValue, sliceIndex);
                    m_TextureCache.SetSliceHash(sliceIndex, m_TextureCache.GetTextureHash(texture));
                }
                else
                {
                    // Be careful to provide the update count from the input texture, not the temporary one used for convolving.
                    m_TextureCache.UpdateSlice(cmd, sliceIndex, result, m_TextureCache.GetTextureHash(texture));
                }

                m_ProbeBakingState[sliceIndex] = ProbeFilteringState.Ready;
            }
        }
    }
    return sliceIndex;
}
public override void RenderInit(CommandBuffer cmd)
{
    if (m_precomputedFGDTablesAreInit || m_preIntegratedFGDMaterial_Ward == null || m_preIntegratedFGDMaterial_CookTorrance == null)
    {
        return;
    }

    if (GL.wireframe)
    {
        m_preIntegratedFGD_Ward.Create();
        m_preIntegratedFGD_CookTorrance.Create();
        return;
    }

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.PreIntegradeWardCookTorrance)))
    {
        CoreUtils.DrawFullScreen(cmd, m_preIntegratedFGDMaterial_Ward, new RenderTargetIdentifier(m_preIntegratedFGD_Ward));
        CoreUtils.DrawFullScreen(cmd, m_preIntegratedFGDMaterial_CookTorrance, new RenderTargetIdentifier(m_preIntegratedFGD_CookTorrance));
    }

    m_precomputedFGDTablesAreInit = true;
}
public void ResolveSharedRT(CommandBuffer cmd, HDCamera hdCamera)
{
    if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
    {
        Debug.Assert(m_MSAASupported);
        using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.ResolveMSAADepth)))
        {
            // Grab the RTIs and set the output render targets
            m_RTIDs3[0] = m_CameraDepthValuesBuffer.nameID;
            m_RTIDs3[1] = m_NormalRT.nameID;
            m_RTIDs3[2] = m_MotionVectorsRT.nameID;
            CoreUtils.SetRenderTarget(cmd, m_RTIDs3, m_CameraDepthStencilBuffer);

            // Set the input textures
            Shader.SetGlobalTexture(HDShaderIDs._NormalTextureMS, m_NormalMSAART);
            Shader.SetGlobalTexture(HDShaderIDs._DepthTextureMS, m_DepthAsColorMSAART);
            Shader.SetGlobalTexture(HDShaderIDs._MotionVectorTextureMS, m_MotionVectorsMSAART);

            // Resolve the depth and normal buffers
            cmd.DrawProcedural(Matrix4x4.identity, m_DepthResolveMaterial, SampleCountToPassIndex(m_MSAASamples), MeshTopology.Triangles, 3, 1);
        }
    }
}
void BuildLightCluster(HDCamera hdCamera, CommandBuffer cmd)
{
    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingBuildCluster)))
    {
        // Grab the kernel
        ComputeShader lightClusterCS = m_RenderPipelineRayTracingResources.lightClusterBuildCS;
        int lightClusterKernel = lightClusterCS.FindKernel(m_LightClusterKernelName);

        // Inject all the parameters
        cmd.SetComputeBufferParam(lightClusterCS, lightClusterKernel, HDShaderIDs._RaytracingLightClusterRW, m_LightCluster);
        cmd.SetComputeVectorParam(lightClusterCS, _ClusterCellSize, clusterCellSize);
        cmd.SetComputeBufferParam(lightClusterCS, lightClusterKernel, _LightVolumes, m_LightVolumeGPUArray);
        cmd.SetComputeFloatParam(lightClusterCS, _LightVolumeCount, HDShadowUtils.Asfloat(totalLightCount));
        cmd.SetComputeBufferParam(lightClusterCS, lightClusterKernel, _RaytracingLightCullResult, m_LightCullResult);

        // Dispatch a compute
        int numGroupsX = 8;
        int numGroupsY = 8;
        int numGroupsZ = 4;
        cmd.DispatchCompute(lightClusterCS, lightClusterKernel, numGroupsX, numGroupsY, numGroupsZ);
    }
}
void RenderPresentView(Camera RenderCamera, RDGTextureRef SourceTexture, RenderTexture DestTexture)
{
    // Add PresentPass
    GraphBuilder.AddPass<PresentViewData>("Present", ProfilingSampler.Get(CustomSamplerId.Present),
        (ref PresentViewData PassData, ref RDGPassBuilder PassBuilder) =>
        {
            PassData.SrcBuffer = PassBuilder.ReadTexture(SourceTexture);
            PassData.DestBuffer = new RenderTargetIdentifier(DestTexture);
        },
        (ref PresentViewData PassData, RDGContext GraphContext) =>
        {
            RenderTexture SrcBuffer = PassData.SrcBuffer;
            float4 ScaleBias = new float4((float)RenderCamera.pixelWidth / (float)SrcBuffer.width, (float)RenderCamera.pixelHeight / (float)SrcBuffer.height, 0.0f, 0.0f);
            if (DestTexture == null)
            {
                ScaleBias.w = ScaleBias.y;
                ScaleBias.y *= -1;
            }

            GraphContext.CmdBuffer.SetGlobalVector(InfinityShaderIDs.ScaleBias, ScaleBias);
            GraphContext.CmdBuffer.DrawFullScreen(GraphicsUtility.GetViewport(RenderCamera), PassData.SrcBuffer, PassData.DestBuffer, 1);
        });
}
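A note on the ScaleBias flip above: when presenting to the backbuffer (DestTexture == null), the image needs a vertical flip, so the Y scale is negated and the original Y scale is moved into the W component as an offset. Assuming the shader applies the usual scale-then-bias to the V coordinate (an assumption about the shader side, which is not shown here), v in [0, 1] then maps to [scaleY, 0] instead of [0, scaleY]:

// v' = v * ScaleBias.y + ScaleBias.w
// Unflipped: y = s, w = 0  -> v' goes 0 .. s
// Flipped:   y = -s, w = s -> v' goes s .. 0 (same range, reversed vertically)
static float FlipV(float v, float scaleY) => v * -scaleY + scaleY;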
private void SortLightKeys()
{
    using (new ProfilingScope(null, ProfilingSampler.Get(HDProfileId.SortVisibleLights)))
    {
        // Tuned against the PS4 console:
        // - insertion sort on 32 items has a worst case of about 3 microseconds,
        // - a non-recursive merge sort on 200 items takes around 23 microseconds,
        // - from roughly 200 items upwards, radix sort beats everything.
        var sortSize = sortedLightCounts;
        if (sortSize <= 32)
        {
            CoreUnsafeUtils.InsertionSort(m_SortKeys, sortSize);
        }
        else if (m_Size <= 200)
        {
            CoreUnsafeUtils.MergeSort(m_SortKeys, sortSize, ref m_SortSupportArray);
        }
        else
        {
            CoreUnsafeUtils.RadixSort(m_SortKeys, sortSize, ref m_SortSupportArray);
        }
    }
}
bool UpdatePlanarTexture(CommandBuffer cmd, Texture texture, ref IBLFilterBSDF.PlanarTextureFilteringParameters planarTextureFilteringParameters, ref Vector4 scaleOffset)
{
    bool success = false;

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.ConvolvePlanarReflectionProbe)))
    {
        m_ProbeBakingState[scaleOffset] = ProbeFilteringState.Convolving;

        Vector4 sourceScaleOffset;
        Texture convolvedTexture = ConvolveProbeTexture(cmd, texture, ref planarTextureFilteringParameters, out sourceScaleOffset);
        if (convolvedTexture == null)
        {
            return false;
        }

        if (m_TextureAtlas.IsCached(out scaleOffset, texture))
        {
            success = m_TextureAtlas.UpdateTexture(cmd, texture, convolvedTexture, ref scaleOffset, sourceScaleOffset);
        }
        else
        {
            // Reserve space for the rendertarget and then blit the result of the convolution at this
            // location, we don't use the UpdateTexture because it will keep the reference to the
            // temporary target used to convolve the result of the probe rendering.
            if (!m_TextureAtlas.AllocateTextureWithoutBlit(texture, texture.width, texture.height, ref scaleOffset))
            {
                return false;
            }

            m_TextureAtlas.BlitTexture(cmd, scaleOffset, convolvedTexture, sourceScaleOffset);
            success = true;
        }

        m_ProbeBakingState[scaleOffset] = ProbeFilteringState.Ready;
    }

    return success;
}
/// <inheritdoc/>
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (m_ScreenSpaceShadowsMaterial == null)
    {
        Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_ScreenSpaceShadowsMaterial, GetType().Name);
        return;
    }

    if (renderingData.lightData.mainLightIndex == -1)
    {
        return;
    }

    Camera camera = renderingData.cameraData.camera;

    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.ResolveShadows)))
    {
        if (!renderingData.cameraData.xr.enabled)
        {
            cmd.SetViewProjectionMatrices(Matrix4x4.identity, Matrix4x4.identity);
            cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, m_ScreenSpaceShadowsMaterial);
            cmd.SetViewProjectionMatrices(camera.worldToCameraMatrix, camera.projectionMatrix);
        }
        else
        {
            // Avoid setting and restoring camera view and projection matrices when in stereo.
            RenderTargetIdentifier screenSpaceOcclusionTexture = m_ScreenSpaceShadowmap.Identifier();
            Blit(cmd, screenSpaceOcclusionTexture, screenSpaceOcclusionTexture, m_ScreenSpaceShadowsMaterial);
        }
    }

    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
void RenderReflectionsQuality(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount, bool transparent)
{
    // Request the buffers we shall be using
    RTHandle intermediateBuffer0 = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA0);
    RTHandle intermediateBuffer1 = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA1);

    var settings = hdCamera.volumeStack.GetComponent<ScreenSpaceReflection>();
    LightCluster lightClusterSettings = hdCamera.volumeStack.GetComponent<LightCluster>();

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingReflectionEvaluation)))
    {
        // Render the signal
        RTRQualityRenderingParameters rtrQRenderingParameters = PrepareRTRQualityRenderingParameters(hdCamera, settings, transparent);
        RTRQualityRenderingResources rtrQRenderingResources = PrepareRTRQualityRenderingResources(hdCamera, outputTexture);

        // Bind all the required data for ray tracing
        RenderQualityRayTracedReflections(cmd, rtrQRenderingParameters, rtrQRenderingResources);
    }

    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingFilterReflection)))
    {
        if (settings.denoise && !transparent)
        {
            // Grab the history buffer
            RTHandle reflectionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

            // Prepare the parameters and the resources
            HDReflectionDenoiser reflectionDenoiser = GetReflectionDenoiser();
            ReflectionDenoiserParameters reflDenoiserParameters = reflectionDenoiser.PrepareReflectionDenoiserParameters(hdCamera, EvaluateHistoryValidity(hdCamera), settings.denoiserRadius);
            ReflectionDenoiserResources reflectionDenoiserResources = reflectionDenoiser.PrepareReflectionDenoiserResources(hdCamera, outputTexture, reflectionHistory, intermediateBuffer0, intermediateBuffer1);
            HDReflectionDenoiser.DenoiseBuffer(cmd, reflDenoiserParameters, reflectionDenoiserResources);
        }
    }
}
void RenderDirectionalLightScreenSpaceShadow(CommandBuffer cmd, HDCamera hdCamera)
{
    // Should we be executing anything really?
    bool screenSpaceShadowRequired = m_CurrentSunLightAdditionalLightData != null && m_CurrentSunLightAdditionalLightData.WillRenderScreenSpaceShadow();

    // Render directional screen space shadow if required
    if (screenSpaceShadowRequired)
    {
        using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingDirectionalLightShadow)))
        {
            bool rayTracedDirectionalRequired = m_CurrentSunLightAdditionalLightData.WillRenderRayTracedShadow();

            // If the shadow is flagged as ray traced, we need to evaluate it completely
            if (rayTracedDirectionalRequired)
            {
                RenderRayTracedDirectionalScreenSpaceShadow(cmd, hdCamera);
            }
            else
            {
                SSShadowDirectionalParameters sssdParams = PrepareSSShadowDirectionalParameters();
                ExecuteSSShadowDirectional(cmd, sssdParams, m_ScreenSpaceShadowTextureArray);
            }
        }
    }
}
void BuildLightCluster(HDCamera hdCamera, CommandBuffer cmd)
{
    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingBuildCluster)))
    {
        var lightClusterSettings = hdCamera.volumeStack.GetComponent<LightCluster>();
        numLightsPerCell = lightClusterSettings.maxNumLightsPercell.value;

        // Make sure the Cluster buffer has the right size
        int bufferSize = 64 * 64 * 32 * (numLightsPerCell + 4);
        if (m_LightCluster.count != bufferSize)
        {
            ResizeClusterBuffer(bufferSize);
        }

        // Grab the kernel
        ComputeShader lightClusterCS = m_RenderPipelineRayTracingResources.lightClusterBuildCS;
        int lightClusterKernel = lightClusterCS.FindKernel(m_LightClusterKernelName);

        // Inject all the parameters
        cmd.SetComputeBufferParam(lightClusterCS, lightClusterKernel, HDShaderIDs._RaytracingLightCluster, m_LightCluster);
        cmd.SetComputeVectorParam(lightClusterCS, HDShaderIDs._MinClusterPos, minClusterPos);
        cmd.SetComputeVectorParam(lightClusterCS, HDShaderIDs._MaxClusterPos, maxClusterPos);
        cmd.SetComputeVectorParam(lightClusterCS, _ClusterCellSize, clusterCellSize);
        cmd.SetComputeFloatParam(lightClusterCS, HDShaderIDs._LightPerCellCount, HDShadowUtils.Asfloat(numLightsPerCell));
        cmd.SetComputeBufferParam(lightClusterCS, lightClusterKernel, _LightVolumes, m_LightVolumeGPUArray);
        cmd.SetComputeFloatParam(lightClusterCS, _LightVolumeCount, HDShadowUtils.Asfloat(totalLightCount));
        cmd.SetComputeBufferParam(lightClusterCS, lightClusterKernel, _RaytracingLightCullResult, m_LightCullResult);

        // Dispatch a compute
        int numGroupsX = 8;
        int numGroupsY = 8;
        int numGroupsZ = 4;
        cmd.DispatchCompute(lightClusterCS, lightClusterKernel, numGroupsX, numGroupsY, numGroupsZ);
    }
}
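For a sense of scale of the buffer sizing above: the cluster is a fixed 64 x 64 x 32 grid, and each cell stores one entry per allowed light plus 4 extra entries (presumably per-cell metadata; that interpretation is an assumption, as is the example light count). The arithmetic:

// 64 * 64 * 32 = 131,072 cells.
// With, say, 10 lights per cell: 131,072 * (10 + 4) = 1,835,008 buffer entries.
static int ClusterBufferSize(int lightsPerCell) => 64 * 64 * 32 * (lightsPerCell + 4);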
TextureHandle RenderSSGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer, ShaderVariablesRaytracing shaderVariablesRayTracingCB)
{
    // Grab the global illumination volume component
    GlobalIllumination giSettings = hdCamera.volumeStack.GetComponent<GlobalIllumination>();

    using (new RenderGraphProfilingScope(renderGraph, ProfilingSampler.Get(HDProfileId.SSGIPass)))
    {
        // Trace the signal
        TextureHandle colorBuffer = TraceSSGI(renderGraph, hdCamera, giSettings, depthPyramid, normalBuffer, motionVectorsBuffer);

        // Denoise the signal
        float historyValidity = EvaluateHistoryValidity(hdCamera);
        SSGIDenoiser ssgiDenoiser = GetSSGIDenoiser();
        ssgiDenoiser.Denoise(renderGraph, hdCamera, depthPyramid, normalBuffer, motionVectorsBuffer, colorBuffer, !giSettings.fullResolutionSS, historyValidity: historyValidity);

        // If this was a half resolution effect, we still have to upscale it
        if (!giSettings.fullResolutionSS)
        {
            colorBuffer = UpscaleSSGI(renderGraph, hdCamera, giSettings, depthPyramid, colorBuffer);
        }

        return colorBuffer;
    }
}
public void EvaluateClusterDebugView(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthStencilBuffer, TextureHandle depthPyramid)
{
    TextureHandle debugTexture;
    using (var builder = renderGraph.AddRenderPass<LightClusterDebugPassData>("Debug Texture for the Light Cluster", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingDebugCluster)))
    {
        builder.EnableAsyncCompute(false);

        passData.parameters = PrepareLightClusterDebugParameters(hdCamera);
        passData.depthStencilBuffer = builder.UseDepthBuffer(depthStencilBuffer, DepthAccess.Read);
        passData.depthPyramid = builder.ReadTexture(depthPyramid);
        passData.outputBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Light Cluster Debug Texture" }));

        builder.SetRenderFunc(
            (LightClusterDebugPassData data, RenderGraphContext ctx) =>
            {
                // We need to fill the structure that holds the various resources
                LightClusterDebugResources resources = new LightClusterDebugResources();
                resources.depthStencilBuffer = data.depthStencilBuffer;
                resources.depthTexture = data.depthPyramid;
                resources.debugLightClusterTexture = data.outputBuffer;
                ExecuteLightClusterDebug(ctx.cmd, data.parameters, resources);
            });

        debugTexture = passData.outputBuffer;
    }

    m_RenderPipeline.PushFullScreenDebugTexture(renderGraph, debugTexture, FullScreenDebugMode.LightCluster);
}
public void BlitCachedIntoAtlas(RenderGraph renderGraph, HDCachedShadowAtlas cachedAtlas, Material blitMaterial, string passName, HDProfileId profileID)
{
    if (m_MixedRequestsPendingBlits.Count > 0)
    {
        using (var builder = renderGraph.AddRenderPass<BlitCachedShadowPassData>(passName, out var passData, ProfilingSampler.Get(profileID)))
        {
            passData.requestsWaitingBlits = m_MixedRequestsPendingBlits;
            passData.blitMaterial = blitMaterial;
            passData.cachedShadowAtlasSize = new Vector2Int(cachedAtlas.width, cachedAtlas.height);
            passData.sourceCachedAtlas = builder.ReadTexture(cachedAtlas.GetOutputTexture(renderGraph));
            passData.atlasTexture = builder.WriteTexture(GetOutputTexture(renderGraph));

            builder.SetRenderFunc(
                (BlitCachedShadowPassData data, RenderGraphContext ctx) =>
                {
                    foreach (var request in data.requestsWaitingBlits)
                    {
                        var mpb = ctx.renderGraphPool.GetTempMaterialPropertyBlock();

                        ctx.cmd.SetRenderTarget(data.atlasTexture);
                        ctx.cmd.SetViewport(request.dynamicAtlasViewport);

                        Vector4 sourceScaleBias = new Vector4(request.cachedAtlasViewport.width / data.cachedShadowAtlasSize.x,
                                                              request.cachedAtlasViewport.height / data.cachedShadowAtlasSize.y,
                                                              request.cachedAtlasViewport.x / data.cachedShadowAtlasSize.x,
                                                              request.cachedAtlasViewport.y / data.cachedShadowAtlasSize.y);

                        mpb.SetTexture(HDShaderIDs._CachedShadowmapAtlas, data.sourceCachedAtlas);
                        mpb.SetVector(HDShaderIDs._BlitScaleBias, sourceScaleBias);
                        CoreUtils.DrawFullScreen(ctx.cmd, data.blitMaterial, mpb, 0);
                    }

                    data.requestsWaitingBlits.Clear();
                });
        }
    }
}
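To make the sourceScaleBias computation above concrete, it converts the cached viewport from texels into normalized UVs within the cached atlas. The numbers below are illustrative, not taken from the snippet: a 512 x 512 viewport at offset (1024, 0) inside a 4096 x 4096 cached atlas gives

// scale  = viewport size / atlas size      -> (512 / 4096, 512 / 4096) = (0.125, 0.125)
// offset = viewport position / atlas size  -> (1024 / 4096, 0 / 4096)  = (0.25, 0.0)
var sourceScaleBias = new Vector4(0.125f, 0.125f, 0.25f, 0.0f); // (scaleX, scaleY, offsetX, offsetY)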