Example No. 1
        void PushVolumetricLightingGlobalParams(HDCamera hdCamera, CommandBuffer cmd, int frameIndex)
        {
            if (!Fog.IsVolumetricLightingEnabled(hdCamera))
            {
                cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, HDUtils.clearTexture3D);
                return;
            }

            // Get the interpolated anisotropy value.
            var fog = VolumeManager.instance.stack.GetComponent<Fog>();

            SetPreconvolvedAmbientLightProbe(cmd, fog.globalLightProbeDimmer.value, fog.anisotropy.value);

            var currFrameParams = hdCamera.vBufferParams[0];
            var prevFrameParams = hdCamera.vBufferParams[1];

            // The lighting & density buffers are shared by all cameras.
            // The history & feedback buffers are specific to the camera.
            // These 2 types of buffers can have different sizes.
            // Additionally, history buffers can have different sizes, since they are not resized at the same time
            // (every frame, we swap the buffers, and resize the feedback buffer but not the history buffer).
            // The viewport size is the same for all of these buffers.
            // All of these buffers may have sub-native-resolution viewports.
            // The 3rd dimension (number of slices) is the same for all of these buffers.
            Vector2Int sharedBufferSize = new Vector2Int(m_LightingBufferHandle.rt.width, m_LightingBufferHandle.rt.height);

            Debug.Assert(m_LightingBufferHandle.rt.width == m_DensityBufferHandle.rt.width);
            Debug.Assert(m_LightingBufferHandle.rt.height == m_DensityBufferHandle.rt.height);

            Vector2Int historyBufferSize = Vector2Int.zero;

            if (hdCamera.IsVolumetricReprojectionEnabled())
            {
                var historyRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);

                historyBufferSize = new Vector2Int(historyRT.rt.width, historyRT.rt.height);
            }

            var cvp = currFrameParams.viewportSize;
            var pvp = prevFrameParams.viewportSize;

            // Adjust slices for XR rendering: VBuffer is shared for all single-pass views
            int sliceCount = cvp.z / hdCamera.viewCount;

            cmd.SetGlobalVector(HDShaderIDs._VBufferViewportSize, new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y));
            cmd.SetGlobalInt(HDShaderIDs._VBufferSliceCount, sliceCount);
            cmd.SetGlobalFloat(HDShaderIDs._VBufferRcpSliceCount, 1.0f / sliceCount);
            cmd.SetGlobalVector(HDShaderIDs._VBufferSharedUvScaleAndLimit, currFrameParams.ComputeUvScaleAndLimit(sharedBufferSize));
            cmd.SetGlobalVector(HDShaderIDs._VBufferDistanceEncodingParams, currFrameParams.depthEncodingParams);
            cmd.SetGlobalVector(HDShaderIDs._VBufferDistanceDecodingParams, currFrameParams.depthDecodingParams);
            cmd.SetGlobalFloat(HDShaderIDs._VBufferLastSliceDist, currFrameParams.ComputeLastSliceDistance(sliceCount));
            cmd.SetGlobalFloat(HDShaderIDs._VBufferRcpInstancedViewCount, 1.0f / hdCamera.viewCount);

            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevViewportSize, new Vector4(pvp.x, pvp.y, 1.0f / pvp.x, 1.0f / pvp.y));
            cmd.SetGlobalVector(HDShaderIDs._VBufferHistoryPrevUvScaleAndLimit, prevFrameParams.ComputeUvScaleAndLimit(historyBufferSize));
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevDepthEncodingParams, prevFrameParams.depthEncodingParams);
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevDepthDecodingParams, prevFrameParams.depthDecodingParams);

            cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, m_LightingBufferHandle);
        }
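
The comments above note that these buffers can use sub-native-resolution viewports inside larger physical textures, which is why a UV scale and limit are pushed to the shader alongside the viewport size. Below is a minimal sketch of how such a scale/limit pair can be derived from a viewport and a buffer size; the helper name and the half-texel clamp are assumptions, not the HDRP implementation.

        static Vector4 ComputeUvScaleAndLimitSketch(Vector2Int viewportSize, Vector2Int bufferSize)
        {
            // Scale maps viewport UVs into the corner of the physical buffer that is actually used.
            float scaleX = (float)viewportSize.x / bufferSize.x;
            float scaleY = (float)viewportSize.y / bufferSize.y;
            // Limit clamps half a texel inside the viewport so bilinear taps do not leak
            // into the unused region of the shared buffer (assumption about how it is sampled).
            float limitX = (viewportSize.x - 0.5f) / bufferSize.x;
            float limitY = (viewportSize.y - 0.5f) / bufferSize.y;
            return new Vector4(scaleX, scaleY, limitX, limitY);
        }
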
Example No. 2
        public TextureHandle Render(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthPyramid, TextureHandle motionVectors, int frameCount)
        {
            var settings = hdCamera.volumeStack.GetComponent<AmbientOcclusion>();

            TextureHandle result;

            // AO has side effects (as it uses an imported history buffer)
            // So we can't rely on automatic pass stripping. This is why we have to be explicit here.
            if (IsActive(hdCamera, settings))
            {
                {
                    EnsureRTSize(settings, hdCamera);

                    var historyRT      = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion);
                    var currentHistory = renderGraph.ImportTexture(historyRT);
                    var outputHistory  = renderGraph.ImportTexture(hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion));

                    Vector2 historySize = new Vector2(historyRT.referenceSize.x * historyRT.scaleFactor.x,
                                                      historyRT.referenceSize.y * historyRT.scaleFactor.y);
                    var rtScaleForHistory = hdCamera.historyRTHandleProperties.rtHandleScale;

                    var aoParameters = PrepareRenderAOParameters(hdCamera, renderGraph.rtHandleProperties, historySize * rtScaleForHistory, frameCount);

                    var packedData = RenderAO(renderGraph, aoParameters, depthPyramid);
                    result = DenoiseAO(renderGraph, aoParameters, motionVectors, packedData, currentHistory, outputHistory);
                }
            }
            else
            {
                result = renderGraph.ImportTexture(TextureXR.GetBlackTexture(), HDShaderIDs._AmbientOcclusionTexture);
            }
            return result;
        }
        public RenderGraphResource Render(RenderGraph renderGraph, HDCamera hdCamera, RenderGraphResource depthPyramid, RenderGraphResource motionVectors, int frameCount)
        {
            var settings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();

            RenderGraphResource result;

            // AO has side effects (as it uses an imported history buffer)
            // So we can't rely on automatic pass stripping. This is why we have to be explicit here.
            if (IsActive(hdCamera, settings))
            {
                {
                    EnsureRTSize(settings, hdCamera);

                    var aoParameters = PrepareRenderAOParameters(hdCamera, renderGraph.rtHandleProperties, frameCount);

                    var currentHistory = renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion));
                    var outputHistory  = renderGraph.ImportTexture(hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion));

                    var packedData = RenderAO(renderGraph, aoParameters, depthPyramid);
                    result = DenoiseAO(renderGraph, aoParameters, motionVectors, packedData, currentHistory, outputHistory);
                }
            }
            else
            {
                result = renderGraph.ImportTexture(TextureXR.GetBlackTexture(), HDShaderIDs._AmbientOcclusionTexture);
            }
            return result;
        }
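
Both overloads gate the work on IsActive(hdCamera, settings) because, as the comments say, the imported history buffers defeat automatic pass stripping. The sketch below shows the kind of check such a gate typically performs; the exact conditions are assumptions rather than the actual implementation.

        static bool IsActiveSketch(HDCamera hdCamera, AmbientOcclusion settings)
        {
            // Only record the AO passes when the camera has SSAO enabled and the volume
            // requests a visible amount of occlusion (assumed conditions).
            return hdCamera.frameSettings.IsEnabled(FrameSettingsField.SSAO)
                   && settings.intensity.value > 0f;
        }
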
        TraceOutput TraceSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination giSettings, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer)
        {
            using (var builder = renderGraph.AddRenderPass<TraceSSGIPassData>("Trace SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGITrace)))
            {
                builder.EnableAsyncCompute(false);

                passData.parameters   = PrepareSSGITraceParameters(hdCamera, giSettings);
                passData.depthTexture = builder.ReadTexture(depthPyramid);
                passData.normalBuffer = builder.ReadTexture(normalBuffer);
                if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.ObjectMotionVectors))
                {
                    passData.motionVectorsBuffer = builder.ReadTexture(renderGraph.defaultResources.blackTextureXR);
                }
                else
                {
                    passData.motionVectorsBuffer = builder.ReadTexture(motionVectorsBuffer);
                }

                var colorPyramid = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);
                passData.colorPyramid = colorPyramid != null ? builder.ReadTexture(renderGraph.ImportTexture(colorPyramid)) : renderGraph.defaultResources.blackTextureXR;

                var historyDepth = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                passData.historyDepth = historyDepth != null ? builder.ReadTexture(renderGraph.ImportTexture(historyDepth)) : renderGraph.defaultResources.blackTextureXR;

                passData.hitPointBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Hit Point"
                });
                passData.outputBuffer0 = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Signal0"
                }));
                passData.outputBuffer1 = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Signal1"
                }));

                builder.SetRenderFunc(
                    (TraceSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSGITraceResources resources  = new SSGITraceResources();
                    resources.depthTexture        = data.depthTexture;
                    resources.normalBuffer        = data.normalBuffer;
                    resources.motionVectorsBuffer = data.motionVectorsBuffer;
                    resources.colorPyramid        = data.colorPyramid;
                    resources.historyDepth        = data.historyDepth;
                    resources.hitPointBuffer      = data.hitPointBuffer;
                    resources.outputBuffer0       = data.outputBuffer0;
                    resources.outputBuffer1       = data.outputBuffer1;
                    ExecuteSSGITrace(ctx.cmd, data.parameters, resources);
                });
                TraceOutput traceOutput = new TraceOutput();
                traceOutput.outputBuffer0 = passData.outputBuffer0;
                traceOutput.outputBuffer1 = passData.outputBuffer1;
                return traceOutput;
            }
        }
Example No. 5
        SSGITraceResources PrepareSSGITraceResources(HDCamera hdCamera, RTHandle outputBuffer, RTHandle hitPointBuffer)
        {
            SSGITraceResources ssgiTraceResources = new SSGITraceResources();

            // Input buffers
            ssgiTraceResources.depthTexture        = m_SharedRTManager.GetDepthTexture();
            ssgiTraceResources.normalBuffer        = m_SharedRTManager.GetNormalBuffer();
            ssgiTraceResources.motionVectorsBuffer = m_SharedRTManager.GetMotionVectorsBuffer();
            var previousColorPyramid = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);

            ssgiTraceResources.colorPyramid = previousColorPyramid != null ? previousColorPyramid : TextureXR.GetBlackTexture();
            var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);

            ssgiTraceResources.historyDepth = historyDepthBuffer != null ? historyDepthBuffer : TextureXR.GetBlackTexture();

            // Output buffers
            ssgiTraceResources.hitPointBuffer = hitPointBuffer;
            ssgiTraceResources.outputBuffer = outputBuffer;

            return ssgiTraceResources;
        }
        static RTHandle RequestPreviousVolumetricCloudsHistoryTexture1(HDCamera hdCamera)
        {
            return hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricClouds1)
                   ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.VolumetricClouds1,
                                                   VolumetricClouds1HistoryBufferAllocatorFunction, 2);
        }
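
GetPreviousFrameRT returns null until the history slot has been allocated, hence the null-coalescing call to AllocHistoryFrameRT. The allocator it receives (VolumetricClouds1HistoryBufferAllocatorFunction) is not shown above; the sketch below illustrates what a Func<string, int, RTHandleSystem, RTHandle> allocator for such a history buffer can look like, with the format, scale and flags being assumptions.

        static RTHandle HistoryBufferAllocatorSketch(string viewName, int frameIndex, RTHandleSystem rtHandleSystem)
        {
            // Allocates one buffer of the double-buffered history; HDCamera swaps
            // current/previous every frame. Format and flags are illustrative only.
            return rtHandleSystem.Alloc(Vector2.one, TextureXR.slices, dimension: TextureXR.dimension,
                                        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
                                        enableRandomWrite: true,
                                        name: string.Format("{0}_CloudsHistorySketch{1}", viewName, frameIndex));
        }
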
Example No. 7
        TraceOutput TraceSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination giSettings, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle stencilBuffer, TextureHandle motionVectorsBuffer)
        {
            using (var builder = renderGraph.AddRenderPass<TraceSSGIPassData>("Trace SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGITrace)))
            {
                builder.EnableAsyncCompute(false);

                // Full-resolution trace only in this snippet; the half-resolution variant is compiled out.
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.halfScreenSize.Set(passData.texWidth * 0.5f, passData.texHeight * 0.5f, 2.0f / passData.texWidth, 2.0f / passData.texHeight);

                passData.viewCount = hdCamera.viewCount;

                // Set the generation parameters
                passData.nearClipPlane    = hdCamera.camera.nearClipPlane;
                passData.farClipPlane     = hdCamera.camera.farClipPlane;
                passData.fullResolutionSS = true;
                passData.thickness        = giSettings.depthBufferThickness.value;
                passData.raySteps         = giSettings.maxRaySteps;
                passData.frameIndex       = RayTracingFrameIndex(hdCamera, 16);
                passData.colorPyramidUvScaleAndLimitPrevFrame = HDUtils.ComputeViewportScaleAndLimit(hdCamera.historyRTHandleProperties.previousViewportSize, hdCamera.historyRTHandleProperties.previousRenderTargetSize);

                // Grab the right kernel
                passData.ssGICS        = asset.renderPipelineResources.shaders.screenSpaceGlobalIlluminationCS;
                passData.traceKernel   = m_TraceGlobalIlluminationKernel;       // full-resolution kernels, matching fullResolutionSS = true above
                passData.projectKernel = m_ReprojectGlobalIlluminationKernel;

                BlueNoise blueNoise = GetBlueNoiseManager();
                passData.ditheredTextureSet          = blueNoise.DitheredTextureSet8SPP();
                passData.shaderVariablesRayTracingCB = m_ShaderVariablesRayTracingCB;
                passData.offsetBuffer = m_DepthBufferMipChainInfo.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer);

                passData.depthTexture  = builder.ReadTexture(depthPyramid);
                passData.normalBuffer  = builder.ReadTexture(normalBuffer);
                passData.stencilBuffer = builder.ReadTexture(stencilBuffer);

                if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.ObjectMotionVectors))
                {
                    passData.motionVectorsBuffer = builder.ReadTexture(renderGraph.defaultResources.blackTextureXR);
                }
                else
                {
                    passData.motionVectorsBuffer = builder.ReadTexture(motionVectorsBuffer);
                }

                // History buffers
                var colorPyramid = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);
                passData.colorPyramid = colorPyramid != null ? builder.ReadTexture(renderGraph.ImportTexture(colorPyramid)) : renderGraph.defaultResources.blackTextureXR;

                var historyDepth = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                passData.historyDepth = historyDepth != null ? builder.ReadTexture(renderGraph.ImportTexture(historyDepth)) : renderGraph.defaultResources.blackTextureXR;

                // Temporary textures
                passData.hitPointBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16_SFloat, enableRandomWrite = true, name = "SSGI Hit Point"
                });

                // Output textures
                passData.outputBuffer0 = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.B10G11R11_UFloatPack32, enableRandomWrite = true, name = "SSGI Color"
                }));
                passData.outputBuffer1 = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16_SFloat, enableRandomWrite = true, name = "SSGI Additional Data"
                }));

                builder.SetRenderFunc(
                    (TraceSSGIPassData data, RenderGraphContext ctx) =>
                {
                    int ssgiTileSize = 8;
                    int numTilesXHR  = (data.texWidth + (ssgiTileSize - 1)) / ssgiTileSize;
                    int numTilesYHR  = (data.texHeight + (ssgiTileSize - 1)) / ssgiTileSize;

                    // Inject all the input scalars
                    float n = data.nearClipPlane;
                    float f = data.farClipPlane;
                    float thicknessScale = 1.0f / (1.0f + data.thickness);
                    float thicknessBias  = -n / (f - n) * (data.thickness * thicknessScale);
                    ctx.cmd.SetComputeFloatParam(data.ssGICS, HDShaderIDs._RayMarchingThicknessScale, thicknessScale);
                    ctx.cmd.SetComputeFloatParam(data.ssGICS, HDShaderIDs._RayMarchingThicknessBias, thicknessBias);
                    ctx.cmd.SetComputeIntParam(data.ssGICS, HDShaderIDs._RayMarchingSteps, data.raySteps);
                    ctx.cmd.SetComputeIntParam(data.ssGICS, HDShaderIDs._RayMarchingReflectSky, 1);
                    ctx.cmd.SetComputeIntParam(data.ssGICS, HDShaderIDs._IndirectDiffuseFrameIndex, data.frameIndex);
                    // Inject half screen size if required
                    if (!data.fullResolutionSS)
                    {
                        ctx.cmd.SetComputeVectorParam(data.ssGICS, HDShaderIDs._HalfScreenSize, data.halfScreenSize);
                    }

                    // Inject the ray-tracing sampling data
                    BlueNoise.BindDitheredTextureSet(ctx.cmd, data.ditheredTextureSet);

                    // Inject all the input textures/buffers
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.traceKernel, HDShaderIDs._DepthTexture, data.depthTexture);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.traceKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.traceKernel, HDShaderIDs._IndirectDiffuseHitPointTextureRW, data.hitPointBuffer);
                    ctx.cmd.SetComputeBufferParam(data.ssGICS, data.traceKernel, HDShaderIDs._DepthPyramidMipLevelOffsets, data.offsetBuffer);

                    // Do the ray marching
                    ctx.cmd.DispatchCompute(data.ssGICS, data.traceKernel, numTilesXHR, numTilesYHR, data.viewCount);

                    // Update global constant buffer.
                    // This should probably be a shader-specific uniform instead of reusing the global constant buffer one, since it's the only one updated here.
                    ConstantBuffer.PushGlobal(ctx.cmd, data.shaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                    // Inject all the input scalars
                    ctx.cmd.SetComputeVectorParam(data.ssGICS, HDShaderIDs._ColorPyramidUvScaleAndLimitPrevFrame, data.colorPyramidUvScaleAndLimitPrevFrame);
                    ctx.cmd.SetComputeIntParam(data.ssGICS, HDShaderIDs._ObjectMotionStencilBit, (int)StencilUsage.ObjectMotionVector);

                    // Bind all the input buffers
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._DepthTexture, data.depthTexture);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._StencilTexture, data.stencilBuffer, 0, RenderTextureSubElement.Stencil);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorsBuffer);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._IndirectDiffuseHitPointTexture, data.hitPointBuffer);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._ColorPyramidTexture, data.colorPyramid);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._HistoryDepthTexture, data.historyDepth);
                    ctx.cmd.SetComputeBufferParam(data.ssGICS, data.projectKernel, HDShaderIDs._DepthPyramidMipLevelOffsets, data.offsetBuffer);

                    // Bind the output texture
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._IndirectDiffuseTexture0RW, data.outputBuffer0);
                    ctx.cmd.SetComputeTextureParam(data.ssGICS, data.projectKernel, HDShaderIDs._IndirectDiffuseTexture1RW, data.outputBuffer1);

                    // Do the reprojection
                    ctx.cmd.DispatchCompute(data.ssGICS, data.projectKernel, numTilesXHR, numTilesYHR, data.viewCount);
                });

                TraceOutput traceOutput = new TraceOutput();
                traceOutput.outputBuffer0 = passData.outputBuffer0;
                traceOutput.outputBuffer1 = passData.outputBuffer1;
                return traceOutput;
            }
        }
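
Both DispatchCompute calls in the pass above size their thread groups with the (dim + tileSize - 1) / tileSize pattern, i.e. integer ceiling division, so that partial tiles at the right and bottom edges still get a group. Written out as a small illustrative helper (the name is hypothetical):

        static int DivRoundUpSketch(int dimension, int tileSize)
        {
            // Ceiling division: e.g. a 1921-pixel-wide target with 8x8 tiles needs 241 groups.
            return (dimension + tileSize - 1) / tileSize;
        }
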
        void RenderSSGI(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, int frameCount)
        {
            // Grab the global illumination volume component
            GlobalIllumination giSettings = hdCamera.volumeStack.GetComponent<GlobalIllumination>();

            // Grab the noise texture manager
            BlueNoise blueNoise = GetBlueNoiseManager();

            // Grab the shaders we shall be using
            ComputeShader ssGICS = m_Asset.renderPipelineResources.shaders.screenSpaceGlobalIlluminationCS;

            // Evaluate the dispatch parameters
            int texWidth, texHeight;

            if (giSettings.fullResolutionSS)
            {
                texWidth  = hdCamera.actualWidth;
                texHeight = hdCamera.actualHeight;
                halfScreenSize.Set(texWidth * 0.5f, texHeight * 0.5f, 2.0f / texWidth, 2.0f / texHeight);
            }
            else
            {
                texWidth  = hdCamera.actualWidth / 2;
                texHeight = hdCamera.actualHeight / 2;
                halfScreenSize.Set(texWidth, texHeight, 1.0f / texWidth, 1.0f / texHeight);
            }
            int areaTileSize = 8;
            int numTilesXHR  = (texWidth + (areaTileSize - 1)) / areaTileSize;
            int numTilesYHR  = (texHeight + (areaTileSize - 1)) / areaTileSize;

            // Based on if we are doing it in half resolution or full, we need to define initial and final buffer to avoid a useless blit
            RTHandle buffer0, buffer1;

            if (!giSettings.fullResolutionSS)
            {
                buffer0 = m_IndirectDiffuseBuffer0;
                buffer1 = m_IndirectDiffuseBuffer1;
            }
            else
            {
                buffer0 = m_IndirectDiffuseBuffer1;
                buffer1 = m_IndirectDiffuseBuffer0;
            }

            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.SsgiPass)))
            {
                // Fetch the right tracing kernel
                int currentKernel = giSettings.fullResolutionSS ? m_TraceGlobalIlluminationKernel : m_TraceGlobalIlluminationHalfKernel;

                // Inject all the input scalars
                float n              = hdCamera.camera.nearClipPlane;
                float f              = hdCamera.camera.farClipPlane;
                float thickness      = giSettings.depthBufferThickness.value;
                float thicknessScale = 1.0f / (1.0f + thickness);
                float thicknessBias  = -n / (f - n) * (thickness * thicknessScale);
                cmd.SetComputeFloatParam(ssGICS, HDShaderIDs._IndirectDiffuseThicknessScale, thicknessScale);
                cmd.SetComputeFloatParam(ssGICS, HDShaderIDs._IndirectDiffuseThicknessBias, thicknessBias);
                cmd.SetComputeIntParam(ssGICS, HDShaderIDs._IndirectDiffuseSteps, giSettings.raySteps);
                cmd.SetComputeFloatParam(ssGICS, HDShaderIDs._IndirectDiffuseMaximalRadius, giSettings.maximalRadius);
                // Inject half screen size if required
                if (!giSettings.fullResolutionSS)
                {
                    cmd.SetComputeVectorParam(ssGICS, HDShaderIDs._HalfScreenSize, halfScreenSize);
                }

                // Inject the ray-tracing sampling data
                blueNoise.BindDitheredRNGData1SPP(cmd);

                // Inject all the input textures/buffers
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthTexture());
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._IndirectDiffuseHitPointTextureRW, m_IndirectDiffuseHitPointBuffer);
                var info = m_SharedRTManager.GetDepthBufferMipChainInfo();
                cmd.SetComputeBufferParam(ssGICS, currentKernel, HDShaderIDs._DepthPyramidMipLevelOffsets, info.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer));
                cmd.SetGlobalBuffer(HDShaderIDs.g_vLightListGlobal, m_TileAndClusterData.lightList);

                // Do the ray marching
                cmd.DispatchCompute(ssGICS, currentKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);

                // Fetch the right kernel to use
                currentKernel = giSettings.fullResolutionSS ? m_ReprojectGlobalIlluminationKernel : m_ReprojectGlobalIlluminationHalfKernel;

                // Update global constant buffer.
                // This should probably be a shader-specific uniform instead of reusing the global constant buffer one, since it's the only one updated here.
                m_ShaderVariablesRayTracingCB._RaytracingIntensityClamp = giSettings.clampValueSS;
                ConstantBuffer.PushGlobal(cmd, m_ShaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                // Inject all the input scalars
                cmd.SetComputeVectorParam(ssGICS, HDShaderIDs._ColorPyramidUvScaleAndLimitPrevFrame, HDUtils.ComputeViewportScaleAndLimit(hdCamera.historyRTHandleProperties.previousViewportSize, hdCamera.historyRTHandleProperties.previousRenderTargetSize));

                // Bind all the input buffers
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._IndirectDiffuseHitPointTexture, m_IndirectDiffuseHitPointBuffer);
                var previousColorPyramid = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._ColorPyramidTexture, previousColorPyramid != null ? previousColorPyramid : TextureXR.GetBlackTexture());
                var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer != null ? historyDepthBuffer : TextureXR.GetBlackTexture());
                cmd.SetComputeBufferParam(ssGICS, currentKernel, HDShaderIDs._DepthPyramidMipLevelOffsets, info.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer));

                // Bind the output texture
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._IndirectDiffuseTextureRW, buffer1);

                // Do the reprojection
                cmd.DispatchCompute(ssGICS, currentKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);

                float historyValidity = 1.0f;
#if UNITY_HDRP_DXR_TESTS_DEFINE
                if (Application.isPlaying)
                {
                    historyValidity = 0.0f;
                }
                else
#endif
                // We need to check if something invalidated the history buffers
                historyValidity *= ValidRayTracingHistory(hdCamera) ? 1.0f : 0.0f;

                // Do the denoising part
                SSGIDenoiser ssgiDenoiser = GetSSGIDenoiser();
                ssgiDenoiser.Denoise(cmd, hdCamera, buffer1, buffer0, halfResolution: !giSettings.fullResolutionSS, historyValidity: historyValidity);

                // If this was a half resolution effect, we still have to upscale it
                if (!giSettings.fullResolutionSS)
                {
                    ComputeShader bilateralUpsampleCS = m_Asset.renderPipelineResources.shaders.bilateralUpsampleCS;

                    // Re-evaluate the dispatch parameters (we are evaluating the upsample in full resolution)
                    numTilesXHR = (hdCamera.actualWidth + (areaTileSize - 1)) / areaTileSize;
                    numTilesYHR = (hdCamera.actualHeight + (areaTileSize - 1)) / areaTileSize;

                    // Inject the input scalars
                    cmd.SetComputeVectorParam(bilateralUpsampleCS, HDShaderIDs._HalfScreenSize, halfScreenSize);
                    firstMipOffset.Set(HDShadowUtils.Asfloat((uint)info.mipLevelOffsets[1].x), HDShadowUtils.Asfloat((uint)info.mipLevelOffsets[1].y));
                    cmd.SetComputeVectorParam(bilateralUpsampleCS, HDShaderIDs._DepthPyramidFirstMipLevelOffset, firstMipOffset);

                    // Inject all the input buffers
                    cmd.SetComputeTextureParam(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthTexture());
                    cmd.SetComputeTextureParam(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, HDShaderIDs._LowResolutionTexture, buffer1);
                    cmd.SetComputeBufferParam(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, HDShaderIDs._DepthPyramidMipLevelOffsets, info.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer));

                    // Inject the output textures
                    cmd.SetComputeTextureParam(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, HDShaderIDs._OutputUpscaledTexture, buffer0);

                    // Upscale the buffer to full resolution
                    cmd.DispatchCompute(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);
                }

                (RenderPipelineManager.currentPipeline as HDRenderPipeline).PushFullScreenDebugTexture(hdCamera, cmd, m_IndirectDiffuseBuffer0, FullScreenDebugMode.ScreenSpaceGlobalIllumination);
            }
        }
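
The ray-marching setup above folds the depth-buffer thickness test into a scale/bias pair derived from the camera near and far planes. The same arithmetic, pulled out as a standalone sketch (how the compute kernels consume the two values is not shown in these snippets):

        static void ComputeThicknessParamsSketch(float nearClipPlane, float farClipPlane, float thickness,
                                                 out float thicknessScale, out float thicknessBias)
        {
            thicknessScale = 1.0f / (1.0f + thickness);
            thicknessBias  = -nearClipPlane / (farClipPlane - nearClipPlane) * (thickness * thicknessScale);
        }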