Example #1
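        // Render-graph version of the post-processing chain: it optionally saves the alpha channel, builds the
        // color grading LUT, runs the uber post pass and composites everything in the final pass. Large parts of
        // the original command-buffer path are still commented out pending their render graph port.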
        public void Render(RenderGraph renderGraph,
                           HDCamera hdCamera,
                           BlueNoise blueNoise,
                           TextureHandle colorBuffer,
                           TextureHandle afterPostProcessTexture,
                           TextureHandle depthBuffer,
                           TextureHandle finalRT,
                           bool flipY)
        {
            var dynResHandler = DynamicResolutionHandler.instance;

            bool          isSceneView  = hdCamera.camera.cameraType == CameraType.SceneView;
            var           source       = colorBuffer;
            TextureHandle alphaTexture = renderGraph.defaultResources.whiteTextureXR;

            // Save the alpha and apply it back into the final pass if rendering in fp16 and post-processing in r11g11b10
            if (m_KeepAlpha)
            {
                using (var builder = renderGraph.AddRenderPass<AlphaCopyPassData>("Alpha Copy", out var passData, ProfilingSampler.Get(HDProfileId.AlphaCopy)))
                {
                    passData.parameters  = PrepareCopyAlphaParameters(hdCamera);
                    passData.source      = builder.ReadTexture(source);
                    passData.outputAlpha = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                    {
                        name = "Alpha Channel Copy", colorFormat = GraphicsFormat.R16_SFloat, enableRandomWrite = true
                    }));
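                    // A single-channel fp16 target is enough to hold the alpha on its own; random write is
                    // enabled because the copy presumably runs as a compute (UAV) pass.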

                    builder.SetRenderFunc(
                        (AlphaCopyPassData data, RenderGraphContext ctx) =>
                    {
                        DoCopyAlpha(data.parameters,
                                    ctx.resources.GetTexture(data.source),
                                    ctx.resources.GetTexture(data.outputAlpha),
                                    ctx.cmd);
                    });

                    alphaTexture = passData.outputAlpha;
                }
            }

            // TODO RENDERGRAPH: Implement
            //            if (m_PostProcessEnabled)
            //            {
            //                // Guard bands (also known as "horrible hack") to avoid bleeding previous RTHandle
            //                // content into smaller viewports with some effects like Bloom that rely on bilinear
            //                // filtering and can't use clamp sampler and the likes
            //                // Note: some platforms can't clear a partial render target so we directly draw black triangles
            //                {
            //                    int w = camera.actualWidth;
            //                    int h = camera.actualHeight;
            //                    cmd.SetRenderTarget(source, 0, CubemapFace.Unknown, -1);

            //                    if (w < source.rt.width || h < source.rt.height)
            //                    {
            //                        cmd.SetViewport(new Rect(w, 0, k_RTGuardBandSize, h));
            //                        cmd.DrawProcedural(Matrix4x4.identity, m_ClearBlackMaterial, 0, MeshTopology.Triangles, 3, 1);
            //                        cmd.SetViewport(new Rect(0, h, w + k_RTGuardBandSize, k_RTGuardBandSize));
            //                        cmd.DrawProcedural(Matrix4x4.identity, m_ClearBlackMaterial, 0, MeshTopology.Triangles, 3, 1);
            //                    }
            //                }

            //                // Optional NaN killer before post-processing kicks in
            //                bool stopNaNs = camera.stopNaNs && m_StopNaNFS;

            //#if UNITY_EDITOR
            //                if (isSceneView)
            //                    stopNaNs = HDAdditionalSceneViewSettings.sceneViewStopNaNs;
            //#endif

            //                if (stopNaNs)
            //                {
            //                    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.StopNaNs)))
            //                    {
            //                        var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
            //                        DoStopNaNs(cmd, camera, source, destination);
            //                        PoolSource(ref source, destination);
            //                    }
            //                }
            //            }

            //            // Dynamic exposure - will be applied in the next frame
            //            // Not considered as a post-process so it's not affected by its enabled state
            //            if (!IsExposureFixed() && m_ExposureControlFS)
            //            {
            //                using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DynamicExposure)))
            //                {
            //                    if (m_Exposure.mode.value == ExposureMode.AutomaticHistogram)
            //                    {
            //                        DoHistogramBasedExposure(cmd, camera, source);
            //                    }
            //                    else
            //                    {
            //                        DoDynamicExposure(cmd, camera, source);
            //                    }

            //                    // On reset history we need to apply dynamic exposure immediately to avoid
            //                    // white or black screen flashes when the current exposure isn't anywhere
            //                    // near 0
            //                    if (camera.resetPostProcessingHistory)
            //                    {
            //                        var destination = m_Pool.Get(Vector2.one, m_ColorFormat);

            //                        var cs = m_Resources.shaders.applyExposureCS;
            //                        int kernel = cs.FindKernel("KMain");

            //                        // Note: we call GetPrevious instead of GetCurrent because the textures
            //                        // are swapped internally as the system expects the texture will be used
            //                        // on the next frame. So the actual "current" for this frame is in
            //                        // "previous".
            //                        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._ExposureTexture, GetPreviousExposureTexture(camera));
            //                        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, source);
            //                        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, destination);
            //                        cmd.DispatchCompute(cs, kernel, (camera.actualWidth + 7) / 8, (camera.actualHeight + 7) / 8, camera.viewCount);

            //                        PoolSource(ref source, destination);
            //                    }
            //                }
            //            }

            if (m_PostProcessEnabled)
            {
                //                // Temporal anti-aliasing goes first
                //                bool taaEnabled = false;

                //                if (m_AntialiasingFS)
                //                {
                //                    taaEnabled = camera.antialiasing == AntialiasingMode.TemporalAntialiasing;

                //                    if (taaEnabled)
                //                    {
                //                        using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.TemporalAntialiasing)))
                //                        {
                //                            var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
                //                            DoTemporalAntialiasing(cmd, camera, source, destination, depthBuffer, depthMipChain);
                //                            PoolSource(ref source, destination);
                //                        }
                //                    }
                //                    else if (camera.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing)
                //                    {
                //                        using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.SMAA)))
                //                        {
                //                            var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
                //                            DoSMAA(cmd, camera, source, destination, depthBuffer);
                //                            PoolSource(ref source, destination);
                //                        }
                //                    }
                //                }

                //                if (camera.frameSettings.IsEnabled(FrameSettingsField.CustomPostProcess))
                //                {
                //                    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.CustomPostProcessBeforePP)))
                //                    {
                //                        foreach (var typeString in HDRenderPipeline.defaultAsset.beforePostProcessCustomPostProcesses)
                //                            RenderCustomPostProcess(cmd, camera, ref source, colorBuffer, Type.GetType(typeString));
                //                    }
                //                }

                //                // If Path tracing is enabled, then DoF is computed in the path tracer by sampling the lens aperture (when using the physical camera mode)
                //                bool isDoFPathTraced = (camera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) &&
                //                     camera.volumeStack.GetComponent<PathTracing>().enable.value &&
                //                     camera.camera.cameraType != CameraType.Preview &&
                //                     m_DepthOfField.focusMode == DepthOfFieldMode.UsePhysicalCamera);

                //                // Depth of Field is done right after TAA as it's easier to just re-project the CoC
                //                // map rather than having to deal with all the implications of doing it before TAA
                //                if (m_DepthOfField.IsActive() && !isSceneView && m_DepthOfFieldFS && !isDoFPathTraced)
                //                {
                //                    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfField)))
                //                    {
                //                        var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
                //                        DoDepthOfField(cmd, camera, source, destination, taaEnabled);
                //                        PoolSource(ref source, destination);
                //                    }
                //                }

                //                // Motion blur after depth of field for aesthetic reasons (better to see motion
                //                // blurred bokeh rather than out of focus motion blur)
                //                if (m_MotionBlur.IsActive() && m_AnimatedMaterialsEnabled && !camera.resetPostProcessingHistory && m_MotionBlurFS)
                //                {
                //                    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.MotionBlur)))
                //                    {
                //                        var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
                //                        DoMotionBlur(cmd, camera, source, destination);
                //                        PoolSource(ref source, destination);
                //                    }
                //                }

                //                // Panini projection is done as a fullscreen pass after all depth-based effects are
                //                // done and before bloom kicks in
                //                // This is one effect that would benefit from an overscan mode or supersampling in
                //                // HDRP to reduce the amount of resolution lost at the center of the screen
                //                if (m_PaniniProjection.IsActive() && !isSceneView && m_PaniniProjectionFS)
                //                {
                //                    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.PaniniProjection)))
                //                    {
                //                        var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
                //                        DoPaniniProjection(cmd, camera, source, destination);
                //                        PoolSource(ref source, destination);
                //                    }
                //                }

                // Uber post-process
                //// Generate the bloom texture
                //bool bloomActive = m_Bloom.IsActive() && m_BloomFS;

                //if (bloomActive)
                //{
                //    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.Bloom)))
                //    {
                //        DoBloom(cmd, camera, source, uberPostParams.uberPostCS, uberPostParams.uberPostKernel);
                //    }
                //}
                //else
                //{
                //    cmd.SetComputeTextureParam(uberPostParams.uberPostCS, uberPostParams.uberPostKernel, HDShaderIDs._BloomTexture, TextureXR.GetBlackTexture());
                //    cmd.SetComputeTextureParam(uberPostParams.uberPostCS, uberPostParams.uberPostKernel, HDShaderIDs._BloomDirtTexture, Texture2D.blackTexture);
                //    cmd.SetComputeVectorParam(uberPostParams.uberPostCS, HDShaderIDs._BloomParams, Vector4.zero);
                //}

                TextureHandle logLutOutput;
                using (var builder = renderGraph.AddRenderPass<ColorGradingPassData>("Color Grading", out var passData, ProfilingSampler.Get(HDProfileId.ColorGradingLUTBuilder)))
                {
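                    // The grading LUT is a LutSize x LutSize x LutSize 3D texture; enableRandomWrite suggests it
                    // is filled by a compute pass, while bilinear/clamp sampling lets the uber pass interpolate
                    // between LUT entries when it applies the grading.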
                    TextureHandle logLut = renderGraph.CreateTexture(new TextureDesc(m_LutSize, m_LutSize)
                    {
                        name              = "Color Grading Log Lut",
                        dimension         = TextureDimension.Tex3D,
                        slices            = m_LutSize,
                        depthBufferBits   = DepthBits.None,
                        colorFormat       = m_LutFormat,
                        filterMode        = FilterMode.Bilinear,
                        wrapMode          = TextureWrapMode.Clamp,
                        anisoLevel        = 0,
                        useMipMap         = false,
                        enableRandomWrite = true
                    });

                    passData.parameters = PrepareColorGradingParameters();
                    passData.logLut     = builder.WriteTexture(logLut);
                    logLutOutput        = passData.logLut;

                    builder.SetRenderFunc(
                        (ColorGradingPassData data, RenderGraphContext ctx) =>
                    {
                        DoColorGrading(data.parameters, ctx.resources.GetTexture(data.logLut), ctx.cmd);
                    });
                }

                using (var builder = renderGraph.AddRenderPass<UberPostPassData>("Uber Post", out var passData, ProfilingSampler.Get(HDProfileId.UberPost)))
                {
                    TextureHandle dest = renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                    {
                        name              = "Uber Post Destination",
                        colorFormat       = m_ColorFormat,
                        useMipMap         = false,
                        enableRandomWrite = true
                    });

                    passData.parameters  = PrepareUberPostParameters(hdCamera, isSceneView);
                    passData.source      = builder.ReadTexture(source);
                    passData.logLut      = builder.ReadTexture(logLutOutput);
                    passData.destination = builder.WriteTexture(dest);

                    builder.SetRenderFunc(
                        (UberPostPassData data, RenderGraphContext ctx) =>
                    {
                        // Temp until bloom is implemented.
                        ctx.cmd.SetComputeTextureParam(data.parameters.uberPostCS, data.parameters.uberPostKernel, HDShaderIDs._BloomTexture, TextureXR.GetBlackTexture());
                        ctx.cmd.SetComputeTextureParam(data.parameters.uberPostCS, data.parameters.uberPostKernel, HDShaderIDs._BloomDirtTexture, Texture2D.blackTexture);
                        ctx.cmd.SetComputeVectorParam(data.parameters.uberPostCS, HDShaderIDs._BloomParams, Vector4.zero);


                        DoUberPostProcess(data.parameters,
                                          ctx.resources.GetTexture(data.source),
                                          ctx.resources.GetTexture(data.destination),
                                          ctx.resources.GetTexture(data.logLut),
                                          ctx.cmd);
                    });

                    source = passData.destination;
                }

                m_HDInstance.PushFullScreenDebugTexture(renderGraph, source, FullScreenDebugMode.ColorLog);

                //                if (camera.frameSettings.IsEnabled(FrameSettingsField.CustomPostProcess))
                //                {
                //                    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.CustomPostProcessAfterPP)))
                //                    {
                //                        foreach (var typeString in HDRenderPipeline.defaultAsset.afterPostProcessCustomPostProcesses)
                //                            RenderCustomPostProcess(cmd, camera, ref source, colorBuffer, Type.GetType(typeString));
                //                    }
                //                }
            }

            //            if (dynResHandler.DynamicResolutionEnabled() &&     // Dynamic resolution is on.
            //                camera.antialiasing == AntialiasingMode.FastApproximateAntialiasing &&
            //                m_AntialiasingFS)
            //            {
            //                using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.FXAA)))
            //                {
            //                    var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
            //                    DoFXAA(cmd, camera, source, destination);
            //                    PoolSource(ref source, destination);
            //                }
            //            }

            //            // Contrast Adaptive Sharpen Upscaling
            //            if (dynResHandler.DynamicResolutionEnabled() &&
            //                dynResHandler.filter == DynamicResUpscaleFilter.ContrastAdaptiveSharpen)
            //            {
            //                using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.ContrastAdaptiveSharpen)))
            //                {
            //                    var destination = m_Pool.Get(Vector2.one, m_ColorFormat);

            //                    var cs = m_Resources.shaders.contrastAdaptiveSharpenCS;
            //                    int kInit = cs.FindKernel("KInitialize");
            //                    int kMain = cs.FindKernel("KMain");
            //                    if (kInit >= 0 && kMain >= 0)
            //                    {
            //                        cmd.SetComputeFloatParam(cs, HDShaderIDs._Sharpness, 1);
            //                        cmd.SetComputeTextureParam(cs, kMain, HDShaderIDs._InputTexture, source);
            //                        cmd.SetComputeVectorParam(cs, HDShaderIDs._InputTextureDimensions, new Vector4(source.rt.width, source.rt.height));
            //                        cmd.SetComputeTextureParam(cs, kMain, HDShaderIDs._OutputTexture, destination);
            //                        cmd.SetComputeVectorParam(cs, HDShaderIDs._OutputTextureDimensions, new Vector4(destination.rt.width, destination.rt.height));

            //                        ValidateComputeBuffer(ref m_ContrastAdaptiveSharpen, 2, sizeof(uint) * 4);

            //                        cmd.SetComputeBufferParam(cs, kInit, "CasParameters", m_ContrastAdaptiveSharpen);
            //                        cmd.SetComputeBufferParam(cs, kMain, "CasParameters", m_ContrastAdaptiveSharpen);

            //                        cmd.DispatchCompute(cs, kInit, 1, 1, 1);

            //                        int dispatchX = (int)System.Math.Ceiling(destination.rt.width / 16.0f);
            //                        int dispatchY = (int)System.Math.Ceiling(destination.rt.height / 16.0f);

            //                        cmd.DispatchCompute(cs, kMain, dispatchX, dispatchY, camera.viewCount);
            //                    }

            //                    PoolSource(ref source, destination);
            //                }
            //            }

            using (var builder = renderGraph.AddRenderPass<FinalPassData>("Final Pass", out var passData, ProfilingSampler.Get(HDProfileId.FinalPost)))
            {
                passData.parameters = PrepareFinalPass(hdCamera, blueNoise, flipY);
                passData.source     = builder.ReadTexture(source);
                passData.afterPostProcessTexture = builder.ReadTexture(afterPostProcessTexture);
                passData.alphaTexture            = builder.ReadTexture(alphaTexture);
                passData.destination             = builder.WriteTexture(finalRT);

                builder.SetRenderFunc(
                    (FinalPassData data, RenderGraphContext ctx) =>
                {
                    DoFinalPass(data.parameters,
                                ctx.resources.GetTexture(data.source),
                                ctx.resources.GetTexture(data.afterPostProcessTexture),
                                ctx.resources.GetTexture(data.destination),
                                ctx.resources.GetTexture(data.alphaTexture),
                                ctx.cmd);
                });
            }
        }
Example #2
        void RenderSSGI(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, int frameCount)
        {
            // Grab the global illumination volume component
            GlobalIllumination giSettings = hdCamera.volumeStack.GetComponent<GlobalIllumination>();

            // Grab the noise texture manager
            BlueNoise blueNoise = GetBlueNoiseManager();

            // Grab the shaders we shall be using
            ComputeShader ssGICS = m_Asset.renderPipelineResources.shaders.screenSpaceGlobalIlluminationCS;

            // Evaluate the dispatch parameters
            int texWidth, texHeight;

            if (giSettings.fullResolutionSS)
            {
                texWidth  = hdCamera.actualWidth;
                texHeight = hdCamera.actualHeight;
                halfScreenSize.Set(texWidth * 0.5f, texHeight * 0.5f, 2.0f / texWidth, 2.0f / texHeight);
            }
            else
            {
                texWidth  = hdCamera.actualWidth / 2;
                texHeight = hdCamera.actualHeight / 2;
                halfScreenSize.Set(texWidth, texHeight, 1.0f / texWidth, 1.0f / texHeight);
            }
            int areaTileSize = 8;
            int numTilesXHR  = (texWidth + (areaTileSize - 1)) / areaTileSize;
            int numTilesYHR  = (texHeight + (areaTileSize - 1)) / areaTileSize;
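            // (size + tileSize - 1) / tileSize is an integer ceiling division: with 8x8 thread groups,
            // a 1920x1081 target dispatches 240x136 groups so partial tiles are still covered.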

            // Depending on whether we run at half or full resolution, pick the initial and final buffers so we avoid a useless blit
            RTHandle buffer0, buffer1;

            if (!giSettings.fullResolutionSS)
            {
                buffer0 = m_IndirectDiffuseBuffer0;
                buffer1 = m_IndirectDiffuseBuffer1;
            }
            else
            {
                buffer0 = m_IndirectDiffuseBuffer1;
                buffer1 = m_IndirectDiffuseBuffer0;
            }

            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.SsgiPass)))
            {
                // Fetch the right tracing kernel
                int currentKernel = giSettings.fullResolutionSS ? m_TraceGlobalIlluminationKernel : m_TraceGlobalIlluminationHalfKernel;

                // Inject all the input scalars
                float n              = hdCamera.camera.nearClipPlane;
                float f              = hdCamera.camera.farClipPlane;
                float thickness      = giSettings.depthBufferThickness.value;
                float thicknessScale = 1.0f / (1.0f + thickness);
                float thicknessBias  = -n / (f - n) * (thickness * thicknessScale);
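                // The scale/bias pair remaps the user-facing depthBufferThickness (world units) into the
                // normalized depth comparison done while ray marching; the exact shader-side use is an
                // assumption based on the _IndirectDiffuseThicknessScale/Bias parameter names.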
                cmd.SetComputeFloatParam(ssGICS, HDShaderIDs._IndirectDiffuseThicknessScale, thicknessScale);
                cmd.SetComputeFloatParam(ssGICS, HDShaderIDs._IndirectDiffuseThicknessBias, thicknessBias);
                cmd.SetComputeIntParam(ssGICS, HDShaderIDs._IndirectDiffuseSteps, giSettings.raySteps);
                cmd.SetComputeFloatParam(ssGICS, HDShaderIDs._IndirectDiffuseMaximalRadius, giSettings.maximalRadius);
                // Inject half screen size if required
                if (!giSettings.fullResolutionSS)
                {
                    cmd.SetComputeVectorParam(ssGICS, HDShaderIDs._HalfScreenSize, halfScreenSize);
                }

                // Inject the ray-tracing sampling data
                blueNoise.BindDitheredRNGData1SPP(cmd);

                // Inject all the input textures/buffers
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthTexture());
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._IndirectDiffuseHitPointTextureRW, m_IndirectDiffuseHitPointBuffer);
                var info = m_SharedRTManager.GetDepthBufferMipChainInfo();
                cmd.SetComputeBufferParam(ssGICS, currentKernel, HDShaderIDs._DepthPyramidMipLevelOffsets, info.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer));
                cmd.SetGlobalBuffer(HDShaderIDs.g_vLightListGlobal, m_TileAndClusterData.lightList);

                // Do the ray marching
                cmd.DispatchCompute(ssGICS, currentKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);

                // Fetch the right kernel to use
                currentKernel = giSettings.fullResolutionSS ? m_ReprojectGlobalIlluminationKernel : m_ReprojectGlobalIlluminationHalfKernel;

                // Update global constant buffer.
                // This should probably be a shader-specific uniform instead of reusing the global constant buffer one since it's the only value updated here.
                m_ShaderVariablesRayTracingCB._RaytracingIntensityClamp = giSettings.clampValueSS;
                ConstantBuffer.PushGlobal(cmd, m_ShaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                // Inject all the input scalars
                cmd.SetComputeVectorParam(ssGICS, HDShaderIDs._ColorPyramidUvScaleAndLimitPrevFrame, HDUtils.ComputeViewportScaleAndLimit(hdCamera.historyRTHandleProperties.previousViewportSize, hdCamera.historyRTHandleProperties.previousRenderTargetSize));

                // Bind all the input buffers
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._IndirectDiffuseHitPointTexture, m_IndirectDiffuseHitPointBuffer);
                var previousColorPyramid = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._ColorPyramidTexture, previousColorPyramid != null ? previousColorPyramid : TextureXR.GetBlackTexture());
                var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer != null ? historyDepthBuffer : TextureXR.GetBlackTexture());
                cmd.SetComputeBufferParam(ssGICS, currentKernel, HDShaderIDs._DepthPyramidMipLevelOffsets, info.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer));

                // Bind the output texture
                cmd.SetComputeTextureParam(ssGICS, currentKernel, HDShaderIDs._IndirectDiffuseTextureRW, buffer1);

                // Do the reprojection
                cmd.DispatchCompute(ssGICS, currentKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);

                float historyValidity = 1.0f;
#if UNITY_HDRP_DXR_TESTS_DEFINE
                if (Application.isPlaying)
                {
                    historyValidity = 0.0f;
                }
                else
#endif
                // We need to check if something invalidated the history buffers
                historyValidity *= ValidRayTracingHistory(hdCamera) ? 1.0f : 0.0f;

                // Do the denoising part
                SSGIDenoiser ssgiDenoiser = GetSSGIDenoiser();
                ssgiDenoiser.Denoise(cmd, hdCamera, buffer1, buffer0, halfResolution: !giSettings.fullResolutionSS, historyValidity: historyValidity);

                // If this was a half resolution effect, we still have to upscale it
                if (!giSettings.fullResolutionSS)
                {
                    ComputeShader bilateralUpsampleCS = m_Asset.renderPipelineResources.shaders.bilateralUpsampleCS;

                    // Re-evaluate the dispatch parameters (we are evaluating the upsample in full resolution)
                    numTilesXHR = (hdCamera.actualWidth + (areaTileSize - 1)) / areaTileSize;
                    numTilesYHR = (hdCamera.actualHeight + (areaTileSize - 1)) / areaTileSize;

                    // Inject the input scalars
                    cmd.SetComputeVectorParam(bilateralUpsampleCS, HDShaderIDs._HalfScreenSize, halfScreenSize);
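                    // The mip-1 texel offset is passed as raw bits (uint reinterpreted as float via Asfloat)
                    // so the shader can recover the exact integer offset; the shader-side decode is an
                    // assumption inferred from the cast, it is not shown here.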
                    firstMipOffset.Set(HDShadowUtils.Asfloat((uint)info.mipLevelOffsets[1].x), HDShadowUtils.Asfloat((uint)info.mipLevelOffsets[1].y));
                    cmd.SetComputeVectorParam(bilateralUpsampleCS, HDShaderIDs._DepthPyramidFirstMipLevelOffset, firstMipOffset);

                    // Inject all the input buffers
                    cmd.SetComputeTextureParam(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthTexture());
                    cmd.SetComputeTextureParam(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, HDShaderIDs._LowResolutionTexture, buffer1);
                    cmd.SetComputeBufferParam(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, HDShaderIDs._DepthPyramidMipLevelOffsets, info.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer));

                    // Inject the output textures
                    cmd.SetComputeTextureParam(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, HDShaderIDs._OutputUpscaledTexture, buffer0);

                    // Upscale the buffer to full resolution
                    cmd.DispatchCompute(bilateralUpsampleCS, m_BilateralUpSampleColorTMKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);
                }

                (RenderPipelineManager.currentPipeline as HDRenderPipeline).PushFullScreenDebugTexture(hdCamera, cmd, m_IndirectDiffuseBuffer0, FullScreenDebugMode.ScreenSpaceGlobalIllumination);
            }
        }
Example #3
        bool RenderAreaShadows(HDCamera hdCamera, CommandBuffer cmd, int frameCount)
        {
            // Let's check all the resources and states to see if we should render the effect
            HDRaytracingEnvironment rtEnvironment = m_RayTracingManager.CurrentEnvironment();
            BlueNoise blueNoise = m_RayTracingManager.GetBlueNoiseManager();

            // Make sure everything is valid
            bool invalidState = !hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) ||
                                rtEnvironment == null ||
                                hdCamera.frameSettings.litShaderMode != LitShaderMode.Deferred;

            // If anything is invalid, we stop right away
            if (invalidState)
            {
                return false;
            }

            RayTracingShader shadowRayTrace = m_Asset.renderPipelineRayTracingResources.shadowRaytracingRT;
            ComputeShader    shadowsCompute = m_Asset.renderPipelineRayTracingResources.shadowRaytracingCS;
            ComputeShader    shadowFilter   = m_Asset.renderPipelineRayTracingResources.shadowFilterCS;

            // Grab the TAA history buffers (SN/UN and Analytic value)
            RTHandle shadowHistoryArray = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedShadow)
                                          ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedShadow, ShadowHistoryBufferAllocatorFunction, 1);
            RTHandle areaAnalyticHistoryArray = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedAreaAnalytic)
                                                ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedAreaAnalytic, AreaAnalyticHistoryBufferAllocatorFunction, 1);
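            // GetCurrentFrameRT returns null when a history target has not been allocated yet, so the ??
            // fallbacks above lazily allocate both buffers on first use.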

            // Grab the acceleration structure for the target camera
            RayTracingAccelerationStructure accelerationStructure = m_RayTracingManager.RequestAccelerationStructure(rtEnvironment.shadowLayerMask);

            // Define the shader pass to use for the shadow ray tracing
            cmd.SetRayTracingShaderPass(shadowRayTrace, "VisibilityDXR");

            // Set the acceleration structure for the pass
            cmd.SetRayTracingAccelerationStructure(shadowRayTrace, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            blueNoise.BindDitheredRNGData8SPP(cmd);

            int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;
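            // Reuse the TAA jitter index when TAA is enabled, presumably so the dithered sampling stays in
            // sync with the temporal jitter; otherwise just cycle through an 8-frame sequence.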

            cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

            // Temporal Filtering kernels
            int applyTAAKernel        = shadowFilter.FindKernel("AreaShadowApplyTAA");
            int updateAnalyticHistory = shadowFilter.FindKernel("AreaAnalyticHistoryCopy");
            int updateShadowHistory   = shadowFilter.FindKernel("AreaShadowHistoryCopy");

            // Spatial Filtering kernels
            int estimateNoiseKernel = shadowFilter.FindKernel("AreaShadowEstimateNoise");
            int firstDenoiseKernel  = shadowFilter.FindKernel("AreaShadowDenoiseFirstPass");
            int secondDenoiseKernel = shadowFilter.FindKernel("AreaShadowDenoiseSecondPass");

            // Texture dimensions
            int texWidth  = hdCamera.actualWidth;
            int texHeight = hdCamera.actualHeight;

            // Evaluate the dispatch parameters
            int areaTileSize = 8;
            int numTilesX    = (texWidth + (areaTileSize - 1)) / areaTileSize;
            int numTilesY    = (texHeight + (areaTileSize - 1)) / areaTileSize;

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);

            int numLights = m_lightList.lights.Count;

            for (int lightIdx = 0; lightIdx < numLights; ++lightIdx)
            {
                // If this is not a rectangular area light or it won't have shadows, skip it
                if (m_lightList.lights[lightIdx].lightType != GPULightType.Rectangle || m_lightList.lights[lightIdx].screenSpaceShadowIndex == -1)
                {
                    continue;
                }

                LightData             currentLight = m_lightList.lights[lightIdx];
                HDAdditionalLightData currentAdditionalLightData = GetCurrentRayTracedShadow(currentLight.screenSpaceShadowIndex);

                using (new ProfilingSample(cmd, "Ray Traced Area Shadow", CustomSamplerId.RaytracingShadowIntegration.GetSampler()))
                {
                    // We need to build the world to area light matrix
                    worldToLocalArea.SetColumn(0, currentLight.right);
                    worldToLocalArea.SetColumn(1, currentLight.up);
                    worldToLocalArea.SetColumn(2, currentLight.forward);

                    // Compensate for camera-relative rendering if active
                    Vector3 lightPositionWS = currentLight.positionRWS;
                    if (ShaderConfig.s_CameraRelativeRendering != 0)
                    {
                        lightPositionWS += hdCamera.camera.transform.position;
                    }
                    worldToLocalArea.SetColumn(3, lightPositionWS);
                    worldToLocalArea.m33 = 1.0f;
                    worldToLocalArea     = worldToLocalArea.inverse;
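                    // The four columns (right, up, forward, position) define the light's local-to-world frame;
                    // the inverse is the world-to-local matrix used to express shading points in light space.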

                    // We have noticed from extensive profiling that ray-tracing shaders are not as efficient at per-pixel computation. To reduce that cost,
                    // we run a compute prepass that evaluates the analytic term and probability and generates the first integration sample
                    if (true)
                    {
                        int shadowComputeKernel = shadowsCompute.FindKernel("RaytracingAreaShadowPrepass");

                        // This pass evaluates the analytic value, then generates and outputs the first sample
                        cmd.SetComputeBufferParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._LightDatas, m_LightLoopLightData.lightData);
                        cmd.SetComputeIntParam(shadowsCompute, HDShaderIDs._RaytracingTargetAreaLight, lightIdx);
                        cmd.SetComputeIntParam(shadowsCompute, HDShaderIDs._RaytracingNumSamples, currentAdditionalLightData.numRayTracingSamples);
                        cmd.SetComputeMatrixParam(shadowsCompute, HDShaderIDs._RaytracingAreaWorldToLocal, worldToLocalArea);
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._GBufferTexture[0], m_GbufferManager.GetBuffer(0));
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._GBufferTexture[1], m_GbufferManager.GetBuffer(1));
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._GBufferTexture[2], m_GbufferManager.GetBuffer(2));
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._GBufferTexture[3], m_GbufferManager.GetBuffer(3));
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._AreaCookieTextures, m_TextureCaches.areaLightCookieManager.GetTexCache());
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._RaytracedAreaShadowIntegration, m_DenoiseBuffer0);
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._RaytracedAreaShadowSample, m_DenoiseBuffer1);
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._RaytracingDirectionBuffer, m_RaytracingDirectionBuffer);
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._RaytracingDistanceBuffer, m_RaytracingDistanceBuffer);
                        cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._AnalyticProbBuffer, m_AnalyticProbBuffer);
                        cmd.DispatchCompute(shadowsCompute, shadowComputeKernel, numTilesX, numTilesY, 1);

                        // This pass will use the previously generated sample and add it to the integration buffer
                        cmd.SetRayTracingBufferParam(shadowRayTrace, HDShaderIDs._LightDatas, m_LightLoopLightData.lightData);
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracedAreaShadowSample, m_DenoiseBuffer1);
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracedAreaShadowIntegration, m_DenoiseBuffer0);
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracingDirectionBuffer, m_RaytracingDirectionBuffer);
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracingDistanceBuffer, m_RaytracingDistanceBuffer);
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._AnalyticProbBuffer, m_AnalyticProbBuffer);
                        cmd.DispatchRays(shadowRayTrace, m_RayGenAreaShadowSingleName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);

                        // Let's do the following samples (if any)
                        for (int sampleIndex = 1; sampleIndex < currentAdditionalLightData.numRayTracingSamples; ++sampleIndex)
                        {
                            shadowComputeKernel = shadowsCompute.FindKernel("RaytracingAreaShadowNewSample");

                            // This pass generates a new sample based on the initial pre-pass
                            cmd.SetComputeBufferParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._LightDatas, m_LightLoopLightData.lightData);
                            cmd.SetComputeIntParam(shadowsCompute, HDShaderIDs._RaytracingTargetAreaLight, lightIdx);
                            cmd.SetComputeIntParam(shadowsCompute, HDShaderIDs._RaytracingNumSamples, currentAdditionalLightData.numRayTracingSamples);
                            cmd.SetComputeIntParam(shadowsCompute, HDShaderIDs._RaytracingSampleIndex, sampleIndex);
                            cmd.SetComputeMatrixParam(shadowsCompute, HDShaderIDs._RaytracingAreaWorldToLocal, worldToLocalArea);
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._GBufferTexture[0], m_GbufferManager.GetBuffer(0));
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._GBufferTexture[1], m_GbufferManager.GetBuffer(1));
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._GBufferTexture[2], m_GbufferManager.GetBuffer(2));
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._GBufferTexture[3], m_GbufferManager.GetBuffer(3));
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._AreaCookieTextures, m_TextureCaches.areaLightCookieManager.GetTexCache());
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._RaytracedAreaShadowIntegration, m_DenoiseBuffer0);
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._RaytracedAreaShadowSample, m_DenoiseBuffer1);
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._RaytracingDirectionBuffer, m_RaytracingDirectionBuffer);
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._RaytracingDistanceBuffer, m_RaytracingDistanceBuffer);
                            cmd.SetComputeTextureParam(shadowsCompute, shadowComputeKernel, HDShaderIDs._AnalyticProbBuffer, m_AnalyticProbBuffer);
                            cmd.DispatchCompute(shadowsCompute, shadowComputeKernel, numTilesX, numTilesY, 1);

                            // This pass will use the previously generated sample and add it to the integration buffer
                            cmd.SetRayTracingBufferParam(shadowRayTrace, HDShaderIDs._LightDatas, m_LightLoopLightData.lightData);
                            cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                            cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracedAreaShadowSample, m_DenoiseBuffer1);
                            cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracedAreaShadowIntegration, m_DenoiseBuffer0);
                            cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracingDirectionBuffer, m_RaytracingDirectionBuffer);
                            cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracingDistanceBuffer, m_RaytracingDistanceBuffer);
                            cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._AnalyticProbBuffer, m_AnalyticProbBuffer);
                            cmd.DispatchRays(shadowRayTrace, m_RayGenAreaShadowSingleName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
                        }
                    }
                    else
                    {
                        // This pass generates the analytic value and will do the full integration
                        cmd.SetRayTracingBufferParam(shadowRayTrace, HDShaderIDs._LightDatas, m_LightLoopLightData.lightData);
                        cmd.SetRayTracingIntParam(shadowRayTrace, HDShaderIDs._RaytracingTargetAreaLight, lightIdx);
                        cmd.SetRayTracingIntParam(shadowRayTrace, HDShaderIDs._RaytracingNumSamples, currentAdditionalLightData.numRayTracingSamples);
                        cmd.SetRayTracingMatrixParam(shadowRayTrace, HDShaderIDs._RaytracingAreaWorldToLocal, worldToLocalArea);
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._GBufferTexture[0], m_GbufferManager.GetBuffer(0));
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._GBufferTexture[1], m_GbufferManager.GetBuffer(1));
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._GBufferTexture[2], m_GbufferManager.GetBuffer(2));
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._GBufferTexture[3], m_GbufferManager.GetBuffer(3));
                        cmd.SetRayTracingIntParam(shadowRayTrace, HDShaderIDs._RayCountEnabled, m_RayTracingManager.rayCountManager.RayCountIsEnabled());
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RayCountTexture, m_RayTracingManager.rayCountManager.GetRayCountTexture());
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._AreaCookieTextures, m_TextureCaches.areaLightCookieManager.GetTexCache());
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._AnalyticProbBuffer, m_AnalyticProbBuffer);
                        cmd.SetRayTracingTextureParam(shadowRayTrace, HDShaderIDs._RaytracedAreaShadowIntegration, m_DenoiseBuffer0);
                        cmd.DispatchRays(shadowRayTrace, m_RayGenAreaShadowName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
                    }
                }

                using (new ProfilingSample(cmd, "Combine Area Shadow", CustomSamplerId.RaytracingShadowCombination.GetSampler()))
                {
                    // Global parameters
                    cmd.SetComputeIntParam(shadowFilter, HDShaderIDs._RaytracingDenoiseRadius, currentAdditionalLightData.filterSizeTraced);
                    cmd.SetComputeIntParam(shadowFilter, HDShaderIDs._RaytracingShadowSlot, m_lightList.lights[lightIdx].screenSpaceShadowIndex);

                    // Apply a vectorized temporal filtering pass and store the result in m_DenoiseBuffer1, with the analytic value in the third channel
                    var historyScale = new Vector2(hdCamera.actualWidth / (float)shadowHistoryArray.rt.width, hdCamera.actualHeight / (float)shadowHistoryArray.rt.height);
                    cmd.SetComputeVectorParam(shadowFilter, HDShaderIDs._RTHandleScaleHistory, historyScale);

                    cmd.SetComputeTextureParam(shadowFilter, applyTAAKernel, HDShaderIDs._AnalyticProbBuffer, m_AnalyticProbBuffer);
                    cmd.SetComputeTextureParam(shadowFilter, applyTAAKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                    cmd.SetComputeTextureParam(shadowFilter, applyTAAKernel, HDShaderIDs._AreaShadowHistory, shadowHistoryArray);
                    cmd.SetComputeTextureParam(shadowFilter, applyTAAKernel, HDShaderIDs._AnalyticHistoryBuffer, areaAnalyticHistoryArray);
                    cmd.SetComputeTextureParam(shadowFilter, applyTAAKernel, HDShaderIDs._DenoiseInputTexture, m_DenoiseBuffer0);
                    cmd.SetComputeTextureParam(shadowFilter, applyTAAKernel, HDShaderIDs._DenoiseOutputTextureRW, m_DenoiseBuffer1);
                    cmd.DispatchCompute(shadowFilter, applyTAAKernel, numTilesX, numTilesY, 1);

                    // Update the analytic history buffer
                    cmd.SetComputeTextureParam(shadowFilter, updateAnalyticHistory, HDShaderIDs._AnalyticProbBuffer, m_AnalyticProbBuffer);
                    cmd.SetComputeTextureParam(shadowFilter, updateAnalyticHistory, HDShaderIDs._AnalyticHistoryBuffer, areaAnalyticHistoryArray);
                    cmd.DispatchCompute(shadowFilter, updateAnalyticHistory, numTilesX, numTilesY, 1);

                    // Update the shadow history buffer
                    cmd.SetComputeTextureParam(shadowFilter, updateShadowHistory, HDShaderIDs._DenoiseInputTexture, m_DenoiseBuffer1);
                    cmd.SetComputeTextureParam(shadowFilter, updateShadowHistory, HDShaderIDs._AreaShadowHistoryRW, shadowHistoryArray);
                    cmd.DispatchCompute(shadowFilter, updateShadowHistory, numTilesX, numTilesY, 1);

                    if (currentAdditionalLightData.filterSizeTraced > 0)
                    {
                        // Inject parameters for noise estimation
                        cmd.SetComputeTextureParam(shadowFilter, estimateNoiseKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                        cmd.SetComputeTextureParam(shadowFilter, estimateNoiseKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                        cmd.SetComputeTextureParam(shadowFilter, estimateNoiseKernel, HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);

                        // Noise estimation pre-pass
                        cmd.SetComputeTextureParam(shadowFilter, estimateNoiseKernel, HDShaderIDs._DenoiseInputTexture, m_DenoiseBuffer1);
                        cmd.SetComputeTextureParam(shadowFilter, estimateNoiseKernel, HDShaderIDs._DenoiseOutputTextureRW, m_DenoiseBuffer0);
                        cmd.DispatchCompute(shadowFilter, estimateNoiseKernel, numTilesX, numTilesY, 1);

                        // Reinject parameters for denoising
                        cmd.SetComputeTextureParam(shadowFilter, firstDenoiseKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                        cmd.SetComputeTextureParam(shadowFilter, firstDenoiseKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                        cmd.SetComputeTextureParam(shadowFilter, firstDenoiseKernel, HDShaderIDs._ScreenSpaceShadowsTextureRW, m_ScreenSpaceShadowTextureArray);

                        // First denoising pass
                        cmd.SetComputeTextureParam(shadowFilter, firstDenoiseKernel, HDShaderIDs._DenoiseInputTexture, m_DenoiseBuffer0);
                        cmd.SetComputeTextureParam(shadowFilter, firstDenoiseKernel, HDShaderIDs._DenoiseOutputTextureRW, m_DenoiseBuffer1);
                        cmd.DispatchCompute(shadowFilter, firstDenoiseKernel, numTilesX, numTilesY, 1);
                    }

                    // Re-inject parameters for denoising
                    cmd.SetComputeTextureParam(shadowFilter, secondDenoiseKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                    cmd.SetComputeTextureParam(shadowFilter, secondDenoiseKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                    cmd.SetComputeTextureParam(shadowFilter, secondDenoiseKernel, HDShaderIDs._ScreenSpaceShadowsTextureRW, m_ScreenSpaceShadowTextureArray);

                    // Second (and final) denoising pass
                    cmd.SetComputeTextureParam(shadowFilter, secondDenoiseKernel, HDShaderIDs._DenoiseInputTexture, m_DenoiseBuffer1);
                    cmd.DispatchCompute(shadowFilter, secondDenoiseKernel, numTilesX, numTilesY, 1);
                }
            }
            return true;
        }