Example #1
        public void Initialize(HDRenderPipeline renderPipeline)
        {
            // Keep track of the external buffers
            m_RenderPipelineResources           = renderPipeline.asset.renderPipelineResources;
            m_RenderPipelineRayTracingResources = renderPipeline.asset.renderPipelineRayTracingResources;

            // Keep track of the render pipeline
            m_RenderPipeline = renderPipeline;

            // Pre-allocate the cluster with a dummy size
            m_LightDataGPUArray    = new ComputeBuffer(1, System.Runtime.InteropServices.Marshal.SizeOf(typeof(LightData)));
            m_EnvLightDataGPUArray = new ComputeBuffer(1, System.Runtime.InteropServices.Marshal.SizeOf(typeof(EnvLightData)));

            // Allocate the light cluster buffer at the right size
            m_NumLightsPerCell = renderPipeline.asset.currentPlatformRenderPipelineSettings.lightLoopSettings.maxLightsPerClusterCell;
            int bufferSize = 64 * 64 * 32 * (m_NumLightsPerCell + 4);

            ResizeClusterBuffer(bufferSize);

            // Create the material required for debug
            m_DebugMaterial = CoreUtils.CreateEngineMaterial(m_RenderPipelineRayTracingResources.lightClusterDebugS);
        }
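
ResizeClusterBuffer is called above but is not part of the snippet. A minimal sketch of what such a helper could look like, assuming it simply releases the previous cluster buffer and allocates a new uint buffer of the requested size; the actual HDRP implementation may differ.

        // Hypothetical sketch of the ResizeClusterBuffer helper used above; the real
        // HDRP implementation may differ. Releases the previous buffer (if any) and
        // allocates a new one holding bufferSize uint entries.
        void ResizeClusterBuffer(int bufferSize)
        {
            // Release the old buffer if it exists
            CoreUtils.SafeRelease(m_LightCluster);
            m_LightCluster = null;

            // Allocate the new cluster buffer
            if (bufferSize > 0)
                m_LightCluster = new ComputeBuffer(bufferSize, sizeof(uint));
        }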
Example #2
        AmbientOcclusionDenoiseParameters PrepareAmbientOcclusionDenoiseParameters(HDCamera hdCamera, ShaderVariablesRaytracing raytracingCB)
        {
            AmbientOcclusionDenoiseParameters rtAOParameters = new AmbientOcclusionDenoiseParameters();
            var aoSettings = hdCamera.volumeStack.GetComponent<AmbientOcclusion>();

            rtAOParameters.denoise         = aoSettings.denoise;
            rtAOParameters.denoiserRadius  = aoSettings.denoiserRadius;
            rtAOParameters.historyValidity = 1.0f;
#if UNITY_HDRP_DXR_TESTS_DEFINE
            if (Application.isPlaying)
            {
                rtAOParameters.historyValidity = 0.0f;
            }
            else
#endif
            // We need to check if something invalidated the history buffers
            rtAOParameters.historyValidity = HDRenderPipeline.ValidRayTracingHistory(hdCamera) ? 1.0f : 0.0f;
            rtAOParameters.actualWidth     = hdCamera.actualWidth;
            rtAOParameters.actualHeight    = hdCamera.actualHeight;
            rtAOParameters.viewCount       = hdCamera.viewCount;
            return(rtAOParameters);
        }
        public TemporalFilterParameters PrepareTemporalFilterParameters(HDCamera hdCamera, bool singleChannel, float historyValidity)
        {
            TemporalFilterParameters temporalFilterParameters = new TemporalFilterParameters();

            // Camera parameters
            temporalFilterParameters.texWidth  = hdCamera.actualWidth;
            temporalFilterParameters.texHeight = hdCamera.actualHeight;
            temporalFilterParameters.viewCount = hdCamera.viewCount;

            // Denoising parameters
            temporalFilterParameters.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
            temporalFilterParameters.historyValidity    = historyValidity;

            // Kernels
            temporalFilterParameters.temporalAccKernel = singleChannel ? m_TemporalAccumulationSingleKernel : m_TemporalAccumulationColorKernel;
            temporalFilterParameters.copyHistoryKernel = singleChannel ? m_CopyHistorySingleKernel : m_CopyHistoryColorKernel;

            // Other parameters
            temporalFilterParameters.temporalFilterCS = m_TemporalFilterCS;

            return(temporalFilterParameters);
        }
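
HDRenderPipeline.GetPixelSpreadTangent is referenced by several of these examples but is not shown. A hedged sketch of the usual formulation (the tangent of half the vertical field of view spread over the render target size); the exact HDRP implementation may use a slightly different form.

        // Illustrative sketch only; the exact HDRP helper may differ.
        // Approximates the tangent of the angle subtended by a single pixel, derived from the
        // vertical field of view (in degrees) and the render target dimensions.
        static float GetPixelSpreadTangentSketch(float verticalFoVInDegrees, int width, int height)
        {
            return Mathf.Tan(verticalFoVInDegrees * Mathf.Deg2Rad * 0.5f) * 2.0f / Mathf.Min(width, height);
        }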
Example #4
        public DiffuseDenoiserParameters PrepareDiffuseDenoiserParameters(HDCamera hdCamera, bool singleChannel, float kernelSize, bool halfResolutionFilter)
        {
            DiffuseDenoiserParameters ddParams = new DiffuseDenoiserParameters();

            // Camera parameters
            ddParams.texWidth  = hdCamera.actualWidth;
            ddParams.texHeight = hdCamera.actualHeight;
            ddParams.viewCount = hdCamera.viewCount;

            // Denoising parameters
            ddParams.pixelSpreadTangent   = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
            ddParams.kernelSize           = kernelSize;
            ddParams.halfResolutionFilter = halfResolutionFilter;

            // Kernels
            ddParams.bilateralFilterKernel = singleChannel ? m_BilateralFilterSingleKernel : m_BilateralFilterColorKernel;
            ddParams.gatherKernel          = singleChannel ? m_GatherSingleKernel : m_GatherColorKernel;

            // Other parameters
            ddParams.owenScrambleRGBA  = m_OwenScrambleRGBA;
            ddParams.diffuseDenoiserCS = m_DiffuseDenoiser;
            return(ddParams);
        }
Example #5
        public void Init(HDRenderPipelineRayTracingResources rpRTResources, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            // Keep track of the resources
            m_TemporalFilterCS = rpRTResources.temporalFilterCS;

            // Keep track of the shared rt manager
            m_SharedRTManager = sharedRTManager;
            m_RenderPipeline  = renderPipeline;

            // Grab all the kernels we'll eventually need
            m_ValidateHistoryKernel            = m_TemporalFilterCS.FindKernel("ValidateHistory");
            m_TemporalAccumulationSingleKernel = m_TemporalFilterCS.FindKernel("TemporalAccumulationSingle");
            m_TemporalAccumulationColorKernel  = m_TemporalFilterCS.FindKernel("TemporalAccumulationColor");
            m_CopyHistorySingleKernel          = m_TemporalFilterCS.FindKernel("CopyHistorySingle");
            m_CopyHistoryColorKernel           = m_TemporalFilterCS.FindKernel("CopyHistoryColor");

            m_TemporalAccumulationSingleArrayKernel = m_TemporalFilterCS.FindKernel("TemporalAccumulationSingleArray");
            m_TemporalAccumulationColorArrayKernel  = m_TemporalFilterCS.FindKernel("TemporalAccumulationColorArray");

            m_CopyHistorySingleArrayKernel = m_TemporalFilterCS.FindKernel("CopyHistorySingleArray");
            m_CopyHistoryColorArrayKernel  = m_TemporalFilterCS.FindKernel("CopyHistoryColorArray");

            m_CopyHistorySingleArrayNoValidityKernel = m_TemporalFilterCS.FindKernel("CopyHistorySingleArrayNoValidity");
        }
Example #6
        public void Initialize(RenderPipelineResources rpResources, HDRenderPipelineRayTracingResources rpRTResources, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            // Keep track of the external buffers
            m_RenderPipelineResources           = rpResources;
            m_RenderPipelineRayTracingResources = rpRTResources;
            m_RaytracingManager = raytracingManager;

            // Keep track of the render pipeline
            m_RenderPipeline = renderPipeline;

            // Keep track of the shared rt manager
            m_SharedRTManager = sharedRTManager;

            // Texture used to output debug information
            m_DebugLightClusterTexture = RTHandles.Alloc(Vector2.one, TextureXR.slices, dimension: TextureXR.dimension, filterMode: FilterMode.Point, colorFormat: GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite: true, useDynamicScale: true, useMipMap: false, name: "DebugLightClusterTexture");

            // Pre-allocate the cluster with a dummy size
            m_LightCluster         = new ComputeBuffer(1, sizeof(uint));
            m_LightDataGPUArray    = new ComputeBuffer(1, System.Runtime.InteropServices.Marshal.SizeOf(typeof(LightData)));
            m_EnvLightDataGPUArray = new ComputeBuffer(1, System.Runtime.InteropServices.Marshal.SizeOf(typeof(EnvLightData)));

            // Create the material required for debug
            m_DebugMaterial = CoreUtils.CreateEngineMaterial(m_RenderPipelineRayTracingResources.lightClusterDebugS);
        }
Example #7
        public SSGIDenoiserOutput Denoise(RenderGraph renderGraph, HDCamera hdCamera,
                                          TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer, TextureHandle inputBuffer0, TextureHandle inputBuffer1,
                                          HDUtils.PackedMipChainInfo depthMipInfo, bool halfResolution = false, float historyValidity = 1.0f)
        {
            using (var builder = renderGraph.AddRenderPass<DenoiseSSGIPassData>("Denoise SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGIDenoise)))
            {
                builder.EnableAsyncCompute(false);

                // Prepass buffers
                passData.depthTexture        = builder.ReadTexture(depthPyramid);
                passData.normalBuffer        = builder.ReadTexture(normalBuffer);
                passData.motionVectorsBuffer = builder.ReadTexture(motionVectorsBuffer);

                // History buffers
                bool     historyRequireClear     = false;
                RTHandle indirectDiffuseHistory0 = RequestIndirectDiffuseHistory0(hdCamera, out historyRequireClear);
                passData.indirectDiffuseHistory0 = builder.ReadWriteTexture(renderGraph.ImportTexture(indirectDiffuseHistory0));
                RTHandle indirectDiffuseHistory1 = RequestIndirectDiffuseHistory1(hdCamera, out historyRequireClear);
                passData.indirectDiffuseHistory1 = builder.ReadWriteTexture(renderGraph.ImportTexture(indirectDiffuseHistory1));
                var historyDepthBuffer = halfResolution ? hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth1) : hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                passData.historyDepthBuffer = historyDepthBuffer != null ? builder.ReadTexture(renderGraph.ImportTexture(historyDepthBuffer)) : renderGraph.defaultResources.blackTextureXR;

                // Input buffers
                passData.inputBuffer0 = builder.ReadTexture(inputBuffer0);
                passData.inputBuffer1 = builder.ReadTexture(inputBuffer1);

                // Output buffers
                passData.outputBuffer0 = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.B10G11R11_UFloatPack32, enableRandomWrite = true, name = "SSGI Denoised 0"
                }));
                passData.outputBuffer1 = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16_SFloat, enableRandomWrite = true, name = "SSGI Denoised 1"
                }));

                // Parameters
                var giSettings = hdCamera.volumeStack.GetComponent<UnityEngine.Rendering.HighDefinition.GlobalIllumination>();

                // Compute the dispatch parameters based on whether we are running at half resolution or not
                int tileSize = 8;
                EvaluateDispatchParameters(hdCamera, halfResolution, tileSize, out passData.numTilesX, out passData.numTilesY, out passData.halfScreenSize);
                passData.firstMipOffset.Set(HDShadowUtils.Asfloat((uint)depthMipInfo.mipLevelOffsets[1].x), HDShadowUtils.Asfloat((uint)depthMipInfo.mipLevelOffsets[1].y));
                passData.viewCount          = hdCamera.viewCount;
                passData.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);

                // Denoising parameters
                passData.filterRadius      = giSettings.filterRadius;
                passData.halfResolution    = halfResolution;
                passData.historyValidity   = historyValidity;
                passData.historyNeedsClear = historyRequireClear;
                passData.exclusiveMode     = !hdCamera.frameSettings.IsEnabled(FrameSettingsField.ProbeVolume);

                // Compute shader
                passData.ssgiDenoiserCS = m_SSGIDenoiserCS;

                // Kernels
                passData.spatialFilterKernel  = halfResolution ? m_SpatialFilterHalfKernel : m_SpatialFilterKernel;
                passData.temporalFilterKernel = halfResolution ? m_TemporalFilterHalfKernel : m_TemporalFilterKernel;
                passData.copyHistory          = m_CopyHistory;

                builder.SetRenderFunc(
                    (DenoiseSSGIPassData data, RenderGraphContext ctx) =>
                {
                    int effectiveRadius = data.exclusiveMode ? data.filterRadius / 2 : data.filterRadius;
                    if (data.exclusiveMode)
                    {
                        // Horizontal Filter
                        SpatialFilter(ctx.cmd, data, effectiveRadius, new Vector2(1.0f, 0.0f), data.inputBuffer0, data.inputBuffer1, data.outputBuffer0, data.outputBuffer1);
                        // Vertical Filter
                        SpatialFilter(ctx.cmd, data, effectiveRadius, new Vector2(0.0f, 1.0f), data.outputBuffer0, data.outputBuffer1, data.inputBuffer0, data.inputBuffer1);
                    }

                    // Grab the history buffer
                    if (data.historyNeedsClear)
                    {
                        // Clear it to black if this is the first frame to avoid NaNs
                        CoreUtils.SetRenderTarget(ctx.cmd, data.indirectDiffuseHistory0, ClearFlag.Color, Color.black);
                        CoreUtils.SetRenderTarget(ctx.cmd, data.indirectDiffuseHistory1, ClearFlag.Color, Color.black);
                    }

                    // Bind the input buffers
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._DepthTexture, data.depthTexture);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorsBuffer);
                    ctx.cmd.SetComputeFloatParam(data.ssgiDenoiserCS, HDShaderIDs._HistoryValidity, data.historyValidity);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._HistoryDepthTexture, data.historyDepthBuffer);
                    if (data.halfResolution)
                    {
                        ctx.cmd.SetComputeVectorParam(data.ssgiDenoiserCS, HDShaderIDs._DepthPyramidFirstMipLevelOffset, data.firstMipOffset);
                    }
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._HistoryBuffer0, data.indirectDiffuseHistory0);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._HistoryBuffer1, data.indirectDiffuseHistory1);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._InputNoisyBuffer0, data.inputBuffer0);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._InputNoisyBuffer1, data.inputBuffer1);

                    // Bind the output buffer
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._OutputFilteredBuffer0, data.outputBuffer0);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.temporalFilterKernel, HDShaderIDs._OutputFilteredBuffer1, data.outputBuffer1);

                    // Do the temporal pass
                    ctx.cmd.DispatchCompute(data.ssgiDenoiserCS, data.temporalFilterKernel, data.numTilesX, data.numTilesY, data.viewCount);

                    // Copy the new version into the history buffer
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.copyHistory, HDShaderIDs._InputNoisyBuffer0, data.outputBuffer0);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.copyHistory, HDShaderIDs._InputNoisyBuffer1, data.outputBuffer1);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.copyHistory, HDShaderIDs._OutputFilteredBuffer0, data.indirectDiffuseHistory0);
                    ctx.cmd.SetComputeTextureParam(data.ssgiDenoiserCS, data.copyHistory, HDShaderIDs._OutputFilteredBuffer1, data.indirectDiffuseHistory1);
                    ctx.cmd.DispatchCompute(data.ssgiDenoiserCS, data.copyHistory, data.numTilesX, data.numTilesY, data.viewCount);

                    // Horizontal Filter
                    SpatialFilter(ctx.cmd, data, effectiveRadius, new Vector2(1.0f, 0.0f), data.outputBuffer0, data.outputBuffer1, data.inputBuffer0, data.inputBuffer1);
                    // Vertical Filter
                    SpatialFilter(ctx.cmd, data, effectiveRadius, new Vector2(0.0f, 1.0f), data.inputBuffer0, data.inputBuffer1, data.outputBuffer0, data.outputBuffer1);
                });

                SSGIDenoiserOutput denoiserOutput = new SSGIDenoiserOutput();
                denoiserOutput.outputBuffer0 = passData.outputBuffer0;
                denoiserOutput.outputBuffer1 = passData.outputBuffer1;
                return(denoiserOutput);
            }
        }
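
EvaluateDispatchParameters is invoked by the SSGI denoiser above but is not included in the snippet. A rough sketch of what it presumably computes, assuming the standard 8x8 tile rounding used elsewhere in these examples and an optional half-resolution divide; the meaning of halfScreenSize is an assumption.

        // Hypothetical reconstruction of the dispatch parameter helper; the actual HDRP code may differ.
        // Rounds the (optionally half-resolution) render size up to whole compute tiles and reports
        // the effective screen size consumed by the SSGI denoiser kernels.
        void EvaluateDispatchParameters(HDCamera hdCamera, bool halfResolution, int tileSize,
                                        out int numTilesX, out int numTilesY, out Vector4 halfScreenSize)
        {
            int width  = halfResolution ? hdCamera.actualWidth / 2 : hdCamera.actualWidth;
            int height = halfResolution ? hdCamera.actualHeight / 2 : hdCamera.actualHeight;

            numTilesX = (width + (tileSize - 1)) / tileSize;
            numTilesY = (height + (tileSize - 1)) / tileSize;

            // xy: effective resolution, zw: reciprocal resolution (pixel to UV conversion)
            halfScreenSize = new Vector4(width, height, 1.0f / width, 1.0f / height);
        }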
Example #8
        public void Init(RenderPipelineResources rpResources, HDRenderPipelineRayTracingResources rpRTResources, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            // Keep track of the resources
            m_DiffuseDenoiser  = rpRTResources.diffuseDenoiserCS;
            m_OwenScrambleRGBA = rpResources.textures.owenScrambledRGBATex;

            // Keep track of the shared rt manager
            m_SharedRTManager = sharedRTManager;
            m_RenderPipeline  = renderPipeline;

            // Grab all the kernels we'll eventually need
            m_BilateralFilterSingleKernel = m_DiffuseDenoiser.FindKernel("BilateralFilterSingle");
            m_BilateralFilterColorKernel  = m_DiffuseDenoiser.FindKernel("BilateralFilterColor");
            m_GatherSingleKernel          = m_DiffuseDenoiser.FindKernel("GatherSingle");
            m_GatherColorKernel           = m_DiffuseDenoiser.FindKernel("GatherColor");
        }
Example #9
        public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, DiffuseDenoiserParameters denoiserParams,
                                     TextureHandle noisyBuffer, TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle outputBuffer)
        {
            using (var builder = renderGraph.AddRenderPass<DiffuseDenoiserPassData>("DiffuseDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.DiffuseFilter)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                // Initialization data
                passData.needInit             = !m_DenoiserInitialized;
                m_DenoiserInitialized         = true;
                passData.owenScrambledTexture = m_OwnenScrambledTexture;

                // Camera parameters
                if (denoiserParams.fullResolutionInput)
                {
                    passData.texWidth  = hdCamera.actualWidth;
                    passData.texHeight = hdCamera.actualHeight;
                }
                else
                {
                    passData.texWidth  = hdCamera.actualWidth / 2;
                    passData.texHeight = hdCamera.actualHeight / 2;
                }
                passData.viewCount = hdCamera.viewCount;

                // Parameters
                passData.pixelSpreadTangent   = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, passData.texWidth, passData.texHeight);
                passData.kernelSize           = denoiserParams.kernelSize;
                passData.halfResolutionFilter = denoiserParams.halfResolutionFilter;
                passData.jitterFilter         = denoiserParams.jitterFilter;
                passData.frameIndex           = HDRenderPipeline.RayTracingFrameIndex(hdCamera);
                passData.fullResolutionInput  = denoiserParams.fullResolutionInput;

                // Kernels
                passData.bilateralFilterKernel = denoiserParams.singleChannel ? m_BilateralFilterSingleKernel : m_BilateralFilterColorKernel;
                passData.gatherKernel          = denoiserParams.singleChannel ? m_GatherSingleKernel : m_GatherColorKernel;

                // Other parameters
                passData.diffuseDenoiserCS = m_DiffuseDenoiser;

                passData.pointDistribution  = builder.ReadComputeBuffer(renderGraph.ImportComputeBuffer(m_PointDistribution));
                passData.depthStencilBuffer = builder.ReadTexture(depthBuffer);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.noisyBuffer        = builder.ReadTexture(noisyBuffer);
                passData.intermediateBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.B10G11R11_UFloatPack32, enableRandomWrite = true, name = "DiffuseDenoiserIntermediate"
                });
                passData.outputBuffer = builder.WriteTexture(outputBuffer);

                builder.SetRenderFunc(
                    (DiffuseDenoiserPassData data, RenderGraphContext ctx) =>
                {
                    // Generate the point distribution if needed (this is only run once)
                    if (data.needInit)
                    {
                        int generatePointDistributionKernel = data.diffuseDenoiserCS.FindKernel("GeneratePointDistribution");
                        ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, generatePointDistributionKernel, HDShaderIDs._OwenScrambledRGTexture, data.owenScrambledTexture);
                        ctx.cmd.SetComputeBufferParam(data.diffuseDenoiserCS, generatePointDistributionKernel, "_PointDistributionRW", data.pointDistribution);
                        ctx.cmd.DispatchCompute(data.diffuseDenoiserCS, generatePointDistributionKernel, 1, 1, 1);
                    }

                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesX    = (data.texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesY    = (data.texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Bind the data required for the bilateral filter
                    ctx.cmd.SetComputeFloatParam(data.diffuseDenoiserCS, HDShaderIDs._DenoiserFilterRadius, data.kernelSize);
                    ctx.cmd.SetComputeBufferParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._PointDistribution, data.pointDistribution);
                    ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._DenoiseInputTexture, data.noisyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._DenoiseOutputTextureRW, data.halfResolutionFilter ? data.intermediateBuffer : data.outputBuffer);
                    ctx.cmd.SetComputeIntParam(data.diffuseDenoiserCS, HDShaderIDs._HalfResolutionFilter, data.halfResolutionFilter ? 1 : 0);
                    ctx.cmd.SetComputeFloatParam(data.diffuseDenoiserCS, HDShaderIDs._PixelSpreadAngleTangent, data.pixelSpreadTangent);
                    if (data.jitterFilter)
                    {
                        ctx.cmd.SetComputeIntParam(data.diffuseDenoiserCS, HDShaderIDs._JitterFramePeriod, (data.frameIndex % 4));
                    }
                    else
                    {
                        ctx.cmd.SetComputeIntParam(data.diffuseDenoiserCS, HDShaderIDs._JitterFramePeriod, -1);
                    }

                    CoreUtils.SetKeyword(ctx.cmd, "FULL_RESOLUTION_INPUT", data.fullResolutionInput);
                    ctx.cmd.DispatchCompute(data.diffuseDenoiserCS, data.bilateralFilterKernel, numTilesX, numTilesY, data.viewCount);

                    if (data.halfResolutionFilter)
                    {
                        ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.gatherKernel, HDShaderIDs._DenoiseInputTexture, data.intermediateBuffer);
                        ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.gatherKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                        ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.gatherKernel, HDShaderIDs._DenoiseOutputTextureRW, data.outputBuffer);
                        ctx.cmd.DispatchCompute(data.diffuseDenoiserCS, data.gatherKernel, numTilesX, numTilesY, data.viewCount);
                    }
                    CoreUtils.SetKeyword(ctx.cmd, "FULL_RESOLUTION_INPUT", false);
                });
                return(passData.outputBuffer);
            }
        }
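
A hedged usage sketch that ties PrepareDiffuseDenoiserParameters and the render-graph Denoise call together. The call site, buffer names and argument values are illustrative, and the sketch assumes the parameter struct carries the fields referenced by both snippets (which come from slightly different versions of the denoiser).

        // Illustrative call site only; names, values and the surrounding pipeline state are assumptions.
        TextureHandle DenoiseIndirectDiffuseSketch(RenderGraph renderGraph, HDCamera hdCamera,
                                                   TextureHandle noisySignal, TextureHandle depthBuffer,
                                                   TextureHandle normalBuffer, TextureHandle outputBuffer)
        {
            // Prepare the denoiser parameters for a single-channel signal with a 0.5 world-space kernel
            DiffuseDenoiserParameters ddParams = PrepareDiffuseDenoiserParameters(hdCamera, singleChannel: true, kernelSize: 0.5f, halfResolutionFilter: false);

            // Run the diffuse denoiser through the render graph and return the filtered signal
            return Denoise(renderGraph, hdCamera, ddParams, noisySignal, depthBuffer, normalBuffer, outputBuffer);
        }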
        public void Init(HDRenderPipelineRayTracingResources rpRTResources, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            m_ReflectionDenoiserCS    = rpRTResources.reflectionDenoiserCS;
            m_ReflectionFilterMapping = rpRTResources.reflectionFilterMapping;
            m_SharedRTManager         = sharedRTManager;
            m_RenderPipeline          = renderPipeline;

            // Fetch all the kernels we shall be using
            s_TemporalAccumulationKernel = m_ReflectionDenoiserCS.FindKernel("TemporalAccumulation");
            s_CopyHistoryKernel          = m_ReflectionDenoiserCS.FindKernel("CopyHistory");
            s_BilateralFilterHKernel     = m_ReflectionDenoiserCS.FindKernel("BilateralFilterH");
            s_BilateralFilterVKernel     = m_ReflectionDenoiserCS.FindKernel("BilateralFilterV");
        }
        public TemporalDenoiserArrayOutputData DenoiseBuffer(RenderGraph renderGraph, HDCamera hdCamera,
                                                             TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorBuffer, TextureHandle historyValidationBuffer,
                                                             TextureHandle noisyBuffer, RTHandle historyBuffer,
                                                             TextureHandle distanceBuffer, RTHandle distanceHistorySignal,
                                                             TextureHandle velocityBuffer,
                                                             RTHandle validationHistoryBuffer,
                                                             int sliceIndex, Vector4 channelMask, Vector4 distanceChannelMask,
                                                             bool distanceBased, bool singleChannel, float historyValidity)
        {
            TemporalDenoiserArrayOutputData resultData = new TemporalDenoiserArrayOutputData();

            using (var builder = renderGraph.AddRenderPass<TemporalFilterArrayPassData>("TemporalDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.TemporalFilter)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                // Set the camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Denoising parameters
                passData.distanceBasedDenoiser = distanceBased;
                passData.historyValidity       = historyValidity;
                passData.pixelSpreadTangent    = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
                passData.sliceIndex            = sliceIndex;
                passData.channelMask           = channelMask;
                passData.distanceChannelMask   = distanceChannelMask;

                // Kernels
                passData.temporalAccKernel            = singleChannel ? m_TemporalAccumulationSingleArrayKernel : m_TemporalAccumulationColorArrayKernel;
                passData.blendHistoryKernel           = singleChannel ? m_BlendHistorySingleArrayKernel : m_BlendHistoryColorArrayKernel;
                passData.temporalAccSingleKernel      = m_TemporalAccumulationSingleArrayKernel;
                passData.blendHistoryNoValidityKernel = m_BlendHistorySingleArrayNoValidityKernel;
                passData.outputHistoryKernel          = m_OutputHistoryArrayKernel;

                // Other parameters
                passData.temporalFilterCS = m_TemporalFilterCS;

                // Input buffers
                passData.depthStencilBuffer = builder.ReadTexture(depthBuffer);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.motionVectorBuffer = builder.ReadTexture(motionVectorBuffer);

                passData.velocityBuffer   = builder.ReadTexture(velocityBuffer);
                passData.noisyBuffer      = builder.ReadTexture(noisyBuffer);
                passData.distanceBuffer   = distanceBased ? builder.ReadTexture(distanceBuffer) : renderGraph.defaultResources.blackTextureXR;
                passData.validationBuffer = builder.ReadTexture(historyValidationBuffer);

                // History buffers
                passData.historyBuffer           = builder.ReadWriteTexture(renderGraph.ImportTexture(historyBuffer));
                passData.validationHistoryBuffer = builder.ReadWriteTexture(renderGraph.ImportTexture(validationHistoryBuffer));
                passData.distanceHistorySignal   = distanceBased ? builder.ReadWriteTexture(renderGraph.ImportTexture(distanceHistorySignal)) : renderGraph.defaultResources.blackTextureXR;

                // Intermediate buffers
                passData.intermediateSignalOutput = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Intermediate Filter Output"
                });
                passData.intermediateValidityOutput = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Intermediate Validity output"
                });

                // Output textures
                passData.outputBuffer = builder.ReadWriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporal Filter Output"
                }));
                passData.outputDistanceSignal = distanceBased ? builder.ReadWriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporal Filter Distance output"
                })) : new TextureHandle();


                builder.SetRenderFunc(
                    (TemporalFilterArrayPassData data, RenderGraphContext ctx) =>
                {
                    // Evaluate the dispatch parameters
                    int tfTileSize = 8;
                    int numTilesX  = (data.texWidth + (tfTileSize - 1)) / tfTileSize;
                    int numTilesY  = (data.texHeight + (tfTileSize - 1)) / tfTileSize;

                    // Now that we have validated our history, let's accumulate
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DenoiseInputTexture, data.noisyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._HistoryBuffer, data.historyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._HistoryValidityBuffer, data.validationHistoryBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._ValidationBuffer, data.validationBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._VelocityBuffer, data.velocityBuffer);

                    // Bind the constants
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                    ctx.cmd.SetComputeVectorParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistoryMask, data.channelMask);
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._HistoryValidity, data.historyValidity);
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._ReceiverMotionRejection, 1);
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._OccluderMotionRejection, 1);

                    // Bind the output buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._AccumulationOutputTextureRW, data.outputBuffer);

                    // Combine with the history
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.temporalAccKernel, numTilesX, numTilesY, data.viewCount);

                    // Combine with the history buffer
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                    ctx.cmd.SetComputeVectorParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistoryMask, data.channelMask);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.blendHistoryKernel, HDShaderIDs._DenoiseInputTexture, data.outputBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.blendHistoryKernel, HDShaderIDs._DenoiseInputArrayTexture, data.historyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.blendHistoryKernel, HDShaderIDs._ValidityInputArrayTexture, data.validationHistoryBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.blendHistoryKernel, HDShaderIDs._IntermediateDenoiseOutputTextureRW, data.intermediateSignalOutput);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.blendHistoryKernel, HDShaderIDs._IntermediateValidityOutputTextureRW, data.intermediateValidityOutput);
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.blendHistoryKernel, numTilesX, numTilesY, data.viewCount);

                    // Output the combination to the history buffer
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.outputHistoryKernel, HDShaderIDs._IntermediateDenoiseOutputTexture, data.intermediateSignalOutput);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.outputHistoryKernel, HDShaderIDs._IntermediateValidityOutputTexture, data.intermediateValidityOutput);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.outputHistoryKernel, HDShaderIDs._DenoiseOutputArrayTextureRW, data.historyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.outputHistoryKernel, HDShaderIDs._ValidityOutputTextureRW, data.validationHistoryBuffer);
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.outputHistoryKernel, numTilesX, numTilesY, data.viewCount);

                    if (data.distanceBasedDenoiser)
                    {
                        // Bind the input buffers
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._DenoiseInputTexture, data.distanceBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._HistoryBuffer, data.distanceHistorySignal);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._HistoryValidityBuffer, data.validationHistoryBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._ValidationBuffer, data.validationBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._VelocityBuffer, data.velocityBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);

                        // Bind the constant inputs
                        ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                        ctx.cmd.SetComputeVectorParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistoryMask, data.distanceChannelMask);

                        // Bind the output buffers
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._AccumulationOutputTextureRW, data.outputDistanceSignal);

                        // Dispatch the temporal accumulation
                        ctx.cmd.DispatchCompute(data.temporalFilterCS, data.temporalAccSingleKernel, numTilesX, numTilesY, data.viewCount);

                        // Make sure to copy the newly accumulated signal into our history buffer
                        ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                        ctx.cmd.SetComputeVectorParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistoryMask, data.distanceChannelMask);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.blendHistoryNoValidityKernel, HDShaderIDs._DenoiseInputTexture, data.outputDistanceSignal);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.blendHistoryNoValidityKernel, HDShaderIDs._DenoiseInputArrayTexture, data.distanceHistorySignal);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.blendHistoryNoValidityKernel, HDShaderIDs._IntermediateDenoiseOutputTextureRW, data.intermediateSignalOutput);
                        ctx.cmd.DispatchCompute(data.temporalFilterCS, data.blendHistoryNoValidityKernel, numTilesX, numTilesY, data.viewCount);

                        // Output the combination to the history buffer
                        ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.outputHistoryKernel, HDShaderIDs._IntermediateDenoiseOutputTexture, data.intermediateSignalOutput);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.outputHistoryKernel, HDShaderIDs._DenoiseOutputArrayTextureRW, data.intermediateSignalOutput);
                        ctx.cmd.DispatchCompute(data.temporalFilterCS, data.outputHistoryKernel, numTilesX, numTilesY, data.viewCount);
                    }
                });

                resultData.outputSignal         = passData.outputBuffer;
                resultData.outputSignalDistance = passData.outputDistanceSignal;
            }
            return(resultData);
        }
Example #12
 internal void InitRaytracing(HDRenderPipeline renderPipeline)
 {
     m_RaytracingAmbientOcclusion.Init(renderPipeline);
 }
Example #13
        // Denoiser variant when the history is stored in an array and the validation buffer is separate
        public void DenoiseBuffer(CommandBuffer cmd, HDCamera hdCamera,
                                  RTHandle noisySignal, RTHandle historySignal,
                                  RTHandle validationHistory,
                                  RTHandle velocityBuffer,
                                  RTHandle outputSignal,
                                  int sliceIndex, Vector4 channelMask,
                                  RTHandle distanceSignal, RTHandle distanceHistorySignal, RTHandle outputDistanceSignal, Vector4 distanceChannelMask,
                                  bool singleChannel = true, float historyValidity = 1.0f)
        {
            // If we do not have depth and normal history buffers, we can skip right away
            var historyDepthBuffer  = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
            var historyNormalBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal);

            if (historyDepthBuffer == null || historyNormalBuffer == null)
            {
                HDUtils.BlitCameraTexture(cmd, noisySignal, historySignal);
                HDUtils.BlitCameraTexture(cmd, noisySignal, outputSignal);
                if (distanceSignal != null && distanceHistorySignal != null && outputDistanceSignal != null)
                {
                    HDUtils.BlitCameraTexture(cmd, distanceSignal, distanceHistorySignal);
                    HDUtils.BlitCameraTexture(cmd, distanceSignal, outputDistanceSignal);
                }
                return;
            }

            // Fetch texture dimensions
            int texWidth  = hdCamera.actualWidth;
            int texHeight = hdCamera.actualHeight;

            // Evaluate the dispatch parameters
            int areaTileSize = 8;
            int numTilesX    = (texWidth + (areaTileSize - 1)) / areaTileSize;
            int numTilesY    = (texHeight + (areaTileSize - 1)) / areaTileSize;

            // Request the intermediate buffer we need
            RTHandle validationBuffer = m_RenderPipeline.GetRayTracingBuffer(InternalRayTracingBuffers.R0);

            // First of all we need to validate the history to know where we can or cannot use the history signal
            int m_KernelFilter = m_TemporalFilterCS.FindKernel("ValidateHistory");

            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryNormalBufferTexture, historyNormalBuffer);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBufferRW, validationBuffer);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, velocityBuffer);
            cmd.SetComputeFloatParam(m_TemporalFilterCS, HDShaderIDs._HistoryValidity, historyValidity);
            cmd.SetComputeFloatParam(m_TemporalFilterCS, HDShaderIDs._PixelSpreadAngleTangent, HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight));
            cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

            // Now that we have validated our history, let's accumulate
            m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? "TemporalAccumulationSingleArray" : "TemporalAccumulationColorArray");
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, noisySignal);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryBuffer, historySignal);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryValidityBuffer, validationHistory);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, outputSignal);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBuffer, validationBuffer);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, velocityBuffer);
            cmd.SetComputeIntParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistorySlice, sliceIndex);
            cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistoryMask, channelMask);
            cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

            // Make sure to copy the newly accumulated signal into our history buffer
            m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? "CopyHistorySingleArray" : "CopyHistoryColorArray");
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, outputSignal);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, historySignal);
            cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidityOutputTextureRW, validationHistory);
            cmd.SetComputeIntParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistorySlice, sliceIndex);
            cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistoryMask, channelMask);
            cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

            if (distanceSignal != null && distanceHistorySignal != null && outputDistanceSignal != null)
            {
                // Now that we have validated our history, let's accumulate
                m_KernelFilter = m_TemporalFilterCS.FindKernel("TemporalAccumulationSingleArray");

                // Bind the input buffers
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, distanceSignal);
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryBuffer, distanceHistorySignal);
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryValidityBuffer, validationHistory);
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBuffer, validationBuffer);
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, velocityBuffer);

                // Bind the constant inputs
                cmd.SetComputeIntParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistorySlice, sliceIndex);
                cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistoryMask, distanceChannelMask);

                // Bind the output buffers
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, outputDistanceSignal);

                // Dispatch the temporal accumulation
                cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

                // Make sure to copy the newly accumulated signal into our history buffer
                m_KernelFilter = m_TemporalFilterCS.FindKernel("CopyHistorySingleArrayNoValidity");
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, outputDistanceSignal);
                cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, distanceHistorySignal);
                cmd.SetComputeIntParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistorySlice, sliceIndex);
                cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistoryMask, distanceChannelMask);
                cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);
            }
        }
        void RaytracingRecursiveRender(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, CullingResults cull)
        {
            if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing))
            {
                return;
            }

            RecursiveRendering recursiveSettings = hdCamera.volumeStack.GetComponent<RecursiveRendering>();

            if (!recursiveSettings.enable.value)
            {
                return;
            }

            // Recursive rendering works as follows:
            // - Shaders have a _RayTracing property.
            // When this property is set to true, a RayTracingPrepass pass on the material is enabled (otherwise it is disabled).
            // - Before the prepass we render all objects with a RayTracingPrepass pass enabled into the depth buffer to save performance.
            // Note that ray-traced objects are excluded from the DepthPrepass, GBuffer and Forward passes but not from the
            // motion vector pass, so we still benefit from motion vectors. This is handled in VertMesh.hlsl (see below).
            // However, rendering motion vectors currently tags the stencil for deferred lighting, which can produce overshading.
            // - After the Transparent Depth pass we render all objects with a RayTracingPrepass pass enabled to output a mask buffer (we need to depth test but not to write depth).
            // Note: we render twice: once to save performance and once to write the mask. If we wrote the mask in the first pass, it
            // would not take into account objects that could render on top of the ray-traced ones (to support that we would have to perform the pass once
            // the depth buffer is ready, i.e. after the GBuffer pass, so we could not save performance).
            // - During RaytracingRecursiveRender we perform a RayTracingRendering.raytrace call on all pixels tagged in the mask.
            // Meshes must be excluded from the regular passes to save performance (for opaque) and to get correct results (for transparent).
            // To do this we cull the meshes by setting their position to NaN when _RayTracing is true and _EnableRecursiveRayTracing is true.
            // This approach avoids dealing with RenderQueue and allows Recursive rendering to be disabled dynamically,
            // falling back to classic rasterized transparency. The culling code is in VertMesh().
            // If ray tracing is disabled, _EnableRecursiveRayTracing is set to false and no culling happens.
            // Objects are still rendered in the shadow and motion vector passes to keep their properties.

            // We render Recursive rendering objects before transparents, so transparent objects can be overlaid on top,
            // like a lens flare on top of a headlight. We write depth, so objects behind are correctly z-tested, as recursive rendering
            // re-renders everything (meaning we should also support fog and sky in it).

            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RayTracingRecursiveRendering)))
            {
                RayTracingShader   forwardShader        = m_Asset.renderPipelineRayTracingResources.forwardRaytracing;
                LightCluster       lightClusterSettings = hdCamera.volumeStack.GetComponent<LightCluster>();
                RayTracingSettings rtSettings           = hdCamera.volumeStack.GetComponent<RayTracingSettings>();

                // Grab the acceleration structure and the list of HD lights for the target camera
                RayTracingAccelerationStructure accelerationStructure = RequestAccelerationStructure();
                HDRaytracingLightCluster        lightCluster          = RequestLightCluster();

                // Define the shader pass to use for the recursive rendering pass
                cmd.SetRayTracingShaderPass(forwardShader, "ForwardDXR");

                // Set the acceleration structure for the pass
                cmd.SetRayTracingAccelerationStructure(forwardShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

                // Inject the ray-tracing sampling data
                cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._OwenScrambledTexture, m_Asset.renderPipelineResources.textures.owenScrambledRGBATex);
                cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);

                // Inject the ray generation data
                cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtSettings.rayBias.value);
                cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, recursiveSettings.rayLength.value);
                cmd.SetGlobalFloat(HDShaderIDs._RaytracingMaxRecursion, recursiveSettings.maxDepth.value);
                cmd.SetGlobalFloat(HDShaderIDs._RaytracingCameraNearPlane, hdCamera.camera.nearClipPlane);

                // Set the data for the ray generation

                // Fetch the temporary buffers we shall be using
                RTHandle flagBuffer = GetRayTracingBuffer(InternalRayTracingBuffers.R0);
                cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RaytracingFlagMask, flagBuffer);
                cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._CameraColorTextureRW, m_CameraColorBuffer);

                // Set ray count texture
                RayCountManager rayCountManager = GetRayCountManager();
                cmd.SetGlobalInt(HDShaderIDs._RayCountEnabled, rayCountManager.RayCountIsEnabled());
                cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RayCountTexture, rayCountManager.GetRayCountTexture());

                // Compute an approximate pixel spread angle value (in radians)
                cmd.SetGlobalFloat(HDShaderIDs._RaytracingPixelSpreadAngle, GetPixelSpreadAngle(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight));

                // LightLoop data
                lightCluster.BindLightClusterData(cmd);

                // Note: just in case, we rebind the directional light data (in case it was not already bound)
                cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, m_LightLoopLightData.directionalLightData);

                // Set the data for the ray miss
                cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._SkyTexture, m_SkyManager.GetSkyReflection(hdCamera));

                // Bind the buffer used to output debug information for the recursive ray tracing debug view
                RTHandle debugBuffer = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA0);
                cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RaytracingPrimaryDebug, debugBuffer);

                // Run the computation
                cmd.DispatchRays(forwardShader, m_RayGenShaderName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, (uint)hdCamera.viewCount);

                HDRenderPipeline hdrp = (RenderPipelineManager.currentPipeline as HDRenderPipeline);
                hdrp.PushFullScreenDebugTexture(hdCamera, cmd, debugBuffer, FullScreenDebugMode.RecursiveRayTracing);
            }
        }
Beispiel #15
0
        public void RaytracingRecursiveRender(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, CullingResults cull)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironment     = m_RayTracingManager.CurrentEnvironment();
            RecursiveRendering      recursiveSettings = VolumeManager.instance.stack.GetComponent <RecursiveRendering>();

            // Check the validity of the state before computing the effect
            bool invalidState = !hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) ||
                                rtEnvironment == null ||
                                !recursiveSettings.enable.value ||
                                m_Asset.currentPlatformRenderPipelineSettings.supportedRaytracingTier == RenderPipelineSettings.RaytracingTier.Tier1;

            // If any resource or game object is missing, we stop right away
            if (invalidState)
            {
                return;
            }

            HDRenderPipeline renderPipeline       = m_RayTracingManager.GetRenderPipeline();
            RayTracingShader forwardShader        = m_Asset.renderPipelineRayTracingResources.forwardRaytracing;
            Shader           raytracingMask       = m_Asset.renderPipelineRayTracingResources.raytracingFlagMask;
            LightCluster     lightClusterSettings = VolumeManager.instance.stack.GetComponent <LightCluster>();

            // Grab the acceleration structure and the list of HD lights for the target camera
            RayTracingAccelerationStructure accelerationStructure = m_RayTracingManager.RequestAccelerationStructure(rtEnvironment.raytracedLayerMask);
            HDRaytracingLightCluster        lightCluster          = m_RayTracingManager.RequestLightCluster(rtEnvironment.raytracedLayerMask);

            if (m_RaytracingFlagMaterial == null)
            {
                m_RaytracingFlagMaterial = CoreUtils.CreateEngineMaterial(raytracingMask);
            }

            // Before going into ray tracing, we need to flag which pixels need to be ray traced
            EvaluateRaytracingMask(cull, hdCamera, cmd, renderContext);

            // Define the shader pass to use for the reflection pass
            cmd.SetRayTracingShaderPass(forwardShader, "ForwardDXR");

            // Set the acceleration structure for the pass
            cmd.SetRayTracingAccelerationStructure(forwardShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._OwenScrambledTexture, m_Asset.renderPipelineResources.textures.owenScrambledRGBATex);
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, recursiveSettings.rayLength.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingMaxRecursion, recursiveSettings.maxDepth.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingCameraNearPlane, hdCamera.camera.nearClipPlane);

            // Set the data for the ray generation
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RaytracingFlagMask, m_RaytracingFlagTarget);
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._CameraColorTextureRW, m_CameraColorBuffer);

            // Set ray count texture
            cmd.SetRayTracingIntParam(forwardShader, HDShaderIDs._RayCountEnabled, m_RayTracingManager.rayCountManager.RayCountIsEnabled());
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RayCountTexture, m_RayTracingManager.rayCountManager.GetRayCountTexture());

            // Compute an approximate pixel spread angle value (in radians)
            float pixelSpreadAngle = hdCamera.camera.fieldOfView * (Mathf.PI / 180.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight);
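            // For example, a 60 degree vertical field of view at 1920x1080 gives roughly 60 * (PI / 180) / 1080, i.e. about 0.00097 radians per pixel.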

            cmd.SetGlobalFloat(HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

            // LightLoop data
            cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
            cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
            cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
            cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
            cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, lightClusterSettings.maxNumLightsPercell.value);
            cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
            cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

            // Note: Just in case, we rebind the directional light data (in case they were not)
            cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, renderPipeline.m_LightLoopLightData.directionalLightData);
            cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, renderPipeline.m_lightList.directionalLights.Count);

            // Set the data for the ray miss
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

            // Bind the debug output texture and push it to the full-screen debug view
            HDRenderPipeline hdrp = (RenderPipelineManager.currentPipeline as HDRenderPipeline);

            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RaytracingPrimaryDebug, m_DebugRaytracingTexture);
            hdrp.PushFullScreenDebugTexture(hdCamera, cmd, m_DebugRaytracingTexture, FullScreenDebugMode.RecursiveTracing);

            // Run the computation
            cmd.DispatchRays(forwardShader, m_RayGenShaderName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
        }
        public TextureHandle DenoiseRTR(RenderGraph renderGraph, HDCamera hdCamera, float historyValidity, int maxKernelSize, bool fullResolution, bool singleReflectionBounce, bool affectSmoothSurfaces,
                                        TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorBuffer, TextureHandle clearCoatTexture, TextureHandle lightingTexture, RTHandle historyBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <ReflectionDenoiserPassData>("Denoise ray traced reflections", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingReflectionFilter)))
            {
                builder.EnableAsyncCompute(false);

                // Camera parameters
                passData.texWidth  = fullResolution ? hdCamera.actualWidth : (hdCamera.actualWidth / 2);
                passData.texHeight = fullResolution ? hdCamera.actualHeight : (hdCamera.actualHeight / 2);
                passData.viewCount = hdCamera.viewCount;

                // De-noising parameters
                passData.historyValidity         = historyValidity;
                passData.maxKernelSize           = fullResolution ? maxKernelSize : maxKernelSize / 2;
                passData.historyBufferSize       = new Vector2(1.0f / (float)hdCamera.historyRTHandleProperties.currentRenderTargetSize.x, 1.0f / (float)hdCamera.historyRTHandleProperties.currentRenderTargetSize.y);
                passData.currentEffectResolution = new Vector4(passData.texWidth, passData.texHeight, 1.0f / (float)passData.texWidth, 1.0f / (float)passData.texHeight);
                passData.pixelSpreadTangent      = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, passData.texWidth, passData.texHeight);
                passData.affectSmoothSurfaces    = affectSmoothSurfaces ? 1 : 0;
                passData.singleReflectionBounce  = singleReflectionBounce ? 1 : 0;

                // Other parameters
                passData.reflectionDenoiserCS       = m_ReflectionDenoiserCS;
                passData.temporalAccumulationKernel = fullResolution ? s_TemporalAccumulationFullResKernel : s_TemporalAccumulationHalfResKernel;
                passData.copyHistoryKernel          = s_CopyHistoryKernel;
                passData.bilateralFilterHKernel     = fullResolution ? s_BilateralFilterH_FRKernel : s_BilateralFilterH_HRKernel;
                passData.bilateralFilterVKernel     = fullResolution ? s_BilateralFilterV_FRKernel : s_BilateralFilterV_HRKernel;
                passData.reflectionFilterMapping    = m_ReflectionFilterMapping;

                passData.depthBuffer        = builder.ReadTexture(depthPyramid);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.motionVectorBuffer = builder.ReadTexture(motionVectorBuffer);
                RTHandle depthT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                passData.historyDepth = depthT != null ? renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth)) : renderGraph.defaultResources.blackTextureXR;

                passData.intermediateBuffer0 = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "IntermediateTexture0"
                });
                passData.intermediateBuffer1 = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "IntermediateTexture1"
                });
                passData.historySignal       = builder.ReadWriteTexture(renderGraph.ImportTexture(historyBuffer));
                passData.noisyToOutputSignal = builder.ReadWriteTexture(lightingTexture);

                builder.SetRenderFunc((ReflectionDenoiserPassData data, RenderGraphContext ctx) =>
                {
                    // Evaluate the dispatch parameters
                    int tileSize  = 8;
                    int numTilesX = (data.texWidth + (tileSize - 1)) / tileSize;
                    int numTilesY = (data.texHeight + (tileSize - 1)) / tileSize;
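                    // (size + tileSize - 1) / tileSize is an integer ceiling division, e.g. a 1921-pixel-wide target with 8-pixel tiles yields 241 groups.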

                    // Input data
                    ctx.cmd.SetComputeFloatParam(data.reflectionDenoiserCS, HDShaderIDs._HistoryValidity, data.historyValidity);
                    ctx.cmd.SetComputeFloatParam(data.reflectionDenoiserCS, HDShaderIDs._PixelSpreadAngleTangent, data.pixelSpreadTangent);
                    ctx.cmd.SetComputeVectorParam(data.reflectionDenoiserCS, HDShaderIDs._HistoryBufferSize, data.historyBufferSize);
                    ctx.cmd.SetComputeVectorParam(data.reflectionDenoiserCS, HDShaderIDs._CurrentEffectResolution, data.currentEffectResolution);
                    ctx.cmd.SetComputeIntParam(data.reflectionDenoiserCS, HDShaderIDs._AffectSmoothSurfaces, data.affectSmoothSurfaces);
                    ctx.cmd.SetComputeIntParam(data.reflectionDenoiserCS, HDShaderIDs._SingleReflectionBounce, data.singleReflectionBounce);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.temporalAccumulationKernel, HDShaderIDs._DenoiseInputTexture, data.noisyToOutputSignal);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.temporalAccumulationKernel, HDShaderIDs._DepthTexture, data.depthBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.temporalAccumulationKernel, HDShaderIDs._HistoryDepthTexture, data.historyDepth);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.temporalAccumulationKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.temporalAccumulationKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.temporalAccumulationKernel, HDShaderIDs._HistoryBuffer, data.historySignal);

                    // Output texture
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.temporalAccumulationKernel, HDShaderIDs._DenoiseOutputTextureRW, data.intermediateBuffer0);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.temporalAccumulationKernel, HDShaderIDs._SampleCountTextureRW, data.intermediateBuffer1);

                    // Do the temporal accumulation
                    ctx.cmd.DispatchCompute(data.reflectionDenoiserCS, data.temporalAccumulationKernel, numTilesX, numTilesY, data.viewCount);

                    // Copy the accumulated signal into the history buffer
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.copyHistoryKernel, HDShaderIDs._DenoiseInputTexture, data.intermediateBuffer0);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.copyHistoryKernel, HDShaderIDs._DenoiseOutputTextureRW, data.historySignal);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.copyHistoryKernel, HDShaderIDs._SampleCountTextureRW, data.intermediateBuffer1);
                    ctx.cmd.DispatchCompute(data.reflectionDenoiserCS, data.copyHistoryKernel, numTilesX, numTilesY, data.viewCount);
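
                    // The two dispatches below form a separable bilateral blur: a horizontal pass into an intermediate buffer, then a vertical pass that writes the final filtered signal.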

                    // Horizontal pass of the bilateral filter
                    ctx.cmd.SetComputeIntParam(data.reflectionDenoiserCS, HDShaderIDs._DenoiserFilterRadius, data.maxKernelSize);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterHKernel, HDShaderIDs._DenoiseInputTexture, data.intermediateBuffer0);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterHKernel, HDShaderIDs._DepthTexture, data.depthBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterHKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterHKernel, HDShaderIDs._DenoiseOutputTextureRW, data.intermediateBuffer1);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterHKernel, HDShaderIDs._ReflectionFilterMapping, data.reflectionFilterMapping);
                    ctx.cmd.DispatchCompute(data.reflectionDenoiserCS, data.bilateralFilterHKernel, numTilesX, numTilesY, data.viewCount);

                    // Vertical pass of the bilateral filter
                    ctx.cmd.SetComputeIntParam(data.reflectionDenoiserCS, HDShaderIDs._DenoiserFilterRadius, data.maxKernelSize);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterVKernel, HDShaderIDs._DenoiseInputTexture, data.intermediateBuffer1);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterVKernel, HDShaderIDs._DepthTexture, data.depthBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterVKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterVKernel, HDShaderIDs._DenoiseOutputTextureRW, data.noisyToOutputSignal);
                    ctx.cmd.SetComputeTextureParam(data.reflectionDenoiserCS, data.bilateralFilterVKernel, HDShaderIDs._ReflectionFilterMapping, data.reflectionFilterMapping);
                    ctx.cmd.DispatchCompute(data.reflectionDenoiserCS, data.bilateralFilterVKernel, numTilesX, numTilesY, data.viewCount);
                });

                return(passData.noisyToOutputSignal);
            }
        }
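        // Hedged aside (not part of the original example): HDRenderPipeline.GetPixelSpreadTangent above feeds _PixelSpreadAngleTangent.
        // A minimal sketch of such a helper, assuming it approximates the tangent of the angle subtended by a single pixel
        // for a given vertical field of view and resolution (the name below is illustrative, not the engine's):
        static float PixelSpreadTangentSketch(float verticalFovDegrees, int width, int height)
        {
            // Tangent of the half field of view, distributed over the smaller screen dimension.
            return Mathf.Tan(verticalFovDegrees * Mathf.Deg2Rad * 0.5f) * 2.0f / Mathf.Min(width, height);
        }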
        public void Init(HDRenderPipelineRayTracingResources rpRTResources, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            m_SharedRTManager = sharedRTManager;
            m_RenderPipeline  = renderPipeline;

            m_ShadowDenoiser      = rpRTResources.diffuseShadowDenoiserCS;
            m_ShadowFilterMapping = rpRTResources.shadowFilterMapping;

            m_BilateralFilterHSingleDirectionalKernel = m_ShadowDenoiser.FindKernel("BilateralFilterHSingleDirectional");
            m_BilateralFilterVSingleDirectionalKernel = m_ShadowDenoiser.FindKernel("BilateralFilterVSingleDirectional");

            m_BilateralFilterHColorDirectionalKernel = m_ShadowDenoiser.FindKernel("BilateralFilterHColorDirectional");
            m_BilateralFilterVColorDirectionalKernel = m_ShadowDenoiser.FindKernel("BilateralFilterVColorDirectional");

            m_BilateralFilterHSingleSphereKernel = m_ShadowDenoiser.FindKernel("BilateralFilterHSingleSphere");
            m_BilateralFilterVSingleSphereKernel = m_ShadowDenoiser.FindKernel("BilateralFilterVSingleSphere");
        }
Beispiel #18
0
        void BuildLightData(CommandBuffer cmd, HDCamera hdCamera, HDRayTracingLights rayTracingLights)
        {
            // If no lights, exit
            if (rayTracingLights.lightCount == 0)
            {
                ResizeLightDataBuffer(1);
                return;
            }

            // Also we need to build the light list data
            if (m_LightDataGPUArray == null || m_LightDataGPUArray.count != rayTracingLights.lightCount)
            {
                ResizeLightDataBuffer(rayTracingLights.lightCount);
            }

            m_LightDataCPUArray.Clear();

            // Build the data for every light
            for (int lightIdx = 0; lightIdx < rayTracingLights.hdLightArray.Count; ++lightIdx)
            {
                var lightData = new LightData();

                HDAdditionalLightData additionalLightData = rayTracingLights.hdLightArray[lightIdx];
                // When the user deletes a light source in the editor, there is a single frame where the light is null before the collection of lights in the scene is triggered,
                // so the workaround is simply to add an invalid light for that frame
                if (additionalLightData == null)
                {
                    m_LightDataCPUArray.Add(lightData);
                    continue;
                }
                Light light = additionalLightData.gameObject.GetComponent <Light>();

                // Both of these positions are non-camera-relative.
                float distanceToCamera  = (light.gameObject.transform.position - hdCamera.camera.transform.position).magnitude;
                float lightDistanceFade = HDUtils.ComputeLinearDistanceFade(distanceToCamera, additionalLightData.fadeDistance);

                bool contributesToLighting = ((additionalLightData.lightDimmer > 0) && (additionalLightData.affectDiffuse || additionalLightData.affectSpecular)) || (additionalLightData.volumetricDimmer > 0);
                contributesToLighting = contributesToLighting && (lightDistanceFade > 0);

                if (!contributesToLighting)
                {
                    continue;
                }

                lightData.lightLayers = additionalLightData.GetLightLayers();
                LightCategory   lightCategory   = LightCategory.Count;
                GPULightType    gpuLightType    = GPULightType.Point;
                LightVolumeType lightVolumeType = LightVolumeType.Count;
                HDRenderPipeline.EvaluateGPULightType(light.type, additionalLightData.lightTypeExtent, additionalLightData.spotLightShape, ref lightCategory, ref gpuLightType, ref lightVolumeType);

                lightData.lightType = gpuLightType;

                lightData.positionRWS = light.gameObject.transform.position;

                bool applyRangeAttenuation = additionalLightData.applyRangeAttenuation && (gpuLightType != GPULightType.ProjectorBox);

                lightData.range = light.range;

                if (applyRangeAttenuation)
                {
                    lightData.rangeAttenuationScale = 1.0f / (light.range * light.range);
                    lightData.rangeAttenuationBias  = 1.0f;

                    if (lightData.lightType == GPULightType.Rectangle)
                    {
                        // Rect lights are currently a special case because they use the normalized
                        // [0, 1] attenuation range rather than the regular [0, r] one.
                        lightData.rangeAttenuationScale = 1.0f;
                    }
                }
                else // Don't apply any attenuation but do a 'step' at range
                {
                    // Solve f(x) = b - (a * x)^2 where x = (d/r)^2.
                    // f(0) = huge -> b = huge.
                    // f(1) = 0    -> huge - a^2 = 0 -> a = sqrt(huge).
                    const float hugeValue = 16777216.0f;
                    const float sqrtHuge  = 4096.0f;
                    lightData.rangeAttenuationScale = sqrtHuge / (light.range * light.range);
                    lightData.rangeAttenuationBias  = hugeValue;

                    if (lightData.lightType == GPULightType.Rectangle)
                    {
                        // Rect lights are currently a special case because they use the normalized
                        // [0, 1] attenuation range rather than the regular [0, r] one.
                        lightData.rangeAttenuationScale = sqrtHuge;
                    }
                }
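                // Sanity check on the constants above: 4096^2 = 16,777,216, so f((d/r)^2 = 1) = hugeValue - sqrtHuge^2 = 0 and the attenuation steps to zero exactly at the light's range.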

                Color value = light.color.linear * light.intensity;
                if (additionalLightData.useColorTemperature)
                {
                    value *= Mathf.CorrelatedColorTemperatureToRGB(light.colorTemperature);
                }
                lightData.color = new Vector3(value.r, value.g, value.b);

                lightData.forward = light.transform.forward;
                lightData.up      = light.transform.up;
                lightData.right   = light.transform.right;

                if (lightData.lightType == GPULightType.ProjectorBox)
                {
                    // Rescale for cookies and windowing.
                    lightData.right *= 2.0f / Mathf.Max(additionalLightData.shapeWidth, 0.001f);
                    lightData.up    *= 2.0f / Mathf.Max(additionalLightData.shapeHeight, 0.001f);
                }
                else if (lightData.lightType == GPULightType.ProjectorPyramid)
                {
                    // Get width and height for the current frustum
                    var spotAngle = light.spotAngle;

                    float frustumWidth, frustumHeight;

                    if (additionalLightData.aspectRatio >= 1.0f)
                    {
                        frustumHeight = 2.0f * Mathf.Tan(spotAngle * 0.5f * Mathf.Deg2Rad);
                        frustumWidth  = frustumHeight * additionalLightData.aspectRatio;
                    }
                    else
                    {
                        frustumWidth  = 2.0f * Mathf.Tan(spotAngle * 0.5f * Mathf.Deg2Rad);
                        frustumHeight = frustumWidth / additionalLightData.aspectRatio;
                    }

                    // Rescale for cookies and windowing.
                    lightData.right *= 2.0f / frustumWidth;
                    lightData.up    *= 2.0f / frustumHeight;
                }
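                // For example, a 90 degree spot with an aspect ratio of 1 gives frustumWidth = frustumHeight = 2 * tan(45 degrees) = 2, so right and up are left unscaled.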

                if (lightData.lightType == GPULightType.Spot)
                {
                    var spotAngle = light.spotAngle;

                    var innerConePercent      = additionalLightData.innerSpotPercent01;
                    var cosSpotOuterHalfAngle = Mathf.Clamp(Mathf.Cos(spotAngle * 0.5f * Mathf.Deg2Rad), 0.0f, 1.0f);
                    var sinSpotOuterHalfAngle = Mathf.Sqrt(1.0f - cosSpotOuterHalfAngle * cosSpotOuterHalfAngle);
                    var cosSpotInnerHalfAngle = Mathf.Clamp(Mathf.Cos(spotAngle * 0.5f * innerConePercent * Mathf.Deg2Rad), 0.0f, 1.0f); // inner cone

                    var val = Mathf.Max(0.0001f, (cosSpotInnerHalfAngle - cosSpotOuterHalfAngle));
                    lightData.angleScale  = 1.0f / val;
                    lightData.angleOffset = -cosSpotOuterHalfAngle * lightData.angleScale;

                    // Rescale for cookies and windowing.
                    float cotOuterHalfAngle = cosSpotOuterHalfAngle / sinSpotOuterHalfAngle;
                    lightData.up    *= cotOuterHalfAngle;
                    lightData.right *= cotOuterHalfAngle;
                }
                else
                {
                    // These are the neutral values allowing GetAngleAttenuation in shader code to return 1.0
                    lightData.angleScale  = 0.0f;
                    lightData.angleOffset = 1.0f;
                }
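                // With the spot values above, the shader-side angle attenuation (presumably saturate(cosAngle * angleScale + angleOffset)) goes from 1 at the inner cone to 0 at the outer cone; the neutral values in the else branch make it a constant 1.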

                if (lightData.lightType != GPULightType.Directional && lightData.lightType != GPULightType.ProjectorBox)
                {
                    // Store the squared radius of the light to simulate a fill light.
                    lightData.size = new Vector2(additionalLightData.shapeRadius * additionalLightData.shapeRadius, 0);
                }

                if (lightData.lightType == GPULightType.Rectangle || lightData.lightType == GPULightType.Tube)
                {
                    lightData.size = new Vector2(additionalLightData.shapeWidth, additionalLightData.shapeHeight);
                }

                lightData.lightDimmer           = lightDistanceFade * (additionalLightData.lightDimmer);
                lightData.diffuseDimmer         = lightDistanceFade * (additionalLightData.affectDiffuse ? additionalLightData.lightDimmer : 0);
                lightData.specularDimmer        = lightDistanceFade * (additionalLightData.affectSpecular ? additionalLightData.lightDimmer * hdCamera.frameSettings.specularGlobalDimmer : 0);
                lightData.volumetricLightDimmer = lightDistanceFade * (additionalLightData.volumetricDimmer);

                lightData.contactShadowMask      = 0;
                lightData.cookieIndex            = -1;
                lightData.shadowIndex            = -1;
                lightData.screenSpaceShadowIndex = -1;

                if (light != null && light.cookie != null)
                {
                    // TODO: add texture atlas support for cookie textures.
                    switch (light.type)
                    {
                    case LightType.Spot:
                        lightData.cookieIndex = m_RenderPipeline.m_TextureCaches.cookieTexArray.FetchSlice(cmd, light.cookie);
                        break;

                    case LightType.Point:
                        lightData.cookieIndex = m_RenderPipeline.m_TextureCaches.cubeCookieTexArray.FetchSlice(cmd, light.cookie);
                        break;
                    }
                }
                else if (light.type == LightType.Spot && additionalLightData.spotLightShape != SpotLightShape.Cone)
                {
                    // Projector lights must always have a cookie texture.
                    // As long as the cache is a texture array and not an atlas, the 4x4 white texture will be rescaled to 128
                    lightData.cookieIndex = m_RenderPipeline.m_TextureCaches.cookieTexArray.FetchSlice(cmd, Texture2D.whiteTexture);
                }
                else if (lightData.lightType == GPULightType.Rectangle && additionalLightData.areaLightCookie != null)
                {
                    lightData.cookieIndex = m_RenderPipeline.m_TextureCaches.areaLightCookieManager.FetchSlice(cmd, additionalLightData.areaLightCookie);
                }

                {
                    lightData.shadowDimmer           = 1.0f;
                    lightData.volumetricShadowDimmer = 1.0f;
                }

                {
                    // fix up shadow information
                    lightData.shadowIndex = additionalLightData.shadowIndex;
                }

                // The max smoothness value is authored from the artist's point of view, so we need to convert from perceptual smoothness to roughness
                lightData.minRoughness = (1.0f - additionalLightData.maxSmoothness) * (1.0f - additionalLightData.maxSmoothness);

                // No usage for the shadow masks
                lightData.shadowMaskSelector = Vector4.zero;
                {
                    // use -1 to say that we don't use shadow mask
                    lightData.shadowMaskSelector.x = -1.0f;
                    lightData.nonLightMappedOnly   = 0;
                }

                if (ShaderConfig.s_CameraRelativeRendering != 0)
                {
                    // Caution: 'lightData.positionRWS' is camera-relative after this point.
                    Vector3 camPosWS = hdCamera.mainViewConstants.worldSpaceCameraPos;
                    lightData.positionRWS -= camPosWS;
                }

                // Set the data for this light
                m_LightDataCPUArray.Add(lightData);
            }

            // Push the data to the GPU
            m_LightDataGPUArray.SetData(m_LightDataCPUArray);
        }
Beispiel #19
0
        // Preallocates the bounds arrays for the given number of lights and resets all internal counters. Must always be called once per frame per view.
        public void NewFrame(HDCamera hdCamera, int maxLightCount)
        {
            int viewCounts = hdCamera.viewCount;

            if (viewCounts > m_LighsPerViewCapacity)
            {
                m_LighsPerViewCapacity = viewCounts;
                m_LightsPerView.ResizeArray(m_LighsPerViewCapacity);
            }

            m_LightsPerViewCount = viewCounts;

            int totalBoundsCount     = maxLightCount * viewCounts;
            int requestedBoundsCount = Math.Max(totalBoundsCount, 1);

            if (requestedBoundsCount > m_LightBoundsCapacity)
            {
                m_LightBoundsCapacity = Math.Max(Math.Max(m_LightBoundsCapacity * 2, requestedBoundsCount), ArrayCapacity);
                m_LightBounds.ResizeArray(m_LightBoundsCapacity);
                m_LightVolumes.ResizeArray(m_LightBoundsCapacity);
            }
            m_LightBoundsCount = totalBoundsCount;

            m_BoundsEyeDataOffset = maxLightCount;

            for (int viewId = 0; viewId < viewCounts; ++viewId)
            {
                m_LightsPerView[viewId] = new LightsPerView()
                {
                    worldToView  = HDRenderPipeline.GetWorldToViewMatrix(hdCamera, viewId),
                    boundsOffset = viewId * m_BoundsEyeDataOffset,
                    boundsCount  = 0
                };
            }

            if (!m_LightTypeCounters.IsCreated)
            {
                m_LightTypeCounters.ResizeArray(Enum.GetValues(typeof(GPULightTypeCountSlots)).Length);
            }

            m_LightCount                   = 0;
            m_ContactShadowIndex           = 0;
            m_ScreenSpaceShadowIndex       = 0;
            m_ScreenSpaceShadowChannelSlot = 0;
            m_ScreenSpaceShadowsUnion.Clear();

            m_CurrentShadowSortedSunLightIndex   = -1;
            m_CurrentSunLightAdditionalLightData = null;
            m_CurrentSunShadowMapFlags           = HDProcessedVisibleLightsBuilder.ShadowMapFlags.None;

            m_DebugSelectedLightShadowIndex = -1;
            m_DebugSelectedLightShadowCount = 0;

            for (int i = 0; i < m_Asset.currentPlatformRenderPipelineSettings.hdShadowInitParams.maxScreenSpaceShadowSlots; ++i)
            {
                m_CurrentScreenSpaceShadowData[i].additionalLightData = null;
                m_CurrentScreenSpaceShadowData[i].lightDataIndex      = -1;
                m_CurrentScreenSpaceShadowData[i].valid = false;
            }

            for (int i = 0; i < m_LightTypeCounters.Length; ++i)
            {
                m_LightTypeCounters[i] = 0;
            }
        }
Beispiel #20
0
        public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, bool singleChannel, float kernelSize, bool halfResolutionFilter, bool jitterFilter,
                                     TextureHandle noisyBuffer, TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle outputBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <DiffuseDenoiserPassData>("DiffuseDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.DiffuseFilter)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                // Fetch all the resources
                // Camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Denoising parameters
                passData.pixelSpreadTangent   = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
                passData.kernelSize           = kernelSize;
                passData.halfResolutionFilter = halfResolutionFilter;
                passData.jitterFilter         = jitterFilter;
                passData.frameIndex           = m_RenderPipeline.RayTracingFrameIndex(hdCamera);

                // Kernels
                passData.bilateralFilterKernel = singleChannel ? m_BilateralFilterSingleKernel : m_BilateralFilterColorKernel;
                passData.gatherKernel          = singleChannel ? m_GatherSingleKernel : m_GatherColorKernel;

                // Other parameters
                passData.owenScrambleRGBA  = m_OwenScrambleRGBA;
                passData.diffuseDenoiserCS = m_DiffuseDenoiser;

                passData.depthStencilBuffer = builder.ReadTexture(depthBuffer);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.noisyBuffer        = builder.ReadTexture(noisyBuffer);
                passData.intermediateBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "DiffuseDenoiserIntermediate"
                });
                passData.outputBuffer = builder.WriteTexture(outputBuffer);

                builder.SetRenderFunc(
                    (DiffuseDenoiserPassData data, RenderGraphContext ctx) =>
                {
                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesX    = (data.texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesY    = (data.texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Bind the input textures and the filtering parameters
                    ctx.cmd.SetGlobalTexture(HDShaderIDs._OwenScrambledRGTexture, data.owenScrambleRGBA);
                    ctx.cmd.SetComputeFloatParam(data.diffuseDenoiserCS, HDShaderIDs._DenoiserFilterRadius, data.kernelSize);
                    ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._DenoiseInputTexture, data.noisyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.bilateralFilterKernel, HDShaderIDs._DenoiseOutputTextureRW, data.halfResolutionFilter ? data.intermediateBuffer : data.outputBuffer);
                    ctx.cmd.SetComputeIntParam(data.diffuseDenoiserCS, HDShaderIDs._HalfResolutionFilter, data.halfResolutionFilter ? 1 : 0);
                    ctx.cmd.SetComputeFloatParam(data.diffuseDenoiserCS, HDShaderIDs._PixelSpreadAngleTangent, data.pixelSpreadTangent);
                    if (data.jitterFilter)
                    {
                        ctx.cmd.SetComputeIntParam(data.diffuseDenoiserCS, HDShaderIDs._JitterFramePeriod, (data.frameIndex % 4));
                    }
                    else
                    {
                        ctx.cmd.SetComputeIntParam(data.diffuseDenoiserCS, HDShaderIDs._JitterFramePeriod, -1);
                    }

                    ctx.cmd.DispatchCompute(data.diffuseDenoiserCS, data.bilateralFilterKernel, numTilesX, numTilesY, data.viewCount);
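
                    // When filtering at half resolution, a second (gather) pass upsamples the filtered result back to full resolution.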

                    if (data.halfResolutionFilter)
                    {
                        ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.gatherKernel, HDShaderIDs._DenoiseInputTexture, data.intermediateBuffer);
                        ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.gatherKernel, HDShaderIDs._DenoiseOutputTextureRW, data.outputBuffer);
                        ctx.cmd.DispatchCompute(data.diffuseDenoiserCS, data.gatherKernel, numTilesX, numTilesY, data.viewCount);
                    }
                });
                return(passData.outputBuffer);
            }
        }
Beispiel #21
0
        public void Init(HDRenderPipelineRayTracingResources rpRTResources, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            // Keep track of the resources
            m_TemporalFilterCS = rpRTResources.temporalFilterCS;

            // Keep track of the shared rt manager
            m_SharedRTManager = sharedRTManager;
            m_RenderPipeline  = renderPipeline;
        }
Beispiel #22
0
        public void Init(RenderPipelineResources rpResources, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            // Keep track of the resources
            m_SSGIDenoiserCS = rpResources.shaders.ssGIDenoiserCS;

            // Keep track of the shared rt manager
            m_SharedRTManager = sharedRTManager;
            m_RenderPipeline  = renderPipeline;

            // Fetch the spatial filter kernels we are going to require
            m_SpatialFilterHalfKernel = m_SSGIDenoiserCS.FindKernel("SpatialFilterHalf");
            m_SpatialFilterKernel     = m_SSGIDenoiserCS.FindKernel("SpatialFilter");

            // Fetch the temporal filter kernels we are going to require
            m_TemporalFilterHalfKernel = m_SSGIDenoiserCS.FindKernel("TemporalFilterHalf");
            m_TemporalFilterKernel     = m_SSGIDenoiserCS.FindKernel("TemporalFilter");

            m_CopyHistory = m_SSGIDenoiserCS.FindKernel("CopyHistory");
        }
Beispiel #23
0
        public void Init(RenderPipelineSettings settings, RenderPipelineResources rpResources, HDRenderPipelineRayTracingResources rayTracingResources, BlueNoise blueNoise, HDRenderPipeline renderPipeline, SharedRTManager sharedRTManager, DebugDisplaySettings currentDebugDisplaySettings)
        {
            // Keep track of the resources
            m_Resources   = rpResources;
            m_RTResources = rayTracingResources;

            // Keep track of the settings
            m_Settings = settings;

            // Keep track of the render pipeline
            m_RenderPipeline = renderPipeline;

            // Keep track of the shared RT manager
            m_SharedRTManager = sharedRTManager;

            // Keep track of the blue noise manager
            m_BlueNoise = blueNoise;

            // Create the list of environments
            m_Environments = new List <HDRaytracingEnvironment>();

            // Grab all the ray-tracing environments that were created beforehand (in case the order of initialization has not been respected, which happens when we open Unity for the first time)
            HDRaytracingEnvironment[] environmentArray = Object.FindObjectsOfType <HDRaytracingEnvironment>();
            for (int envIdx = 0; envIdx < environmentArray.Length; ++envIdx)
            {
                RegisterEnvironment(environmentArray[envIdx]);
            }

            // Init the denoisers
            m_TemporalFilter.Init(rayTracingResources, m_SharedRTManager);
            m_SimpleDenoiser.Init(rayTracingResources, m_SharedRTManager);
            m_DiffuseDenoiser.Init(rpResources, rayTracingResources, m_SharedRTManager);

            // Init the ray count manager
            m_RayCountManager.Init(rayTracingResources, currentDebugDisplaySettings);

#if UNITY_EDITOR
            // We need to invalidate the acceleration structures in case the hierarchy changed
            EditorApplication.hierarchyChanged += OnHierarchyChanged;
#endif
        }
        public void Init(HDRenderPipelineRayTracingResources rpRTResources, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            m_SimpleDenoiserCS = rpRTResources.simpleDenoiserCS;
            m_SharedRTManager  = sharedRTManager;
            m_RenderPipeline   = renderPipeline;

            m_BilateralFilterHSingleKernel = m_SimpleDenoiserCS.FindKernel("BilateralFilterHSingle");
            m_BilateralFilterVSingleKernel = m_SimpleDenoiserCS.FindKernel("BilateralFilterVSingle");
            m_BilateralFilterHColorKernel  = m_SimpleDenoiserCS.FindKernel("BilateralFilterHColor");
            m_BilateralFilterVColorKernel  = m_SimpleDenoiserCS.FindKernel("BilateralFilterVColor");
        }
        // Denoiser variant that does not use a history array
        internal TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, TemporalFilterParameters filterParams,
                                       TextureHandle noisyBuffer, TextureHandle velocityBuffer,
                                       TextureHandle historyBuffer,
                                       TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorBuffer, TextureHandle historyValidationBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <TemporalFilterPassData>("TemporalDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.TemporalFilter)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                // Camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Denoising parameters
                passData.pixelSpreadTangent      = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
                passData.historyValidity         = filterParams.historyValidity;
                passData.receiverMotionRejection = filterParams.receiverMotionRejection;
                passData.occluderMotionRejection = filterParams.occluderMotionRejection;
                passData.exposureControl         = filterParams.exposureControl;

                // Kernels
                passData.temporalAccKernel = filterParams.singleChannel ? m_TemporalAccumulationSingleKernel : m_TemporalAccumulationColorKernel;
                passData.copyHistoryKernel = m_CopyHistoryKernel;

                // Other parameters
                passData.temporalFilterCS = m_TemporalFilterCS;

                // Prepass Buffers
                passData.depthStencilBuffer = builder.ReadTexture(depthBuffer);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.motionVectorBuffer = builder.ReadTexture(motionVectorBuffer);

                // Effect buffers
                passData.velocityBuffer   = builder.ReadTexture(velocityBuffer);
                passData.noisyBuffer      = builder.ReadTexture(noisyBuffer);
                passData.validationBuffer = builder.ReadTexture(historyValidationBuffer);

                // History buffer
                passData.historyBuffer = builder.ReadWriteTexture(historyBuffer);

                // Output buffers
                passData.outputBuffer = builder.ReadWriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporal Filter Output"
                }));

                builder.SetRenderFunc(
                    (TemporalFilterPassData data, RenderGraphContext ctx) =>
                {
                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesX    = (data.texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesY    = (data.texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Now that we have validated our history, let's accumulate
                    // Bind the input buffers
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DenoiseInputTexture, data.noisyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._HistoryBuffer, data.historyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._ValidationBuffer, data.validationBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._VelocityBuffer, data.velocityBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._HistoryValidity, data.historyValidity);
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._ReceiverMotionRejection, data.receiverMotionRejection ? 1 : 0);
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._OccluderMotionRejection, data.occluderMotionRejection ? 1 : 0);
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._PixelSpreadAngleTangent, data.pixelSpreadTangent);
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._EnableExposureControl, data.exposureControl);

                    // Bind the output buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._AccumulationOutputTextureRW, data.outputBuffer);

                    // Combine signal with history
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.temporalAccKernel, numTilesX, numTilesY, data.viewCount);

                    // Make sure to copy the newly accumulated signal into our history buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._DenoiseInputTexture, data.outputBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._DenoiseOutputTextureRW, data.historyBuffer);
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.copyHistoryKernel, numTilesX, numTilesY, data.viewCount);
                });
                return(passData.outputBuffer);
            }
        }
Beispiel #26
0
        public void Init(HDRenderPipelineRayTracingResources rpRTResources, SharedRTManager sharedRTManager, HDRenderPipeline renderPipeline)
        {
            m_SimpleDenoiserCS = rpRTResources.simpleDenoiserCS;
            m_SharedRTManager  = sharedRTManager;
            m_RenderPipeline   = renderPipeline;
        }
        // Function that evaluates the history validation buffer
        public TextureHandle HistoryValidity(RenderGraph renderGraph, HDCamera hdCamera, float historyValidity,
                                             TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <HistoryValidityPassData>("History Validity Evaluation", out var passData, ProfilingSampler.Get(HDProfileId.HistoryValidity)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Denoising parameters
                passData.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
                passData.historyValidity    = historyValidity;

                // Kernels
                passData.validateHistoryKernel = m_ValidateHistoryKernel;

                // Other parameters
                passData.temporalFilterCS = m_TemporalFilterCS;

                // Input Buffers
                passData.depthStencilBuffer = builder.ReadTexture(depthBuffer);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.MotionVectors))
                {
                    passData.motionVectorBuffer = builder.ReadTexture(motionVectorBuffer);
                }
                else
                {
                    passData.motionVectorBuffer = builder.ReadTexture(renderGraph.defaultResources.blackTextureXR);
                }

                // History buffers
                passData.historyDepthTexture  = builder.ReadTexture(renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth)));
                passData.historyNormalTexture = builder.ReadTexture(renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal)));

                // Output buffers
                passData.validationBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R8_UInt, enableRandomWrite = true, name = "ValidationTexture"
                }));

                builder.SetRenderFunc(
                    (HistoryValidityPassData data, RenderGraphContext ctx) =>
                {
                    RTHandle historyDepthTexture  = data.historyDepthTexture;
                    RTHandle historyNormalTexture = data.historyNormalTexture;
                    // If we do not have depth and normal history buffers, we can skip right away
                    if (historyDepthTexture == null || historyNormalTexture == null)
                    {
                        CoreUtils.SetRenderTarget(ctx.cmd, data.validationBuffer, clearFlag: ClearFlag.Color, Color.black);
                        return;
                    }

                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesX    = (data.texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesY    = (data.texHeight + (areaTileSize - 1)) / areaTileSize;

                    // First of all we need to validate the history to know where we can or cannot use the history signal
                    // Bind the input buffers
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._HistoryDepthTexture, data.historyDepthTexture);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._HistoryNormalTexture, data.historyNormalTexture);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._StencilTexture, data.depthStencilBuffer, 0, RenderTextureSubElement.Stencil);

                    // Bind the constants
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._HistoryValidity, data.historyValidity);
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._PixelSpreadAngleTangent, data.pixelSpreadTangent);
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._ObjectMotionStencilBit, (int)StencilUsage.ObjectMotionVector);

                    // Bind the output buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._ValidationBufferRW, data.validationBuffer);

                    // Evaluate the validity
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.validateHistoryKernel, numTilesX, numTilesY, data.viewCount);
                });
                return(passData.validationBuffer);
            }
        }
Beispiel #28
0
        void RaytracingRecursiveRender(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, CullingResults cull)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            RecursiveRendering recursiveSettings = hdCamera.volumeStack.GetComponent <RecursiveRendering>();

            // Check the validity of the state before computing the effect
            bool invalidState = !hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) ||
                                !recursiveSettings.enable.value;

            // If any resource or game object is missing, we stop right away
            if (invalidState)
            {
                return;
            }

            RayTracingShader   forwardShader        = m_Asset.renderPipelineRayTracingResources.forwardRaytracing;
            Shader             raytracingMask       = m_Asset.renderPipelineRayTracingResources.raytracingFlagMask;
            LightCluster       lightClusterSettings = hdCamera.volumeStack.GetComponent <LightCluster>();
            RayTracingSettings rtSettings           = hdCamera.volumeStack.GetComponent <RayTracingSettings>();

            // Grab the acceleration structure and the list of HD lights for the target camera
            RayTracingAccelerationStructure accelerationStructure = RequestAccelerationStructure();
            HDRaytracingLightCluster        lightCluster          = RequestLightCluster();

            // Fetch the temporary buffers we will be using
            RTHandle flagBuffer = GetRayTracingBuffer(InternalRayTracingBuffers.R0);

            if (m_RaytracingFlagMaterial == null)
            {
                m_RaytracingFlagMaterial = CoreUtils.CreateEngineMaterial(raytracingMask);
            }

            // Before going into ray tracing, we need to flag which pixels need to be ray traced
            EvaluateRaytracingMask(cull, hdCamera, cmd, renderContext, flagBuffer);

            // Define the shader pass to use for the reflection pass
            cmd.SetRayTracingShaderPass(forwardShader, "ForwardDXR");

            // Set the acceleration structure for the pass
            cmd.SetRayTracingAccelerationStructure(forwardShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._OwenScrambledTexture, m_Asset.renderPipelineResources.textures.owenScrambledRGBATex);
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtSettings.rayBias.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, recursiveSettings.rayLength.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingMaxRecursion, recursiveSettings.maxDepth.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingCameraNearPlane, hdCamera.camera.nearClipPlane);

            // Set the data for the ray generation
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RaytracingFlagMask, flagBuffer);
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._CameraColorTextureRW, m_CameraColorBuffer);

            // Set ray count texture
            RayCountManager rayCountManager = GetRayCountManager();

            cmd.SetRayTracingIntParam(forwardShader, HDShaderIDs._RayCountEnabled, rayCountManager.RayCountIsEnabled());
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RayCountTexture, rayCountManager.GetRayCountTexture());

            // Compute an approximate pixel spread angle value (in radians)
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingPixelSpreadAngle, GetPixelSpreadAngle(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight));

            // LightLoop data
            lightCluster.BindLightClusterData(cmd);

            // Note: just in case, we rebind the directional light data (it may not have been bound yet)
            cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, m_LightLoopLightData.directionalLightData);
            cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, m_lightList.directionalLights.Count);

            // Set the data for the ray miss
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._SkyTexture, m_SkyManager.GetSkyReflection(hdCamera));

            // Grab a temporary buffer used to store the debug output of the recursive ray tracing pass
            RTHandle debugBuffer = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA0);

            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RaytracingPrimaryDebug, debugBuffer);

            // Run the computation
            cmd.DispatchRays(forwardShader, m_RayGenShaderName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, (uint)hdCamera.viewCount);
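            // One ray generation invocation is dispatched per pixel and per XR view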

            HDRenderPipeline hdrp = (RenderPipelineManager.currentPipeline as HDRenderPipeline);

            hdrp.PushFullScreenDebugTexture(hdCamera, cmd, debugBuffer, FullScreenDebugMode.RecursiveRayTracing);
        }
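
        // For reference, a minimal illustrative sketch of what a pixel spread angle helper could
        // compute, assuming it mirrors the explicit formula used in PathTracingRender (Beispiel #30)
        // further below. This helper is an assumption added for illustration; it is not part of the
        // original listing, and GetPixelSpreadAngle may be implemented differently in HDRP.
        static float ApproximatePixelSpreadAngle(float verticalFoVDegrees, int width, int height)
        {
            // Convert the vertical field of view to radians and divide by the smaller screen
            // dimension: roughly the angle (in radians) subtended by a single pixel.
            return verticalFoVDegrees * (Mathf.PI / 180.0f) / Mathf.Min(width, height);
        }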
        void BuildLightData(CommandBuffer cmd, HDCamera hdCamera, HDRayTracingLights rayTracingLights, DebugDisplaySettings debugDisplaySettings)
        {
            // If no lights, exit
            if (rayTracingLights.lightCount == 0)
            {
                ResizeLightDataBuffer(1);
                return;
            }

            // Make sure the GPU light data buffer matches the current light count
            if (m_LightDataGPUArray == null || m_LightDataGPUArray.count != rayTracingLights.lightCount)
            {
                ResizeLightDataBuffer(rayTracingLights.lightCount);
            }

            m_LightDataCPUArray.Clear();

            // Grab the shadow settings
            var hdShadowSettings = hdCamera.volumeStack.GetComponent <HDShadowSettings>();
            BoolScalableSetting contactShadowScalableSetting = HDAdditionalLightData.ScalableSettings.UseContactShadow(m_RenderPipeline.asset);

            // Build the data for every light
            for (int lightIdx = 0; lightIdx < rayTracingLights.hdLightArray.Count; ++lightIdx)
            {
                // Grab the additional light data to process
                HDAdditionalLightData additionalLightData = rayTracingLights.hdLightArray[lightIdx];

                LightData lightData = new LightData();
                // When the user deletes a light source in the editor, there is a single frame where the light is null before the collection of lights in the scene is triggered.
                // The workaround for this is simply to add an invalid light for that frame.
                if (additionalLightData == null)
                {
                    m_LightDataCPUArray.Add(lightData);
                    continue;
                }

                // Evaluate all the light type data that we need
                LightCategory   lightCategory   = LightCategory.Count;
                GPULightType    gpuLightType    = GPULightType.Point;
                LightVolumeType lightVolumeType = LightVolumeType.Count;
                HDLightType     lightType       = additionalLightData.type;
                HDRenderPipeline.EvaluateGPULightType(lightType, additionalLightData.spotLightShape, additionalLightData.areaLightShape, ref lightCategory, ref gpuLightType, ref lightVolumeType);

                // Fetch the light component for this light
                additionalLightData.gameObject.TryGetComponent(out lightComponent);

                // Build the processed light data that we need
                ProcessedLightData processedData = new ProcessedLightData();
                processedData.additionalLightData = additionalLightData;
                processedData.lightType           = additionalLightData.type;
                processedData.lightCategory       = lightCategory;
                processedData.gpuLightType        = gpuLightType;
                processedData.lightVolumeType     = lightVolumeType;
                // Both of these positions are non-camera-relative.
                processedData.distanceToCamera       = (additionalLightData.gameObject.transform.position - hdCamera.camera.transform.position).magnitude;
                processedData.lightDistanceFade      = HDUtils.ComputeLinearDistanceFade(processedData.distanceToCamera, additionalLightData.fadeDistance);
                processedData.volumetricDistanceFade = HDUtils.ComputeLinearDistanceFade(processedData.distanceToCamera, additionalLightData.volumetricFadeDistance);
                processedData.isBakedShadowMask      = HDRenderPipeline.IsBakedShadowMaskLight(lightComponent);

                // Build a visible light
                Color finalColor = lightComponent.color.linear * lightComponent.intensity;
                if (additionalLightData.useColorTemperature)
                {
                    finalColor *= Mathf.CorrelatedColorTemperatureToRGB(lightComponent.colorTemperature);
                }
                visibleLight.finalColor = finalColor;
                visibleLight.range      = lightComponent.range;
                // This should be done explicitly; the localToWorldMatrix property doesn't work here
                localToWorldMatrix.SetColumn(3, lightComponent.gameObject.transform.position);
                localToWorldMatrix.SetColumn(2, lightComponent.transform.forward);
                localToWorldMatrix.SetColumn(1, lightComponent.transform.up);
                localToWorldMatrix.SetColumn(0, lightComponent.transform.right);
                visibleLight.localToWorldMatrix = localToWorldMatrix;
                visibleLight.spotAngle          = lightComponent.spotAngle;

                int     shadowIndex            = additionalLightData.shadowIndex;
                int     screenSpaceShadowIndex = -1;
                int     screenSpaceChannelSlot = -1;
                Vector3 lightDimensions        = new Vector3(0.0f, 0.0f, 0.0f);
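                // Sentinel values: no screen-space shadow slot has been assigned at this point; all of these are passed by reference to GetLightData below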

                // Use the shared code to build the light data
                m_RenderPipeline.GetLightData(cmd, hdCamera, hdShadowSettings, visibleLight, lightComponent, in processedData,
                                              shadowIndex, contactShadowScalableSetting, isRasterization: false, ref lightDimensions, ref screenSpaceShadowIndex, ref screenSpaceChannelSlot, ref lightData);

                // We make the light position camera-relative as late as possible in order
                // to allow the preceding code to work with the absolute world space coordinates.
                Vector3 camPosWS = hdCamera.mainViewConstants.worldSpaceCameraPos;
                HDRenderPipeline.UpdateLightCameraRelativetData(ref lightData, camPosWS);

                // Set the data for this light
                m_LightDataCPUArray.Add(lightData);
            }

            // Push the data to the GPU
            m_LightDataGPUArray.SetData(m_LightDataCPUArray);
        }
Beispiel #30
0
        public void PathTracingRender(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironment     = m_RayTracingManager.CurrentEnvironment();
            HDRenderPipeline        renderPipeline    = m_RayTracingManager.GetRenderPipeline();
            RayTracingShader        pathTracingShader = m_Asset.renderPipelineRayTracingResources.pathTracing;

            PathTracing  pathTracingSettings  = VolumeManager.instance.stack.GetComponent <PathTracing>();
            LightCluster lightClusterSettings = VolumeManager.instance.stack.GetComponent <LightCluster>();

            // Check the validity of the state before computing the effect
            bool invalidState = rtEnvironment == null || !pathTracingSettings.enable.value || pathTracingShader == null ||
                                m_Asset.renderPipelineResources.textures.owenScrambled256Tex == null ||
                                m_Asset.renderPipelineResources.textures.scramblingTex == null;

            // If any resource or game object is missing, we stop right away
            if (invalidState)
            {
                return;
            }

            // Grab the history buffer used to accumulate the path-traced frames (allocate it if needed)
            RTHandle history = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.PathTracing)
                               ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.PathTracing, PathTracingHistoryBufferAllocatorFunction, 1);
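            // GetCurrentFrameRT returns null when the history buffer has not been allocated yet, in which case we allocate it here through the allocator callback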

            // Grab the acceleration structure and the list of HD lights for the target camera
            RayTracingAccelerationStructure accelerationStructure = m_RayTracingManager.RequestAccelerationStructure(rtEnvironment.raytracedLayerMask);
            HDRaytracingLightCluster        lightCluster          = m_RayTracingManager.RequestLightCluster(rtEnvironment.raytracedLayerMask);

            // Define the shader pass to use for the path tracing pass
            cmd.SetRayTracingShaderPass(pathTracingShader, "PathTracingDXR");

            // Set the acceleration structure for the pass
            cmd.SetRayTracingAccelerationStructure(pathTracingShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetGlobalTexture(HDShaderIDs._OwenScrambledTexture, m_Asset.renderPipelineResources.textures.owenScrambled256Tex);
            cmd.SetGlobalTexture(HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingNumSamples, pathTracingSettings.maxSamples.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingMinRecursion, pathTracingSettings.minDepth.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingMaxRecursion, pathTracingSettings.maxDepth.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingIntensityClamp, pathTracingSettings.maxIntensity.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingCameraNearPlane, hdCamera.camera.nearClipPlane);

            // Set the data for the ray generation
            //cmd.SetRayTracingTextureParam(pathTracingShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRayTracingTextureParam(pathTracingShader, HDShaderIDs._CameraColorTextureRW, outputTexture);
            cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameCount);

            // Compute an approximate pixel spread angle value (in radians)
            float pixelSpreadAngle = hdCamera.camera.fieldOfView * (Mathf.PI / 180.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight);
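            // (Vertical field of view converted to radians, divided by the smaller screen dimension: roughly the angle subtended by a single pixel.)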

            cmd.SetRayTracingFloatParam(pathTracingShader, HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

            // LightLoop data
            cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
            cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
            cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
            cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
            cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, lightClusterSettings.maxNumLightsPercell.value);
            cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
            cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

            // Note: directional lights are not supported at the moment
            //cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, renderPipeline.directionalLightDatas);
            //cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, renderPipeline.m_lightList.directionalLights.Count);

            // Set the data for the ray miss
            cmd.SetRayTracingTextureParam(pathTracingShader, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

            // Additional data for path tracing
            cmd.SetRayTracingTextureParam(pathTracingShader, HDShaderIDs._AccumulatedFrameTexture, history);
            cmd.SetRayTracingMatrixParam(pathTracingShader, HDShaderIDs._PixelCoordToViewDirWS, hdCamera.mainViewConstants.pixelCoordToViewDirWS);

            // Run the computation
            cmd.DispatchRays(pathTracingShader, m_PathTracingRayGenShaderName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
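            // Note: the dispatch depth is 1 here; unlike the recursive renderer above, this pass does not dispatch per XR view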
        }