Example #1
		/// <summary>Registers a texture and returns a handle to the texture.</summary>
		/// <param name="path">The path to the file or folder that contains the texture.</param>
		/// <param name="parameters">The parameters that specify how to process the texture.</param>
		/// <param name="handle">Receives the handle to the texture.</param>
		/// <returns>Whether loading the texture was successful.</returns>
		public virtual bool RegisterTexture(string path, TextureParameters parameters, out TextureHandle handle) {
			handle = null;
			return false;
		}
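The base implementation above is only a stub that always fails. A concrete host is expected to override it and hand back a real handle; a minimal sketch of such an override is shown below, assuming a hypothetical HostBase base class and a hypothetical LoadTexture helper (neither name comes from the snippet above).

		public abstract class FileSystemHost : HostBase
		{
			/// <summary>Sketch of a concrete RegisterTexture override (hypothetical class and loader).</summary>
			public override bool RegisterTexture(string path, TextureParameters parameters, out TextureHandle handle)
			{
				handle = null;
				// The documented contract allows a file or a folder path, so accept either.
				if (!System.IO.File.Exists(path) && !System.IO.Directory.Exists(path))
				{
					return false; // nothing to register
				}
				handle = LoadTexture(path, parameters); // host-specific decode and upload
				return handle != null;
			}

			// The actual decoding/upload is host-specific and left abstract in this sketch.
			protected abstract TextureHandle LoadTexture(string path, TextureParameters parameters);
		}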
        TextureHandle RenderScreenSpaceShadows(RenderGraph renderGraph, HDCamera hdCamera, PrepassOutput prepassOutput, TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer, TextureHandle rayCountTexture)
        {
            // If screen space shadows are not supported for this camera, we are done
            if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.ScreenSpaceShadows) || !RequestedScreenSpaceShadows())
            {
                return(m_RenderGraph.defaultResources.blackTextureArrayXR);
            }

            using (new RenderGraphProfilingScope(renderGraph, ProfilingSampler.Get(HDProfileId.ScreenSpaceShadows)))
            {
                // Request the output texture
                TextureHandle screenSpaceShadowTexture = CreateScreenSpaceShadowTextureArray(renderGraph);

                // First of all we handle the directional light
                RenderDirectionalLightScreenSpaceShadow(renderGraph, hdCamera, depthBuffer, normalBuffer, motionVectorsBuffer, rayCountTexture, screenSpaceShadowTexture);

                if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing))
                {
                    // We handle the other light sources
                    RenderLightScreenSpaceShadows(renderGraph, hdCamera, prepassOutput, depthBuffer, normalBuffer, motionVectorsBuffer, rayCountTexture, screenSpaceShadowTexture);
                }

                // We render the debug view; if the texture is not used, it is not evaluated anyway
                TextureHandle screenSpaceShadowDebug = EvaluateShadowDebugView(renderGraph, hdCamera, screenSpaceShadowTexture);
                PushFullScreenDebugTexture(m_RenderGraph, screenSpaceShadowDebug, FullScreenDebugMode.ScreenSpaceShadows);

                return(screenSpaceShadowTexture);
            }
        }
        void WriteScreenSpaceShadow(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle shadowTexture, TextureHandle screenSpaceShadowArray, int shadowIndex, ScreenSpaceShadowType shadowType)
        {
            // Write the result texture to the screen space shadow buffer
            using (var builder = renderGraph.AddRenderPass <WriteScreenSpaceShadowPassData>("Write Screen Space Shadows", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingWriteShadow)))
            {
                passData.parameters              = PrepareWriteScreenSpaceShadowParameters(hdCamera, shadowIndex, shadowType);
                passData.inputShadowBuffer       = builder.ReadTexture(shadowTexture);
                passData.outputShadowArrayBuffer = builder.ReadWriteTexture(screenSpaceShadowArray);

                builder.SetRenderFunc(
                    (WriteScreenSpaceShadowPassData data, RenderGraphContext context) =>
                {
                    WriteScreenSpaceShadowResources resources = new WriteScreenSpaceShadowResources();
                    resources.inputShadowBuffer       = data.inputShadowBuffer;
                    resources.outputShadowArrayBuffer = data.outputShadowArrayBuffer;
                    ExecuteWriteScreenSpaceShadow(context.cmd, data.parameters, resources);
                });
            }
        }
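Every HDRP snippet that follows uses the same render-graph recipe seen above: add a pass, declare through the builder which texture handles the pass reads and writes, and defer all command recording to the render function, where the handles resolve to real resources. A stripped-down sketch of that pattern is shown below; MyPassData and RenderMyPass are hypothetical, only the RenderGraph/builder calls mirror the code above, and it assumes the usual UnityEngine.Rendering and RenderGraphModule namespaces are in scope (exact overloads can vary by package version).

        class MyPassData
        {
            public TextureHandle input;
            public TextureHandle output;
        }

        // Minimal sketch of the AddRenderPass / ReadTexture / WriteTexture / SetRenderFunc pattern.
        TextureHandle RenderMyPass(RenderGraph renderGraph, TextureHandle input, TextureHandle output)
        {
            using (var builder = renderGraph.AddRenderPass<MyPassData>("My Pass", out var passData))
            {
                // Declaring reads and writes lets the graph schedule, cull and allocate the pass correctly.
                passData.input  = builder.ReadTexture(input);
                passData.output = builder.WriteTexture(output);

                builder.SetRenderFunc(
                    (MyPassData data, RenderGraphContext ctx) =>
                {
                    // Handles resolve to actual resources only here, at execution time.
                    CoreUtils.SetRenderTarget(ctx.cmd, data.output, clearFlag: ClearFlag.Color, Color.black);
                    // ... record the real work on ctx.cmd ...
                });
                return passData.output;
            }
        }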
Example #4
        TextureHandle TraceSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination giSettings, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <TraceSSGIPassData>("Trace SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGITrace)))
            {
                builder.EnableAsyncCompute(false);

                passData.parameters          = PrepareSSGITraceParameters(hdCamera, giSettings);
                passData.depthTexture        = builder.ReadTexture(depthPyramid);
                passData.normalBuffer        = builder.ReadTexture(normalBuffer);
                passData.motionVectorsBuffer = builder.ReadTexture(motionVectorsBuffer);
                var colorPyramid = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);
                passData.colorPyramid = colorPyramid != null ? builder.ReadTexture(renderGraph.ImportTexture(colorPyramid)) : renderGraph.defaultResources.blackTextureXR;

                var historyDepth = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                passData.historyDepth = historyDepth != null ? builder.ReadTexture(renderGraph.ImportTexture(historyDepth)) : renderGraph.defaultResources.blackTextureXR;

                passData.hitPointBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Hit Point"
                });
                passData.outputBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Signal"
                }));

                builder.SetRenderFunc(
                    (TraceSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSGITraceResources resources  = new SSGITraceResources();
                    resources.depthTexture        = data.depthTexture;
                    resources.normalBuffer        = data.normalBuffer;
                    resources.motionVectorsBuffer = data.motionVectorsBuffer;
                    resources.colorPyramid        = data.colorPyramid;
                    resources.historyDepth        = data.historyDepth;
                    resources.hitPointBuffer      = data.hitPointBuffer;
                    resources.outputBuffer        = data.outputBuffer;
                    ExecuteSSGITrace(ctx.cmd, data.parameters, resources);
                });
                return(passData.outputBuffer);
            }
        }
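The colorPyramid and historyDepth assignments above both follow the same pattern: import the camera's history RTHandle into the graph when it exists, otherwise fall back to the default black XR texture. A small hypothetical helper (a sketch only, not HDRP API) that expresses the pattern once:

        // Hypothetical convenience helper: import a history RTHandle for reading, or fall back
        // to the render graph's default black XR texture when the history does not exist yet.
        static TextureHandle ImportOrBlack(RenderGraph renderGraph, RenderGraphBuilder builder, RTHandle history)
        {
            return history != null
                ? builder.ReadTexture(renderGraph.ImportTexture(history))
                : renderGraph.defaultResources.blackTextureXR;
        }

        // Usage, equivalent to the assignments above:
        // passData.colorPyramid = ImportOrBlack(renderGraph, builder, colorPyramid);
        // passData.historyDepth = ImportOrBlack(renderGraph, builder, historyDepth);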
Example #5
        TextureHandle RenderSSGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer, ShaderVariablesRaytracing shaderVariablesRayTracingCB)
        {
            // Grab the global illumination volume component
            GlobalIllumination giSettings = hdCamera.volumeStack.GetComponent <GlobalIllumination>();

            using (new RenderGraphProfilingScope(renderGraph, ProfilingSampler.Get(HDProfileId.SSGIPass)))
            {
                // Trace the signal
                TextureHandle colorBuffer = TraceSSGI(renderGraph, hdCamera, giSettings, depthPyramid, normalBuffer, motionVectorsBuffer);

                // Denoise the signal
                float        historyValidity = EvaluateHistoryValidity(hdCamera);
                SSGIDenoiser ssgiDenoiser    = GetSSGIDenoiser();
                ssgiDenoiser.Denoise(renderGraph, hdCamera, depthPyramid, normalBuffer, motionVectorsBuffer, colorBuffer, !giSettings.fullResolutionSS, historyValidity: historyValidity);

                // Upscale it if required
                // If this was a half resolution effect, we still have to upscale it
                if (!giSettings.fullResolutionSS)
                {
                    colorBuffer = UpscaleSSGI(renderGraph, hdCamera, giSettings, depthPyramid, colorBuffer);
                }
                return(colorBuffer);
            }
        }
Example #6
        TextureHandle RenderRayTracedIndirectDiffuse(RenderGraph renderGraph, HDCamera hdCamera,
                                                     TextureHandle depthPyramid, TextureHandle stencilBuffer, TextureHandle normalBuffer, TextureHandle motionVectors, TextureHandle historyValidationTexture,
                                                     Texture skyTexture, TextureHandle rayCountTexture,
                                                     ShaderVariablesRaytracing shaderVariablesRaytracing)
        {
            GlobalIllumination giSettings = hdCamera.volumeStack.GetComponent <GlobalIllumination>();

            TextureHandle rtreflResult;
            bool          qualityMode = false;

            // Based on what the asset supports, follow the volume or force the right mode.
            if (m_Asset.currentPlatformRenderPipelineSettings.supportedRayTracingMode == RenderPipelineSettings.SupportedRayTracingMode.Both)
            {
                qualityMode = giSettings.mode.value == RayTracingMode.Quality;
            }
            else
            {
                qualityMode = m_Asset.currentPlatformRenderPipelineSettings.supportedRayTracingMode == RenderPipelineSettings.SupportedRayTracingMode.Quality;
            }


            if (qualityMode)
            {
                rtreflResult = RenderIndirectDiffuseQuality(renderGraph, hdCamera,
                                                            depthPyramid, stencilBuffer, normalBuffer, motionVectors, historyValidationTexture,
                                                            rayCountTexture, skyTexture,
                                                            shaderVariablesRaytracing);
            }
            else
            {
                rtreflResult = RenderIndirectDiffusePerformance(renderGraph, hdCamera,
                                                                depthPyramid, stencilBuffer, normalBuffer, motionVectors, historyValidationTexture,
                                                                rayCountTexture, skyTexture,
                                                                shaderVariablesRaytracing);
            }

            return(rtreflResult);
        }
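For reference, the branching above that chooses between quality and performance mode collapses to a single conditional expression (same behavior, just a more compact form of the source logic):

            var supportedMode = m_Asset.currentPlatformRenderPipelineSettings.supportedRayTracingMode;
            bool qualityMode = supportedMode == RenderPipelineSettings.SupportedRayTracingMode.Both
                ? giSettings.mode.value == RayTracingMode.Quality
                : supportedMode == RenderPipelineSettings.SupportedRayTracingMode.Quality;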
        BuildGPULightListOutput BuildGPULightList(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthStencilBuffer, TextureHandle stencilBufferCopy, GBufferOutput gBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <BuildGPULightListPassData>("Build Light List", out var passData, ProfilingSampler.Get(HDProfileId.BuildLightList)))
            {
                builder.EnableAsyncCompute(hdCamera.frameSettings.BuildLightListRunsAsync());

                passData.lightLoopGlobalParameters   = PrepareLightLoopGlobalParameters(hdCamera, m_TileAndClusterData);
                passData.buildGPULightListParameters = PrepareBuildGPULightListParameters(hdCamera, m_TileAndClusterData, ref m_ShaderVariablesLightListCB, m_TotalLightCount);
                passData.depthBuffer    = builder.ReadTexture(depthStencilBuffer);
                passData.stencilTexture = builder.ReadTexture(stencilBufferCopy);
                if (passData.buildGPULightListParameters.computeMaterialVariants && passData.buildGPULightListParameters.enableFeatureVariants)
                {
                    for (int i = 0; i < gBuffer.gBufferCount; ++i)
                    {
                        passData.gBuffer[i] = builder.ReadTexture(gBuffer.mrt[i]);
                    }
                    passData.gBufferCount = gBuffer.gBufferCount;
                }

                passData.lightVolumeDataBuffer = m_TileAndClusterData.lightVolumeDataBuffer;
                passData.convexBoundsBuffer    = m_TileAndClusterData.convexBoundsBuffer;
                passData.AABBBoundsBuffer      = m_TileAndClusterData.AABBBoundsBuffer;
                passData.globalLightListAtomic = m_TileAndClusterData.globalLightListAtomic;

                passData.output.tileFeatureFlags       = builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(m_TileAndClusterData.tileFeatureFlags));
                passData.output.dispatchIndirectBuffer = builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(m_TileAndClusterData.dispatchIndirectBuffer));
                passData.output.perVoxelOffset         = builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(m_TileAndClusterData.perVoxelOffset));
                passData.output.perTileLogBaseTweak    = builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(m_TileAndClusterData.perTileLogBaseTweak));
                passData.output.tileList           = builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(m_TileAndClusterData.tileList));
                passData.output.bigTileLightList   = builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(m_TileAndClusterData.bigTileLightList));
                passData.output.perVoxelLightLists = builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(m_TileAndClusterData.perVoxelLightLists));
                passData.output.lightList          = builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(m_TileAndClusterData.lightList));

                builder.SetRenderFunc(
                    (BuildGPULightListPassData data, RenderGraphContext context) =>
                {
                    bool tileFlagsWritten = false;

                    var buildLightListResources = PrepareBuildGPULightListResources(context, data);

                    ClearLightLists(data.buildGPULightListParameters, buildLightListResources, context.cmd);
                    GenerateLightsScreenSpaceAABBs(data.buildGPULightListParameters, buildLightListResources, context.cmd);
                    BigTilePrepass(data.buildGPULightListParameters, buildLightListResources, context.cmd);
                    BuildPerTileLightList(data.buildGPULightListParameters, buildLightListResources, ref tileFlagsWritten, context.cmd);
                    VoxelLightListGeneration(data.buildGPULightListParameters, buildLightListResources, context.cmd);

                    BuildDispatchIndirectArguments(data.buildGPULightListParameters, buildLightListResources, tileFlagsWritten, context.cmd);

                    // TODO RENDERGRAPH WARNING: Note that the three sets of variables are bound here, but it should be handled differently.
                    PushLightLoopGlobalParams(data.lightLoopGlobalParameters, context.cmd);
                });

                return(passData.output);
            }
        }
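The eight output assignments above all pair ImportComputeBuffer with WriteComputeBuffer to expose a persistent ComputeBuffer as a pass output. A hypothetical helper (a sketch, not HDRP API) that factors out that pair of calls:

        // Hypothetical convenience helper: import a persistent ComputeBuffer into the graph
        // and declare it as an output of the current pass in one call.
        static ComputeBufferHandle ImportForWrite(RenderGraph renderGraph, RenderGraphBuilder builder, ComputeBuffer buffer)
        {
            return builder.WriteComputeBuffer(renderGraph.ImportComputeBuffer(buffer));
        }

        // Usage, equivalent to the assignments above:
        // passData.output.lightList = ImportForWrite(renderGraph, builder, m_TileAndClusterData.lightList);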
Example #8
        PrepassOutput RenderPrepass(RenderGraph renderGraph, TextureHandle sssBuffer, CullingResults cullingResults, HDCamera hdCamera)
        {
            m_IsDepthBufferCopyValid = false;

            var result = new PrepassOutput();

            result.gbuffer = m_GBufferOutput;
            result.dbuffer = m_DBufferOutput;

            bool msaa = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA);
            bool clearMotionVectors = hdCamera.camera.cameraType == CameraType.SceneView && !hdCamera.animateMaterials;


            // TODO: See how to clean this. Some buffers are created outside, some inside functions...
            result.motionVectorsBuffer = CreateMotionVectorBuffer(renderGraph, msaa, clearMotionVectors);
            result.depthBuffer         = CreateDepthBuffer(renderGraph, msaa);

            // TODO RENDERGRAPH : XR occlusion mesh also need to write to color buffer
            //RenderXROcclusionMeshes(renderGraph, hdCamera, result.depthBuffer);

            using (new XRSinglePassScope(renderGraph, hdCamera))
            {
                // TODO RENDERGRAPH
                //// Bind the custom color/depth before the first custom pass
                //if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.CustomPass))
                //{
                //    if (m_CustomPassColorBuffer.IsValueCreated)
                //        cmd.SetGlobalTexture(HDShaderIDs._CustomColorTexture, m_CustomPassColorBuffer.Value);
                //    if (m_CustomPassDepthBuffer.IsValueCreated)
                //        cmd.SetGlobalTexture(HDShaderIDs._CustomDepthTexture, m_CustomPassDepthBuffer.Value);
                //}
                //RenderCustomPass(renderContext, cmd, hdCamera, customPassCullingResults, CustomPassInjectionPoint.BeforeRendering);

                bool shouldRenderMotionVectorAfterGBuffer = RenderDepthPrepass(renderGraph, cullingResults, hdCamera, ref result);

                if (!shouldRenderMotionVectorAfterGBuffer)
                {
                    // If object motion vectors are enabled, this will render the objects with motion vectors into the target buffers (in addition to depth)
                    // Note: An object with motion vectors must not be rendered in the prepass, otherwise we could get motion vector writes that should have been rejected
                    RenderObjectsMotionVectors(renderGraph, cullingResults, hdCamera, result);
                }

                // If we have MSAA, we need to complete the motion vector buffer before the buffer resolves, hence we need to run camera MVs first.
                // This is always fine since shouldRenderMotionVectorAfterGBuffer is always false for forward.
                bool needCameraMVBeforeResolve = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA);
                if (needCameraMVBeforeResolve)
                {
                    RenderCameraMotionVectors(renderGraph, hdCamera, result.depthPyramidTexture, result.motionVectorsBuffer);
                }

                // TODO RENDERGRAPH
                //PreRenderSky(hdCamera, cmd);

                // At this point in forward all objects have been rendered to the prepass (depth/normal/motion vectors) so we can resolve them
                ResolvePrepassBuffers(renderGraph, hdCamera, ref result);

                RenderDBuffer(renderGraph, hdCamera, ref result, cullingResults);

                RenderGBuffer(renderGraph, sssBuffer, ref result, cullingResults, hdCamera);

                DecalNormalPatch(renderGraph, hdCamera, ref result);

                // TODO RENDERGRAPH
                //// After Depth and Normals/roughness including decals
                //RenderCustomPass(renderContext, cmd, hdCamera, customPassCullingResults, CustomPassInjectionPoint.AfterOpaqueDepthAndNormal);

                // In both forward and deferred, everything opaque should have been rendered at this point so we can safely copy the depth buffer for later processing.
                GenerateDepthPyramid(renderGraph, hdCamera, ref result);

                // TODO RENDERGRAPH
                //// Send all the geometry graphics buffer to client systems if required (must be done after the pyramid and before the transparent depth pre-pass)
                //SendGeometryGraphicsBuffers(cmd, hdCamera);

                if (shouldRenderMotionVectorAfterGBuffer)
                {
                    // See the call to RenderObjectsMotionVectors() above and its comment
                    RenderObjectsMotionVectors(renderGraph, cullingResults, hdCamera, result);
                }

                // If we don't have MSAA, we always run camera motion vectors once it is safe to assume object MVs have been rendered
                if (!needCameraMVBeforeResolve)
                {
                    RenderCameraMotionVectors(renderGraph, hdCamera, result.depthPyramidTexture, result.resolvedMotionVectorsBuffer);
                }

                // TODO RENDERGRAPH / Probably need to move this somewhere else.
                //RenderTransparencyOverdraw(cullingResults, hdCamera, renderContext, cmd);

                BuildCoarseStencilAndResolveIfNeeded(renderGraph, hdCamera, ref result);
            }

            return(result);
        }
Example #9
        public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, DiffuseDenoiserParameters tfParameters, TextureHandle noisyBuffer, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle outputBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <DiffuseDenoiserPassData>("DiffuseDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.DiffuseFilter)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                // Fetch all the resources
                passData.parameters         = tfParameters;
                passData.depthStencilBuffer = builder.ReadTexture(depthPyramid);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.noisyBuffer        = builder.ReadTexture(noisyBuffer);
                passData.intermediateBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "DiffuseDenoiserIntermediate"
                });
                passData.outputBuffer = builder.WriteTexture(outputBuffer);

                builder.SetRenderFunc(
                    (DiffuseDenoiserPassData data, RenderGraphContext ctx) =>
                {
                    DiffuseDenoiserResources ddResources = new DiffuseDenoiserResources();
                    ddResources.depthStencilBuffer       = data.depthStencilBuffer;
                    ddResources.normalBuffer             = data.normalBuffer;
                    ddResources.noisyBuffer        = data.noisyBuffer;
                    ddResources.intermediateBuffer = data.intermediateBuffer;
                    ddResources.outputBuffer       = data.outputBuffer;
                    DenoiseBuffer(ctx.cmd, data.parameters, ddResources);
                });
                return(passData.outputBuffer);
            }
        }
Example #10
        // Function that evaluates the history validation buffer
        public TextureHandle HistoryValidity(RenderGraph renderGraph, HDCamera hdCamera, float historyValidity,
                                             TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <HistoryValidityPassData>("History Validity Evaluation", out var passData, ProfilingSampler.Get(HDProfileId.HistoryValidity)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Denoising parameters
                passData.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
                passData.historyValidity    = historyValidity;

                // Kernels
                passData.validateHistoryKernel = m_ValidateHistoryKernel;

                // Other parameters
                passData.temporalFilterCS = m_TemporalFilterCS;

                // Input Buffers
                passData.depthStencilBuffer = builder.ReadTexture(depthBuffer);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.motionVectorBuffer = builder.ReadTexture(motionVectorBuffer);

                // History buffers
                passData.historyDepthTexture  = builder.ReadTexture(renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth)));
                passData.historyNormalTexture = builder.ReadTexture(renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal)));

                // Output buffers
                passData.validationBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R8_UNorm, enableRandomWrite = true, name = "ValidationTexture"
                }));

                builder.SetRenderFunc(
                    (HistoryValidityPassData data, RenderGraphContext ctx) =>
                {
                    RTHandle historyDepthTexture  = data.historyDepthTexture;
                    RTHandle historyNormalTexture = data.historyNormalTexture;
                    // If we do not have depth and normal history buffers, we can skip right away
                    if (historyDepthTexture == null || historyNormalTexture == null)
                    {
                        CoreUtils.SetRenderTarget(ctx.cmd, data.validationBuffer, clearFlag: ClearFlag.Color, Color.black);
                        return;
                    }

                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesX    = (data.texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesY    = (data.texHeight + (areaTileSize - 1)) / areaTileSize;

                    // First of all we need to validate the history to know where we can or cannot use the history signal
                    // Bind the input buffers
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._HistoryDepthTexture, data.historyDepthTexture);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._HistoryNormalTexture, data.historyNormalTexture);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._StencilTexture, data.depthStencilBuffer, 0, RenderTextureSubElement.Stencil);

                    // Bind the constants
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._HistoryValidity, data.historyValidity);
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._PixelSpreadAngleTangent, data.pixelSpreadTangent);
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._ObjectMotionStencilBit, (int)StencilUsage.ObjectMotionVector);

                    // Bind the output buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.validateHistoryKernel, HDShaderIDs._ValidationBufferRW, data.validationBuffer);

                    // Evaluate the validity
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.validateHistoryKernel, numTilesX, numTilesY, data.viewCount);
                });
                return(passData.validationBuffer);
            }
        }
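The (size + (tileSize - 1)) / tileSize expressions used for the dispatch above, and again in the temporal filter passes below, are plain integer round-up division; a one-line helper makes the intent explicit (a sketch, not an existing HDRP utility):

        // Integer division rounded up: how many tiles of 'tileSize' pixels are needed to cover 'size' pixels.
        static int DivRoundUp(int size, int tileSize) => (size + tileSize - 1) / tileSize;

        // e.g. int numTilesX = DivRoundUp(data.texWidth, 8);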
Example #11
        TextureHandle RaytracingRecursiveRender(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle colorBuffer, TextureHandle depthPyramid, TextureHandle flagMask, TextureHandle rayCountTexture)
        {
            // If ray tracing is disabled in the frame settings or the effect is not enabled
            RecursiveRendering recursiveSettings = hdCamera.volumeStack.GetComponent <RecursiveRendering>();

            if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) || !recursiveSettings.enable.value)
            {
                return(colorBuffer);
            }

            using (var builder = renderGraph.AddRenderPass <RecursiveRenderingPassData>("Recursive Rendering Evaluation", out var passData, ProfilingSampler.Get(HDProfileId.RayTracingRecursiveRendering)))
            {
                builder.EnableAsyncCompute(false);

                // Camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Effect parameters
                passData.rayLength     = recursiveSettings.rayLength.value;
                passData.maxDepth      = recursiveSettings.maxDepth.value;
                passData.minSmoothness = recursiveSettings.minSmoothness.value;

                // Other data
                passData.accelerationStructure       = RequestAccelerationStructure();
                passData.lightCluster                = RequestLightCluster();
                passData.recursiveRenderingRT        = HDRenderPipelineGlobalSettings.instance.renderPipelineRayTracingResources.forwardRaytracing;
                passData.skyTexture                  = m_SkyManager.GetSkyReflection(hdCamera);
                passData.shaderVariablesRayTracingCB = m_ShaderVariablesRayTracingCB;
                passData.ditheredTextureSet          = GetBlueNoiseManager().DitheredTextureSet8SPP();

                passData.depthStencilBuffer = builder.ReadTexture(depthPyramid);
                passData.flagMask           = builder.ReadTexture(flagMask);
                passData.rayCountTexture    = builder.ReadWriteTexture(rayCountTexture);
                passData.outputBuffer       = builder.ReadWriteTexture(colorBuffer);
                // Right now the debug buffer is written to independently of what is happening. This must be changed
                // TODO RENDERGRAPH
                passData.debugBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Recursive Rendering Debug Texture"
                }));

                builder.SetRenderFunc(
                    (RecursiveRenderingPassData data, RenderGraphContext ctx) =>
                {
                    // Define the shader pass to use for the reflection pass
                    ctx.cmd.SetRayTracingShaderPass(data.recursiveRenderingRT, "ForwardDXR");

                    // Set the acceleration structure for the pass
                    ctx.cmd.SetRayTracingAccelerationStructure(data.recursiveRenderingRT, HDShaderIDs._RaytracingAccelerationStructureName, data.accelerationStructure);

                    // Inject the ray-tracing sampling data
                    BlueNoise.BindDitheredTextureSet(ctx.cmd, data.ditheredTextureSet);

                    // Update Global Constant Buffer.
                    data.shaderVariablesRayTracingCB._RaytracingRayMaxLength            = data.rayLength;
                    data.shaderVariablesRayTracingCB._RaytracingMaxRecursion            = data.maxDepth;
                    data.shaderVariablesRayTracingCB._RaytracingReflectionMinSmoothness = data.minSmoothness;
                    ConstantBuffer.PushGlobal(ctx.cmd, data.shaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                    // Fetch the temporary buffers we shall be using
                    ctx.cmd.SetRayTracingTextureParam(data.recursiveRenderingRT, HDShaderIDs._RaytracingFlagMask, data.flagMask);
                    ctx.cmd.SetRayTracingTextureParam(data.recursiveRenderingRT, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetRayTracingTextureParam(data.recursiveRenderingRT, HDShaderIDs._CameraColorTextureRW, data.outputBuffer);

                    // Set ray count texture
                    ctx.cmd.SetRayTracingTextureParam(data.recursiveRenderingRT, HDShaderIDs._RayCountTexture, data.rayCountTexture);

                    // LightLoop data
                    data.lightCluster.BindLightClusterData(ctx.cmd);

                    // Set the data for the ray miss
                    ctx.cmd.SetRayTracingTextureParam(data.recursiveRenderingRT, HDShaderIDs._SkyTexture, data.skyTexture);

                    // Bind the debug output texture (currently written regardless of the active debug mode; see the TODO above)
                    ctx.cmd.SetRayTracingTextureParam(data.recursiveRenderingRT, HDShaderIDs._RaytracingPrimaryDebug, data.debugBuffer);

                    // Run the computation
                    ctx.cmd.DispatchRays(data.recursiveRenderingRT, m_RayGenShaderName, (uint)data.texWidth, (uint)data.texHeight, (uint)data.viewCount);
                });

                PushFullScreenDebugTexture(m_RenderGraph, passData.debugBuffer, FullScreenDebugMode.RecursiveRayTracing);

                return(passData.outputBuffer);
            }
        }
Example #12
        public TemporalDenoiserArrayOutputData DenoiseBuffer(RenderGraph renderGraph, HDCamera hdCamera,
                                                             TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorBuffer, TextureHandle historyValidationBuffer,
                                                             TextureHandle noisyBuffer, RTHandle historyBuffer,
                                                             TextureHandle distanceBuffer, RTHandle distanceHistorySignal,
                                                             TextureHandle velocityBuffer,
                                                             RTHandle validationHistoryBuffer,
                                                             int sliceIndex, Vector4 channelMask, Vector4 distanceChannelMask,
                                                             bool distanceBased, bool singleChannel, float historyValidity)
        {
            TemporalDenoiserArrayOutputData resultData = new TemporalDenoiserArrayOutputData();

            using (var builder = renderGraph.AddRenderPass <TemporalFilterArrayPassData>("TemporalDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.TemporalFilter)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                // Set the camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Denoising parameters
                passData.distanceBasedDenoiser = distanceBased;
                passData.historyValidity       = historyValidity;
                passData.pixelSpreadTangent    = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
                passData.sliceIndex            = sliceIndex;
                passData.channelMask           = channelMask;
                passData.distanceChannelMask   = distanceChannelMask;

                // Kernels
                passData.temporalAccKernel           = singleChannel ? m_TemporalAccumulationSingleArrayKernel : m_TemporalAccumulationColorArrayKernel;
                passData.copyHistoryKernel           = singleChannel ? m_CopyHistorySingleArrayKernel : m_CopyHistoryColorArrayKernel;
                passData.temporalAccSingleKernel     = m_TemporalAccumulationSingleArrayKernel;
                passData.copyHistoryNoValidityKernel = m_CopyHistorySingleArrayNoValidityKernel;

                // Other parameters
                passData.temporalFilterCS = m_TemporalFilterCS;

                // Input buffers
                passData.depthStencilBuffer = builder.ReadTexture(depthBuffer);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.motionVectorBuffer = builder.ReadTexture(motionVectorBuffer);

                passData.velocityBuffer   = builder.ReadTexture(velocityBuffer);
                passData.noisyBuffer      = builder.ReadTexture(noisyBuffer);
                passData.distanceBuffer   = distanceBased ? builder.ReadTexture(distanceBuffer) : renderGraph.defaultResources.blackTextureXR;
                passData.validationBuffer = builder.ReadTexture(historyValidationBuffer);

                // History buffers
                passData.historyBuffer           = builder.ReadWriteTexture(renderGraph.ImportTexture(historyBuffer));
                passData.validationHistoryBuffer = builder.ReadWriteTexture(renderGraph.ImportTexture(validationHistoryBuffer));
                passData.distanceHistorySignal   = distanceBased ? builder.ReadWriteTexture(renderGraph.ImportTexture(distanceHistorySignal)) : renderGraph.defaultResources.blackTextureXR;

                // Output textures
                passData.outputBuffer = builder.ReadWriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporal Filter Output"
                }));
                passData.outputDistanceSignal = distanceBased ? builder.ReadWriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporal Filter Distance output"
                })) : new TextureHandle();

                builder.SetRenderFunc(
                    (TemporalFilterArrayPassData data, RenderGraphContext ctx) =>
                {
                    // Evaluate the dispatch parameters
                    int tfTileSize = 8;
                    int numTilesX  = (data.texWidth + (tfTileSize - 1)) / tfTileSize;
                    int numTilesY  = (data.texHeight + (tfTileSize - 1)) / tfTileSize;

                    // Now that we have validated our history, let's accumulate
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DenoiseInputTexture, data.noisyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._HistoryBuffer, data.historyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._HistoryValidityBuffer, data.validationHistoryBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._ValidationBuffer, data.validationBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._VelocityBuffer, data.velocityBuffer);

                    // Bind the constants
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                    ctx.cmd.SetComputeVectorParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistoryMask, data.channelMask);
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._HistoryValidity, data.historyValidity);

                    // Bind the output buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DenoiseOutputTextureRW, data.outputBuffer);

                    // Combine with the history
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.temporalAccKernel, numTilesX, numTilesY, data.viewCount);

                    // Make sure to copy the newly accumulated signal into our history buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._DenoiseInputTexture, data.outputBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._DenoiseOutputTextureRW, data.historyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._ValidityOutputTextureRW, data.validationHistoryBuffer);
                    ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                    ctx.cmd.SetComputeVectorParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistoryMask, data.channelMask);
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.copyHistoryKernel, numTilesX, numTilesY, data.viewCount);

                    if (data.distanceBasedDenoiser)
                    {
                        // Bind the input buffers
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._DenoiseInputTexture, data.distanceBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._HistoryBuffer, data.distanceHistorySignal);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._HistoryValidityBuffer, data.validationHistoryBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._ValidationBuffer, data.validationBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._VelocityBuffer, data.velocityBuffer);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);

                        // Bind the constant inputs
                        ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                        ctx.cmd.SetComputeVectorParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistoryMask, data.distanceChannelMask);

                        // Bind the output buffers
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccSingleKernel, HDShaderIDs._DenoiseOutputTextureRW, data.outputDistanceSignal);

                        // Dispatch the temporal accumulation
                        ctx.cmd.DispatchCompute(data.temporalFilterCS, data.temporalAccSingleKernel, numTilesX, numTilesY, data.viewCount);

                        // Make sure to copy the newly accumulated signal into our history buffer
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryNoValidityKernel, HDShaderIDs._DenoiseInputTexture, data.outputDistanceSignal);
                        ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryNoValidityKernel, HDShaderIDs._DenoiseOutputTextureRW, data.distanceHistorySignal);
                        ctx.cmd.SetComputeIntParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistorySlice, data.sliceIndex);
                        ctx.cmd.SetComputeVectorParam(data.temporalFilterCS, HDShaderIDs._DenoisingHistoryMask, data.distanceChannelMask);
                        ctx.cmd.DispatchCompute(data.temporalFilterCS, data.copyHistoryNoValidityKernel, numTilesX, numTilesY, data.viewCount);
                    }
                });

                resultData.outputSignal         = passData.outputBuffer;
                resultData.outputSignalDistance = passData.outputDistanceSignal;
            }
            return(resultData);
        }
Example #13
        // Denoiser variant that does not use a history array
        public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, bool singleChannel, float historyValidity,
                                     TextureHandle noisyBuffer, TextureHandle velocityBuffer,
                                     TextureHandle historyBuffer,
                                     TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorBuffer, TextureHandle historyValidationBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <TemporalFilterPassData>("TemporalDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.TemporalFilter)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                // Camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Denoising parameters
                passData.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
                passData.historyValidity    = historyValidity;

                // Kernels
                passData.temporalAccKernel = singleChannel ? m_TemporalAccumulationSingleKernel : m_TemporalAccumulationColorKernel;
                passData.copyHistoryKernel = singleChannel ? m_CopyHistorySingleKernel : m_CopyHistoryColorKernel;

                // Other parameters
                passData.temporalFilterCS = m_TemporalFilterCS;

                // Prepass Buffers
                passData.depthStencilBuffer = builder.ReadTexture(depthBuffer);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.motionVectorBuffer = builder.ReadTexture(motionVectorBuffer);

                // Effect buffers
                passData.velocityBuffer   = builder.ReadTexture(velocityBuffer);
                passData.noisyBuffer      = builder.ReadTexture(noisyBuffer);
                passData.validationBuffer = builder.ReadTexture(historyValidationBuffer);

                // History buffer
                passData.historyBuffer = builder.ReadWriteTexture(historyBuffer);

                // Output buffers
                passData.outputBuffer = builder.ReadWriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporal Filter Output"
                }));

                builder.SetRenderFunc(
                    (TemporalFilterPassData data, RenderGraphContext ctx) =>
                {
                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesX    = (data.texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesY    = (data.texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Now that we have validated our history, let's accumulate
                    // Bind the input buffers
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DenoiseInputTexture, data.noisyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._HistoryBuffer, data.historyBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._ValidationBuffer, data.validationBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._VelocityBuffer, data.velocityBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._CameraMotionVectorsTexture, data.motionVectorBuffer);
                    ctx.cmd.SetComputeFloatParam(data.temporalFilterCS, HDShaderIDs._HistoryValidity, data.historyValidity);

                    // Bind the output buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._DenoiseOutputTextureRW, data.outputBuffer);

                    // Combine signal with history
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.temporalAccKernel, numTilesX, numTilesY, data.viewCount);

                    // Make sure to copy the newly accumulated signal into our history buffer
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._DenoiseInputTexture, data.outputBuffer);
                    ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._DenoiseOutputTextureRW, data.historyBuffer);
                    ctx.cmd.DispatchCompute(data.temporalFilterCS, data.copyHistoryKernel, numTilesX, numTilesY, data.viewCount);
                });
                return(passData.outputBuffer);
            }
        }
Example #14
        public void Render(RenderGraph renderGraph,
                           HDCamera hdCamera,
                           BlueNoise blueNoise,
                           TextureHandle colorBuffer,
                           TextureHandle afterPostProcessTexture,
                           TextureHandle depthBuffer,
                           TextureHandle depthBufferMipChain,
                           TextureHandle motionVectors,
                           TextureHandle finalRT,
                           bool flipY)
        {
            var           source       = colorBuffer;
            TextureHandle alphaTexture = DoCopyAlpha(renderGraph, hdCamera, source);

            // Note: whether a pass is really executed or not is generally decided inside the Do* functions,
            // with a few exceptions.

            if (m_PostProcessEnabled)
            {
                source = ClearWithGuardBands(renderGraph, hdCamera, source);

                source = StopNaNsPass(renderGraph, hdCamera, source);

                source = DynamicExposurePass(renderGraph, hdCamera, source);


                //if (camera.frameSettings.IsEnabled(FrameSettingsField.CustomPostProcess))
                //{
                //    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.CustomPostProcessBeforeTAA)))
                //    {
                //        foreach (var typeString in HDRenderPipeline.defaultAsset.beforeTAACustomPostProcesses)
                //            RenderCustomPostProcess(cmd, camera, ref source, colorBuffer, Type.GetType(typeString));
                //    }
                //}

                // Temporal anti-aliasing goes first
                if (m_AntialiasingFS)
                {
                    if (hdCamera.antialiasing == HDAdditionalCameraData.AntialiasingMode.TemporalAntialiasing)
                    {
                        source = DoTemporalAntialiasing(renderGraph, hdCamera, depthBuffer, motionVectors, depthBufferMipChain, source);
                    }
                    else if (hdCamera.antialiasing == HDAdditionalCameraData.AntialiasingMode.SubpixelMorphologicalAntiAliasing)
                    {
                        source = SMAAPass(renderGraph, hdCamera, depthBuffer, source);
                    }
                }

                //                if (camera.frameSettings.IsEnabled(FrameSettingsField.CustomPostProcess))
                //                {
                //                    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.CustomPostProcessBeforePP)))
                //                    {
                //                        foreach (var typeString in HDRenderPipeline.defaultAsset.beforePostProcessCustomPostProcesses)
                //                            RenderCustomPostProcess(cmd, camera, ref source, colorBuffer, Type.GetType(typeString));
                //                    }
                //                }


                source = DepthOfFieldPass(renderGraph, hdCamera, depthBuffer, motionVectors, depthBufferMipChain, source);

                // Motion blur after depth of field for aesthetic reasons (better to see motion
                // blurred bokeh rather than out of focus motion blur)
                source = MotionBlurPass(renderGraph, hdCamera, motionVectors, source);

                // Panini projection is done as a fullscreen pass after all depth-based effects are
                // done and before bloom kicks in
                // This is one effect that would benefit from an overscan mode or supersampling in
                // HDRP to reduce the amount of resolution lost at the center of the screen
                source = PaniniProjectionPass(renderGraph, hdCamera, source);

                TextureHandle bloomTexture = BloomPass(renderGraph, hdCamera, source);
                TextureHandle logLutOutput = ColorGradingPass(renderGraph, hdCamera);
                source = UberPass(renderGraph, hdCamera, logLutOutput, bloomTexture, source);
                m_HDInstance.PushFullScreenDebugTexture(renderGraph, source, FullScreenDebugMode.ColorLog);

                //                if (camera.frameSettings.IsEnabled(FrameSettingsField.CustomPostProcess))
                //                {
                //                    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.CustomPostProcessAfterPP)))
                //                    {
                //                        foreach (var typeString in HDRenderPipeline.defaultAsset.afterPostProcessCustomPostProcesses)
                //                            RenderCustomPostProcess(cmd, camera, ref source, colorBuffer, Type.GetType(typeString));
                //                    }
                //                }


                source = FXAAPass(renderGraph, hdCamera, source);

                hdCamera.resetPostProcessingHistory = false;
            }

            // Contrast Adaptive Sharpen Upscaling
            source = ContrastAdaptiveSharpeningPass(renderGraph, hdCamera, source);

            FinalPass(renderGraph, hdCamera, afterPostProcessTexture, alphaTexture, finalRT, source, blueNoise, flipY);
        }
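Each helper in the chain above takes the current source handle and returns the handle of its own output, so the post-processing stack is effectively a left-to-right fold over texture handles; a stage that has nothing to do simply returns its input. A hypothetical extra stage would follow the same shape (a sketch only, not an HDRP method):

        // Hypothetical extra post-process stage following the same "take source, return new source" convention.
        TextureHandle MyCustomEffectPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle source)
        {
            // Add a render-graph pass here that reads 'source', writes a new texture and returns its handle.
            // When the effect is disabled, returning 'source' unchanged keeps the chain intact.
            return source;
        }

        // Inside Render(), it would slot into the chain like any other stage:
        // source = MyCustomEffectPass(renderGraph, hdCamera, source);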
Example #15
        void FinalPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle afterPostProcessTexture, TextureHandle alphaTexture, TextureHandle finalRT, TextureHandle source, BlueNoise blueNoise, bool flipY)
        {
            using (var builder = renderGraph.AddRenderPass <FinalPassData>("Final Pass", out var passData, ProfilingSampler.Get(HDProfileId.FinalPost)))
            {
                passData.parameters = PrepareFinalPass(hdCamera, blueNoise, flipY);
                passData.source     = builder.ReadTexture(source);
                passData.afterPostProcessTexture = builder.ReadTexture(afterPostProcessTexture);
                passData.alphaTexture            = builder.ReadTexture(alphaTexture);
                passData.destination             = builder.WriteTexture(finalRT);

                builder.SetRenderFunc(
                    (FinalPassData data, RenderGraphContext ctx) =>
                {
                    DoFinalPass(data.parameters, data.source, data.afterPostProcessTexture, data.destination, data.alphaTexture, ctx.cmd);
                });
            }
        }
Example #16
        public SSGIDenoiserOutput Denoise(RenderGraph renderGraph, HDCamera hdCamera,
                                          TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer, TextureHandle inputOutputBuffer0, TextureHandle inputOutputBuffer1,
                                          HDUtils.PackedMipChainInfo depthMipInfo, bool halfResolution = false, float historyValidity = 1.0f)
        {
            using (var builder = renderGraph.AddRenderPass <DenoiseSSGIPassData>("Denoise SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGIDenoise)))
            {
                builder.EnableAsyncCompute(false);

                // Input buffers
                passData.depthTexture        = builder.ReadTexture(depthPyramid);
                passData.normalBuffer        = builder.ReadTexture(normalBuffer);
                passData.motionVectorsBuffer = builder.ReadTexture(motionVectorsBuffer);


                // History buffer
                bool     historyRequireClear     = false;
                RTHandle indirectDiffuseHistory0 = RequestIndirectDiffuseHistory0(hdCamera, out historyRequireClear);
                passData.indirectDiffuseHistory0 = builder.ReadTexture(builder.WriteTexture(renderGraph.ImportTexture(indirectDiffuseHistory0)));
                RTHandle indirectDiffuseHistory1 = RequestIndirectDiffuseHistory1(hdCamera, out historyRequireClear);
                passData.indirectDiffuseHistory1 = builder.ReadTexture(builder.WriteTexture(renderGraph.ImportTexture(indirectDiffuseHistory1)));
                var historyDepthBuffer = halfResolution ? hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth1) : hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                passData.historyDepthBuffer = historyDepthBuffer != null ? builder.ReadTexture(renderGraph.ImportTexture(historyDepthBuffer)) : renderGraph.defaultResources.blackTextureXR;

                passData.intermediateBuffer0 = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Denoiser Intermediate0"
                });
                passData.intermediateBuffer1 = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Denoiser Intermediate1"
                });
                passData.inputOutputBuffer0 = builder.WriteTexture(builder.ReadTexture(inputOutputBuffer0));
                passData.inputOutputBuffer1 = builder.WriteTexture(builder.ReadTexture(inputOutputBuffer1));

                passData.parameters = PrepareSSGIDenoiserParameters(hdCamera, halfResolution, historyValidity, historyRequireClear, depthMipInfo);

                builder.SetRenderFunc(
                    (DenoiseSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSGIDenoiserResources resources   = new SSGIDenoiserResources();
                    resources.depthTexture            = data.depthTexture;
                    resources.normalBuffer            = data.normalBuffer;
                    resources.motionVectorsBuffer     = data.motionVectorsBuffer;
                    resources.indirectDiffuseHistory0 = data.indirectDiffuseHistory0;
                    resources.indirectDiffuseHistory1 = data.indirectDiffuseHistory1;
                    resources.historyDepthBuffer      = data.historyDepthBuffer;
                    resources.intermediateBuffer0     = data.intermediateBuffer0;
                    resources.intermediateBuffer1     = data.intermediateBuffer1;
                    resources.inputOutputBuffer0      = data.inputOutputBuffer0;
                    resources.inputOutputBuffer1      = data.inputOutputBuffer1;
                    Denoise(ctx.cmd, data.parameters, resources);
                });

                SSGIDenoiserOutput denoiserOutput = new SSGIDenoiserOutput();
                denoiserOutput.outputBuffer0 = inputOutputBuffer0;
                denoiserOutput.outputBuffer1 = inputOutputBuffer1;
                return(denoiserOutput);
            }
        }
Example #17
        TextureHandle DenoiseRTGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle rtGIBuffer, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorBuffer, TextureHandle historyValidationTexture)
        {
            var giSettings = hdCamera.volumeStack.GetComponent <GlobalIllumination>();

            if (giSettings.denoise)
            {
                // Evaluate the history's validity
                float historyValidity0 = EvaluateIndirectDiffuseHistoryValidity0(hdCamera, giSettings.fullResolution, true);

                HDTemporalFilter  temporalFilter  = GetTemporalFilter();
                HDDiffuseDenoiser diffuseDenoiser = GetDiffuseDenoiser();

                // Run the temporal denoiser
                TextureHandle historyBufferHF = renderGraph.ImportTexture(RequestIndirectDiffuseHistoryTextureHF(hdCamera));
                TextureHandle denoisedRTGI    = temporalFilter.Denoise(renderGraph, hdCamera, singleChannel: false, historyValidity0, rtGIBuffer, renderGraph.defaultResources.blackTextureXR, historyBufferHF, depthPyramid, normalBuffer, motionVectorBuffer, historyValidationTexture);

                // Apply the diffuse denoiser
                rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, singleChannel: false, kernelSize: giSettings.denoiserRadius, halfResolutionFilter: giSettings.halfResolutionDenoiser, jitterFilter: giSettings.secondDenoiserPass, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);

                // If a second denoising pass is requested, run it; otherwise keep the single-pass result
                if (giSettings.secondDenoiserPass)
                {
                    float historyValidity1 = EvaluateIndirectDiffuseHistoryValidity1(hdCamera, giSettings.fullResolution, true);

                    // Run the temporal denoiser
                    TextureHandle historyBufferLF = renderGraph.ImportTexture(RequestIndirectDiffuseHistoryTextureLF(hdCamera));
                    denoisedRTGI = temporalFilter.Denoise(renderGraph, hdCamera, singleChannel: false, historyValidity1, rtGIBuffer, renderGraph.defaultResources.blackTextureXR, historyBufferLF, depthPyramid, normalBuffer, motionVectorBuffer, historyValidationTexture);

                    // Apply the diffuse denoiser
                    rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, singleChannel: false, kernelSize: giSettings.denoiserRadius * 0.5f, halfResolutionFilter: giSettings.halfResolutionDenoiser, jitterFilter: false, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);

                    // Propagate the history validity for the second buffer
                    PropagateIndirectDiffuseHistoryValidity1(hdCamera, giSettings.fullResolution, true);
                }

                // Propagate the history validity for the first buffer
                PropagateIndirectDiffuseHistoryValidity0(hdCamera, giSettings.fullResolution, true);

                return(rtGIBuffer);
            }

            // Denoising was not requested; return the input signal untouched
            return(rtGIBuffer);
        }
Example #18
        TextureHandle RenderPostProcess(RenderGraph renderGraph,
                                        PrepassOutput prepassOutput,
                                        TextureHandle inputColor,
                                        TextureHandle backBuffer,
                                        CullingResults cullResults,
                                        HDCamera hdCamera)
        {
            PostProcessParameters parameters = PreparePostProcess(cullResults, hdCamera);

            TextureHandle afterPostProcessBuffer = renderGraph.defaultResources.blackTextureXR;
            TextureHandle dest = HDUtils.PostProcessIsFinalPass(parameters.hdCamera) ? backBuffer : renderGraph.CreateTexture(
                new TextureDesc(Vector2.one, true, true)
            {
                colorFormat = GetColorBufferFormat(), name = "Intermediate Postprocess buffer"
            });

            if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.AfterPostprocess))
            {
                // We render AfterPostProcess objects first into a separate buffer that will be composited in the final post process pass
                using (var builder = renderGraph.AddRenderPass <AfterPostProcessPassData>("After Post-Process", out var passData, ProfilingSampler.Get(HDProfileId.AfterPostProcessing)))
                {
                    passData.parameters             = parameters;
                    passData.afterPostProcessBuffer = builder.UseColorBuffer(renderGraph.CreateTexture(
                                                                                 new TextureDesc(Vector2.one, true, true)
                    {
                        colorFormat = GraphicsFormat.R8G8B8A8_SRGB, clearBuffer = true, clearColor = Color.black, name = "OffScreen AfterPostProcess"
                    }), 0);
                    if (passData.parameters.useDepthBuffer)
                    {
                        passData.depthStencilBuffer = builder.UseDepthBuffer(prepassOutput.depthBuffer, DepthAccess.ReadWrite);
                    }
                    passData.opaqueAfterPostprocessRL      = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.opaqueAfterPPDesc));
                    passData.transparentAfterPostprocessRL = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.transparentAfterPPDesc));

                    builder.SetRenderFunc(
                        (AfterPostProcessPassData data, RenderGraphContext ctx) =>
                    {
                        RenderAfterPostProcess(data.parameters
                                               , ctx.resources.GetRendererList(data.opaqueAfterPostprocessRL)
                                               , ctx.resources.GetRendererList(data.transparentAfterPostprocessRL)
                                               , ctx.renderContext, ctx.cmd);
                    });

                    afterPostProcessBuffer = passData.afterPostProcessBuffer;
                }
            }

            m_PostProcessSystem.Render(
                renderGraph,
                parameters.hdCamera,
                parameters.blueNoise,
                inputColor,
                afterPostProcessBuffer,
                prepassOutput.resolvedDepthBuffer,
                prepassOutput.depthPyramidTexture,
                prepassOutput.resolvedMotionVectorsBuffer,
                dest,
                parameters.flipYInPostProcess
                );

            return(dest);
        }
Example #19
 LightingOutput RenderDeferredLighting(RenderGraph renderGraph,
                                       HDCamera hdCamera,
                                       TextureHandle colorBuffer,
                                       TextureHandle depthStencilBuffer,
                                       TextureHandle depthPyramidTexture,
                                       in LightingBuffers lightingBuffers,
Example #20
        TextureHandle DirGenRTGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination settings, TextureHandle depthPyramid, TextureHandle normalBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <DirGenRTGIPassData>("Generating the rays for RTGI", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingIndirectDiffuseDirectionGeneration)))
            {
                builder.EnableAsyncCompute(false);

                // Set the camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Set the generation parameters
                passData.fullResolution = settings.fullResolution;

                // Grab the right kernel
                passData.directionGenCS = m_Asset.renderPipelineRayTracingResources.indirectDiffuseRaytracingCS;
                passData.dirGenKernel   = settings.fullResolution ? m_RaytracingIndirectDiffuseFullResKernel : m_RaytracingIndirectDiffuseHalfResKernel;

                // Grab the additional parameters
                passData.ditheredTextureSet          = GetBlueNoiseManager().DitheredTextureSet8SPP();
                passData.shaderVariablesRayTracingCB = m_ShaderVariablesRayTracingCB;

                passData.depthStencilBuffer = builder.ReadTexture(depthPyramid);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.outputBuffer       = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "GI Ray Directions"
                }));

                builder.SetRenderFunc(
                    (DirGenRTGIPassData data, RenderGraphContext ctx) =>
                {
                    // Inject the ray-tracing sampling data
                    BlueNoise.BindDitheredTextureSet(ctx.cmd, data.ditheredTextureSet);

                    // Bind all the required textures
                    ctx.cmd.SetComputeTextureParam(data.directionGenCS, data.dirGenKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.directionGenCS, data.dirGenKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);

                    // Bind the output buffers
                    ctx.cmd.SetComputeTextureParam(data.directionGenCS, data.dirGenKernel, HDShaderIDs._RaytracingDirectionBuffer, data.outputBuffer);

                    int numTilesXHR, numTilesYHR;
                    if (data.fullResolution)
                    {
                        // Evaluate the dispatch parameters
                        numTilesXHR = (data.texWidth + (rtReflectionsComputeTileSize - 1)) / rtReflectionsComputeTileSize;
                        numTilesYHR = (data.texHeight + (rtReflectionsComputeTileSize - 1)) / rtReflectionsComputeTileSize;
                    }
                    else
                    {
                        // Evaluate the dispatch parameters
                        numTilesXHR = (data.texWidth / 2 + (rtReflectionsComputeTileSize - 1)) / rtReflectionsComputeTileSize;
                        numTilesYHR = (data.texHeight / 2 + (rtReflectionsComputeTileSize - 1)) / rtReflectionsComputeTileSize;
                    }

                    // Compute the directions
                    ctx.cmd.DispatchCompute(data.directionGenCS, data.dirGenKernel, numTilesXHR, numTilesYHR, data.viewCount);
                });

                return(passData.outputBuffer);
            }
        }
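
The dispatch size in the pass above is obtained by rounding the (optionally half-resolution) target size up to a whole number of 8x8 thread groups. The standalone sketch below reproduces only that ceiling-division tile math; the helper and program names are illustrative and not part of the HDRP API, and the tile size of 8 simply mirrors the rtReflectionsComputeTileSize / areaTileSize constants used in these passes.

        // Illustrative only: a pure C# reproduction of the tile math used for DispatchCompute above.
        using System;

        static class TileMathSketch
        {
            // Rounds a pixel count up to a whole number of tileSize-wide thread groups.
            static int TileCount(int pixels, int tileSize) => (pixels + (tileSize - 1)) / tileSize;

            static void Main()
            {
                const int tileSize = 8;

                // Full resolution: 1920x1080 needs 240x135 groups.
                Console.WriteLine($"{TileCount(1920, tileSize)} x {TileCount(1080, tileSize)}");

                // Half resolution: the pixel count is halved before tiling, as in the half-res branch above.
                Console.WriteLine($"{TileCount(1920 / 2, tileSize)} x {TileCount(1080 / 2, tileSize)}");
            }
        }
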
Example #21
        void RenderFullScreenDebug(RenderGraph renderGraph, TextureHandle colorBuffer, TextureHandle depthBuffer, CullingResults cull, HDCamera hdCamera)
        {
            TextureHandle       fullscreenDebugOutput = TextureHandle.nullHandle;
            ComputeBufferHandle fullscreenDebugBuffer = ComputeBufferHandle.nullHandle;

            using (var builder = renderGraph.AddRenderPass <FullScreenDebugPassData>("FullScreen Debug", out var passData))
            {
                passData.parameters   = PrepareFullScreenDebugParameters(hdCamera, cull);
                passData.output       = builder.WriteTexture(colorBuffer);
                passData.depthBuffer  = builder.ReadTexture(depthBuffer);
                passData.debugBuffer  = builder.WriteComputeBuffer(renderGraph.CreateComputeBuffer(new ComputeBufferDesc(hdCamera.actualWidth * hdCamera.actualHeight * hdCamera.viewCount, sizeof(uint))));
                passData.rendererList = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.rendererList));

                builder.SetRenderFunc(
                    (FullScreenDebugPassData data, RenderGraphContext ctx) =>
                {
                    RenderFullScreenDebug(data.parameters,
                                          data.output,
                                          data.depthBuffer,
                                          data.debugBuffer,
                                          data.rendererList,
                                          ctx.renderContext, ctx.cmd);
                });

                fullscreenDebugOutput = passData.output;
                fullscreenDebugBuffer = passData.debugBuffer;
            }

            m_DebugFullScreenComputeBuffer = fullscreenDebugBuffer;
            PushFullScreenDebugTexture(renderGraph, ResolveMSAAColor(renderGraph, hdCamera, fullscreenDebugOutput));
        }
Example #22
        TextureHandle UpscaleRTGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination settings,
                                  TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle indirectDiffuseBuffer, TextureHandle directionBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <UpscaleRTGIPassData>("Upscale the RTGI result", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingIndirectDiffuseUpscale)))
            {
                builder.EnableAsyncCompute(false);

                // Set the camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Set the generation parameters
                passData.upscaleRadius = settings.upscaleRadius;

                // Grab the right kernel
                passData.upscaleCS     = m_Asset.renderPipelineRayTracingResources.indirectDiffuseRaytracingCS;
                passData.upscaleKernel = settings.fullResolution ? m_IndirectDiffuseUpscaleFullResKernel : m_IndirectDiffuseUpscaleHalfResKernel;

                // Grab the additional parameters
                passData.blueNoiseTexture  = GetBlueNoiseManager().textureArray16RGB;
                passData.scramblingTexture = m_Asset.renderPipelineResources.textures.scramblingTex;

                passData.depthBuffer           = builder.ReadTexture(depthPyramid);
                passData.normalBuffer          = builder.ReadTexture(normalBuffer);
                passData.indirectDiffuseBuffer = builder.ReadTexture(indirectDiffuseBuffer);
                passData.directionBuffer       = builder.ReadTexture(directionBuffer);
                passData.outputBuffer          = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Reflection Ray Indirect Diffuse"
                }));

                builder.SetRenderFunc(
                    (UpscaleRTGIPassData data, RenderGraphContext ctx) =>
                {
                    // Inject all the parameters for the compute
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._DepthTexture, data.depthBuffer);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._IndirectDiffuseTexture, data.indirectDiffuseBuffer);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._RaytracingDirectionBuffer, data.directionBuffer);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._BlueNoiseTexture, data.blueNoiseTexture);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._ScramblingTexture, data.scramblingTexture);
                    ctx.cmd.SetComputeIntParam(data.upscaleCS, HDShaderIDs._SpatialFilterRadius, data.upscaleRadius);

                    // Output buffer
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._UpscaledIndirectDiffuseTextureRW, data.outputBuffer);

                    // Texture dimensions
                    int texWidth  = data.texWidth;
                    int texHeight = data.texHeight;

                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesXHR  = (texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesYHR  = (texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Compute the texture
                    ctx.cmd.DispatchCompute(data.upscaleCS, data.upscaleKernel, numTilesXHR, numTilesYHR, data.viewCount);
                });

                return(passData.outputBuffer);
            }
        }
Example #23
        TextureHandle UpscaleSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination giSettings, TextureHandle depthPyramid, TextureHandle inputBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <UpscaleSSGIPassData>("Upscale SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGIUpscale)))
            {
                builder.EnableAsyncCompute(false);

                passData.parameters   = PrepareSSGIUpscaleParameters(hdCamera, giSettings);
                passData.depthTexture = builder.ReadTexture(depthPyramid);
                passData.inputBuffer  = builder.ReadTexture(inputBuffer);
                passData.outputBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Final"
                }));

                builder.SetRenderFunc(
                    (UpscaleSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSGIUpscaleResources resources = new SSGIUpscaleResources();
                    resources.depthTexture         = data.depthTexture;
                    resources.inputBuffer          = data.inputBuffer;
                    resources.outputBuffer         = data.outputBuffer;
                    ExecuteSSGIUpscale(ctx.cmd, data.parameters, resources);
                });
                return(passData.outputBuffer);
            }
        }
Example #24
        TextureHandle AdjustRTGIWeight(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle indirectDiffuseBuffer, TextureHandle depthPyramid, TextureHandle stencilBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <AdjustRTGIWeightPassData>("Adjust the RTGI weight", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingIndirectDiffuseAdjustWeight)))
            {
                builder.EnableAsyncCompute(false);

                // Set the camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Grab the right kernel
                passData.adjustWeightCS     = m_Asset.renderPipelineRayTracingResources.indirectDiffuseRaytracingCS;
                passData.adjustWeightKernel = m_AdjustIndirectDiffuseWeightKernel;

                passData.depthPyramid          = builder.ReadTexture(depthPyramid);
                passData.stencilBuffer         = builder.ReadTexture(stencilBuffer);
                passData.indirectDiffuseBuffer = builder.ReadWriteTexture(indirectDiffuseBuffer);

                builder.SetRenderFunc(
                    (AdjustRTGIWeightPassData data, RenderGraphContext ctx) =>
                {
                    // Input data
                    ctx.cmd.SetComputeTextureParam(data.adjustWeightCS, data.adjustWeightKernel, HDShaderIDs._DepthTexture, data.depthPyramid);
                    ctx.cmd.SetComputeTextureParam(data.adjustWeightCS, data.adjustWeightKernel, HDShaderIDs._StencilTexture, data.stencilBuffer, 0, RenderTextureSubElement.Stencil);
                    ctx.cmd.SetComputeIntParams(data.adjustWeightCS, HDShaderIDs._SsrStencilBit, (int)StencilUsage.TraceReflectionRay);

                    // In/Output buffer
                    ctx.cmd.SetComputeTextureParam(data.adjustWeightCS, data.adjustWeightKernel, HDShaderIDs._IndirectDiffuseTextureRW, data.indirectDiffuseBuffer);

                    // Texture dimensions
                    int texWidth  = data.texWidth;
                    int texHeight = data.texHeight;

                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesXHR  = (texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesYHR  = (texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Compute the texture
                    ctx.cmd.DispatchCompute(data.adjustWeightCS, data.adjustWeightKernel, numTilesXHR, numTilesYHR, data.viewCount);
                });

                return(passData.indirectDiffuseBuffer);
            }
        }
 public TextureHandle Render(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectors, int frameCount, in HDUtils.PackedMipChainInfo depthMipInfo, ShaderVariablesRaytracing shaderVariablesRaytracing, TextureHandle rayCountTexture)
Example #26
        TextureHandle RenderIndirectDiffusePerformance(RenderGraph renderGraph, HDCamera hdCamera,
                                                       TextureHandle depthPyramid, TextureHandle stencilBuffer, TextureHandle normalBuffer, TextureHandle motionVectors, TextureHandle historyValidationTexture,
                                                       TextureHandle rayCountTexture, Texture skyTexture,
                                                       ShaderVariablesRaytracing shaderVariablesRaytracing)
        {
            // Handle to the final result
            TextureHandle rtgiResult;

            // Fetch all the settings
            GlobalIllumination settings = hdCamera.volumeStack.GetComponent <GlobalIllumination>();

            TextureHandle directionBuffer = DirGenRTGI(renderGraph, hdCamera, settings, depthPyramid, normalBuffer);

            DeferredLightingRTParameters deferredParameters = PrepareIndirectDiffuseDeferredLightingRTParameters(hdCamera);
            TextureHandle lightingBuffer = DeferredLightingRT(renderGraph, in deferredParameters, directionBuffer, depthPyramid, normalBuffer, skyTexture, rayCountTexture);

            rtgiResult = UpscaleRTGI(renderGraph, hdCamera, settings, depthPyramid, normalBuffer, lightingBuffer, directionBuffer);

            // Denoise if required
            rtgiResult = DenoiseRTGI(renderGraph, hdCamera, rtgiResult, depthPyramid, normalBuffer, motionVectors, historyValidationTexture);

            // Adjust the weight
            rtgiResult = AdjustRTGIWeight(renderGraph, hdCamera, rtgiResult, depthPyramid, stencilBuffer);

            return(rtgiResult);
        }
        TextureHandle EvaluateShadowDebugView(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle screenSpaceShadowArray)
        {
            // Bail out if ray tracing is not supported or the requested shadow index is out of range
            if (!rayTracingSupported || (m_ScreenSpaceShadowChannelSlot <= m_CurrentDebugDisplaySettings.data.screenSpaceShadowIndex))
            {
                return(m_RenderGraph.defaultResources.blackTextureXR);
            }

            using (var builder = renderGraph.AddRenderPass <ScreenSpaceShadowDebugPassData>("Screen Space Shadows Debug", out var passData, ProfilingSampler.Get(HDProfileId.ScreenSpaceShadowsDebug)))
            {
                passData.parameters             = PrepareSSShadowDebugParameters(hdCamera, (int)m_CurrentDebugDisplaySettings.data.screenSpaceShadowIndex);
                passData.screenSpaceShadowArray = builder.ReadTexture(screenSpaceShadowArray);
                passData.outputBuffer           = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "EvaluateShadowDebug"
                }));

                builder.SetRenderFunc(
                    (ScreenSpaceShadowDebugPassData data, RenderGraphContext context) =>
                {
                    SSShadowDebugResources resources = new SSShadowDebugResources();
                    resources.screenSpaceShadowArray = data.screenSpaceShadowArray;
                    resources.outputBuffer           = data.outputBuffer;
                    ExecuteShadowDebugView(context.cmd, data.parameters, resources);
                });
                return(passData.outputBuffer);
            }
        }
Example #28
        TextureHandle QualityRTGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle rayCountTexture)
        {
            using (var builder = renderGraph.AddRenderPass <TraceQualityRTGIPassData>("Quality RT Indirect Diffuse", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingIndirectDiffuseEvaluation)))
            {
                builder.EnableAsyncCompute(false);

                var settings = hdCamera.volumeStack.GetComponent <GlobalIllumination>();

                // Set the camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Evaluation parameters
                passData.rayLength   = settings.rayLength;
                passData.sampleCount = settings.sampleCount.value;
                passData.clampValue  = settings.clampValue;
                passData.bounceCount = settings.bounceCount.value;

                // Grab the additional parameters
                passData.indirectDiffuseRT           = m_Asset.renderPipelineRayTracingResources.indirectDiffuseRaytracingRT;
                passData.accelerationStructure       = RequestAccelerationStructure();
                passData.lightCluster                = RequestLightCluster();
                passData.skyTexture                  = m_SkyManager.GetSkyReflection(hdCamera);
                passData.shaderVariablesRayTracingCB = m_ShaderVariablesRayTracingCB;
                passData.ditheredTextureSet          = GetBlueNoiseManager().DitheredTextureSet8SPP();

                passData.depthBuffer     = builder.ReadTexture(depthPyramid);
                passData.normalBuffer    = builder.ReadTexture(normalBuffer);
                passData.rayCountTexture = builder.ReadWriteTexture(rayCountTexture);
                passData.outputBuffer    = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Ray Traced Indirect Diffuse"
                }));

                builder.SetRenderFunc(
                    (TraceQualityRTGIPassData data, RenderGraphContext ctx) =>
                {
                    // Define the shader pass to use for the indirect diffuse pass
                    ctx.cmd.SetRayTracingShaderPass(data.indirectDiffuseRT, "IndirectDXR");

                    // Set the acceleration structure for the pass
                    ctx.cmd.SetRayTracingAccelerationStructure(data.indirectDiffuseRT, HDShaderIDs._RaytracingAccelerationStructureName, data.accelerationStructure);

                    // Inject the ray-tracing sampling data
                    BlueNoise.BindDitheredTextureSet(ctx.cmd, data.ditheredTextureSet);

                    // Set the data for the ray generation
                    ctx.cmd.SetRayTracingTextureParam(data.indirectDiffuseRT, HDShaderIDs._IndirectDiffuseTextureRW, data.outputBuffer);
                    ctx.cmd.SetRayTracingTextureParam(data.indirectDiffuseRT, HDShaderIDs._DepthTexture, data.depthBuffer);
                    ctx.cmd.SetRayTracingTextureParam(data.indirectDiffuseRT, HDShaderIDs._NormalBufferTexture, data.normalBuffer);

                    // Set ray count texture
                    ctx.cmd.SetRayTracingTextureParam(data.indirectDiffuseRT, HDShaderIDs._RayCountTexture, data.rayCountTexture);

                    // LightLoop data
                    data.lightCluster.BindLightClusterData(ctx.cmd);

                    // Set the data for the ray miss
                    ctx.cmd.SetRayTracingTextureParam(data.indirectDiffuseRT, HDShaderIDs._SkyTexture, data.skyTexture);

                    // Update global constant buffer
                    data.shaderVariablesRayTracingCB._RaytracingIntensityClamp      = data.clampValue;
                    data.shaderVariablesRayTracingCB._RaytracingIncludeSky          = 1;
                    data.shaderVariablesRayTracingCB._RaytracingRayMaxLength        = data.rayLength;
                    data.shaderVariablesRayTracingCB._RaytracingNumSamples          = data.sampleCount;
                    data.shaderVariablesRayTracingCB._RaytracingMaxRecursion        = data.bounceCount;
                    data.shaderVariablesRayTracingCB._RayTracingDiffuseLightingOnly = 1;
                    ConstantBuffer.PushGlobal(ctx.cmd, data.shaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                    // Only use the shader variant that has multi bounce if the bounce count > 1
                    CoreUtils.SetKeyword(ctx.cmd, "MULTI_BOUNCE_INDIRECT", data.bounceCount > 1);

                    // Run the computation
                    ctx.cmd.DispatchRays(data.indirectDiffuseRT, m_RayGenIndirectDiffuseIntegrationName, (uint)data.texWidth, (uint)data.texHeight, (uint)data.viewCount);

                    // Disable the keywords we do not need anymore
                    CoreUtils.SetKeyword(ctx.cmd, "MULTI_BOUNCE_INDIRECT", false);
                });

                return(passData.outputBuffer);
            }
        }
        bool RenderLightScreenSpaceShadows(RenderGraph renderGraph, HDCamera hdCamera, PrepassOutput prepassOutput, TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer, TextureHandle rayCountTexture, TextureHandle screenSpaceShadowArray)
        {
            // Loop through all the potential screen space light shadows
            for (int lightIdx = 0; lightIdx < m_ScreenSpaceShadowIndex; ++lightIdx)
            {
                // Skip invalid slots (e.g. the one used by the directional light, which was handled earlier)
                if (!m_CurrentScreenSpaceShadowData[lightIdx].valid)
                {
                    continue;
                }

                // Fetch the light data and additional light data
                LightData             currentLight = m_lightList.lights[m_CurrentScreenSpaceShadowData[lightIdx].lightDataIndex];
                HDAdditionalLightData currentAdditionalLightData = m_CurrentScreenSpaceShadowData[lightIdx].additionalLightData;

                // Trigger the right algorithm based on the light type
                switch (currentLight.lightType)
                {
                case GPULightType.Rectangle:
                {
                    RenderAreaScreenSpaceShadow(renderGraph, hdCamera, currentLight, currentAdditionalLightData, m_CurrentScreenSpaceShadowData[lightIdx].lightDataIndex,
                                                prepassOutput, depthBuffer, normalBuffer, motionVectorsBuffer, rayCountTexture, screenSpaceShadowArray);
                }
                break;

                case GPULightType.Point:
                case GPULightType.Spot:
                {
                    RenderPunctualScreenSpaceShadow(renderGraph, hdCamera, currentLight, currentAdditionalLightData, m_CurrentScreenSpaceShadowData[lightIdx].lightDataIndex,
                                                    prepassOutput, depthBuffer, normalBuffer, motionVectorsBuffer, rayCountTexture, screenSpaceShadowArray);
                }
                break;
                }
            }
            return(true);
        }
Example #30
        TextureHandle RenderIndirectDiffuseQuality(RenderGraph renderGraph, HDCamera hdCamera,
                                                   TextureHandle depthPyramid, TextureHandle stencilBuffer, TextureHandle normalBuffer, TextureHandle motionVectors, TextureHandle historyValidationTexture,
                                                   TextureHandle rayCountTexture, Texture skyTexture,
                                                   ShaderVariablesRaytracing shaderVariablesRaytracing)
        {
            // Evaluate the signal
            TextureHandle rtgiResult = QualityRTGI(renderGraph, hdCamera, depthPyramid, normalBuffer, rayCountTexture);

            // Denoise if required
            rtgiResult = DenoiseRTGI(renderGraph, hdCamera, rtgiResult, depthPyramid, normalBuffer, motionVectors, historyValidationTexture);

            // Adjust the weight
            rtgiResult = AdjustRTGIWeight(renderGraph, hdCamera, rtgiResult, depthPyramid, stencilBuffer);

            return(rtgiResult);
        }
Example #31
		/// <summary>Registers a texture and returns a handle to the texture.</summary>
		/// <param name="texture">The texture data.</param>
		/// <param name="parameters">The parameters that specify how to process the texture.</param>
		/// <param name="handle">Receives the handle to the texture.</param>
		/// <returns>Whether loading the texture was successful.</returns>
		public virtual bool RegisterTexture(Textures.Texture texture, TextureParameters parameters, out TextureHandle handle) {
			handle = null;
			return false;
		}
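
The overload above is the plugin hook for registering an already-decoded texture: a concrete host is expected to override it, hand back a handle, and report success, while the base implementation always fails with a null handle. The sketch below is hypothetical; the stub Texture, TextureParameters and TextureHandle classes merely stand in for the host's real Textures.Texture, TextureParameters and TextureHandle types so the example compiles on its own, and SimpleHost is not an implementation from any actual host program.

        // Hypothetical sketch only: stub types stand in for the host interfaces.
        using System.Collections.Generic;

        class Texture { }                 // stand-in for Textures.Texture
        class TextureParameters { }       // stand-in for the processing parameters
        class TextureHandle { }           // stand-in for the opaque handle type

        class HostTextureHandle : TextureHandle
        {
            public Texture Source;
            public TextureParameters Parameters;
        }

        class HostInterfaceSketch
        {
            // Mirrors the virtual hook shown above: derived hosts override it.
            public virtual bool RegisterTexture(Texture texture, TextureParameters parameters, out TextureHandle handle)
            {
                handle = null;
                return false;
            }
        }

        class SimpleHost : HostInterfaceSketch
        {
            readonly List<HostTextureHandle> registered = new List<HostTextureHandle>();

            public override bool RegisterTexture(Texture texture, TextureParameters parameters, out TextureHandle handle)
            {
                if (texture == null)
                {
                    handle = null;
                    return false;   // mirror the base contract: null handle on failure
                }
                var hostHandle = new HostTextureHandle { Source = texture, Parameters = parameters };
                registered.Add(hostHandle);
                handle = hostHandle;
                return true;
            }
        }

A caller would test the returned bool before using the handle, since failure leaves the handle null.
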
Example #32
        TextureHandle ContrastAdaptiveSharpeningPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle source)
        {
            var dynResHandler = DynamicResolutionHandler.instance;

            if (dynResHandler.DynamicResolutionEnabled() &&
                dynResHandler.filter == DynamicResUpscaleFilter.ContrastAdaptiveSharpen)
            {
                using (var builder = renderGraph.AddRenderPass <CASData>("Contrast Adaptive Sharpen", out var passData, ProfilingSampler.Get(HDProfileId.ContrastAdaptiveSharpen)))
                {
                    passData.source     = builder.ReadTexture(source);
                    passData.parameters = PrepareContrastAdaptiveSharpeningParameters(hdCamera);
                    TextureHandle dest = GetPostprocessOutputHandle(renderGraph, "Contrast Adaptive Sharpen Destination");
                    passData.destination = builder.WriteTexture(dest);

                    builder.SetRenderFunc(
                        (CASData data, RenderGraphContext ctx) =>
                    {
                        DoContrastAdaptiveSharpening(data.parameters, ctx.cmd, data.source, data.destination);
                    });

                    source = passData.destination;
                }
            }
            return(source);
        }