Example 1
        TextureHandle UberPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle logLut, TextureHandle bloomTexture, TextureHandle source)
        {
            bool isSceneView = hdCamera.camera.cameraType == CameraType.SceneView;

            using (var builder = renderGraph.AddRenderPass <UberPostPassData>("Uber Post", out var passData, ProfilingSampler.Get(HDProfileId.UberPost)))
            {
                TextureHandle dest = GetPostprocessOutputHandle(renderGraph, "Uber Post Destination");

                passData.parameters   = PrepareUberPostParameters(hdCamera, isSceneView);
                passData.source       = builder.ReadTexture(source);
                passData.bloomTexture = builder.ReadTexture(bloomTexture);
                passData.logLut       = builder.ReadTexture(logLut);
                passData.destination  = builder.WriteTexture(dest);

                builder.SetRenderFunc(
                    (UberPostPassData data, RenderGraphContext ctx) =>
                {
                    DoUberPostProcess(data.parameters,
                                      data.source,
                                      data.destination,
                                      data.logLut,
                                      data.bloomTexture,
                                      ctx.cmd);
                });

                source = passData.destination;
            }

            return(source);
        }
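Note: the UberPostPassData type captured by the builder is not shown on this page. A minimal sketch of what it presumably looks like, with the field names inferred from the assignments above (the parameter type name UberPostParameters is an assumption), is:

        // Hypothetical pass data for the Uber Post example; fields mirror the handles assigned above.
        class UberPostPassData
        {
            public UberPostParameters parameters;   // assumed name of the type returned by PrepareUberPostParameters
            public TextureHandle      source;
            public TextureHandle      bloomTexture;
            public TextureHandle      logLut;
            public TextureHandle      destination;
        }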
Example 2
        TextureHandle FXAAPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle source)
        {
            if (DynamicResolutionHandler.instance.DynamicResolutionEnabled() &&     // Dynamic resolution is on.
                hdCamera.antialiasing == HDAdditionalCameraData.AntialiasingMode.FastApproximateAntialiasing &&
                m_AntialiasingFS)
            {
                using (var builder = renderGraph.AddRenderPass <FXAAData>("FXAA", out var passData, ProfilingSampler.Get(HDProfileId.FXAA)))
                {
                    passData.source     = builder.ReadTexture(source);
                    passData.parameters = PrepareFXAAParameters(hdCamera);
                    TextureHandle dest = GetPostprocessOutputHandle(renderGraph, "FXAA Destination");
                    passData.destination = builder.WriteTexture(dest);

                    builder.SetRenderFunc(
                        (FXAAData data, RenderGraphContext ctx) =>
                    {
                        DoFXAA(data.parameters, ctx.cmd, data.source, data.destination);
                    });

                    source = passData.destination;
                }
            }

            return(source);
        }
Example 3
        TextureHandle ColorGradingPass(RenderGraph renderGraph, HDCamera hdCamera)
        {
            TextureHandle logLutOutput;

            using (var builder = renderGraph.AddRenderPass <ColorGradingPassData>("Color Grading", out var passData, ProfilingSampler.Get(HDProfileId.ColorGradingLUTBuilder)))
            {
                TextureHandle logLut = renderGraph.CreateTexture(new TextureDesc(m_LutSize, m_LutSize)
                {
                    name              = "Color Grading Log Lut",
                    dimension         = TextureDimension.Tex3D,
                    slices            = m_LutSize,
                    depthBufferBits   = DepthBits.None,
                    colorFormat       = m_LutFormat,
                    filterMode        = FilterMode.Bilinear,
                    wrapMode          = TextureWrapMode.Clamp,
                    anisoLevel        = 0,
                    useMipMap         = false,
                    enableRandomWrite = true
                });

                passData.parameters = PrepareColorGradingParameters();
                passData.logLut     = builder.WriteTexture(logLut);
                logLutOutput        = passData.logLut;

                builder.SetRenderFunc(
                    (ColorGradingPassData data, RenderGraphContext ctx) =>
                {
                    DoColorGrading(data.parameters, data.logLut, ctx.cmd);
                });
            }

            return(logLutOutput);
        }
Example 4
        TextureHandle PaniniProjectionPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle source)
        {
            bool isSceneView = hdCamera.camera.cameraType == CameraType.SceneView;

            if (m_PaniniProjection.IsActive() && !isSceneView && m_PaniniProjectionFS)
            {
                using (var builder = renderGraph.AddRenderPass <PaniniProjectionData>("Panini Projection", out var passData, ProfilingSampler.Get(HDProfileId.PaniniProjection)))
                {
                    passData.source     = builder.ReadTexture(source);
                    passData.parameters = PreparePaniniProjectionParameters(hdCamera);
                    TextureHandle dest = GetPostprocessOutputHandle(renderGraph, "Panini Projection Destination");
                    passData.destination = builder.WriteTexture(dest);

                    builder.SetRenderFunc(
                        (PaniniProjectionData data, RenderGraphContext ctx) =>
                    {
                        DoPaniniProjection(data.parameters, ctx.cmd, data.source, data.destination);
                    });

                    source = passData.destination;
                }
            }

            return(source);
        }
Example 5
        TextureHandle SMAAPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthBuffer, TextureHandle source)
        {
            using (var builder = renderGraph.AddRenderPass <SMAAData>("Subpixel Morphological Anti-Aliasing", out var passData, ProfilingSampler.Get(HDProfileId.SMAA)))
            {
                passData.source     = builder.ReadTexture(source);
                passData.parameters = PrepareSMAAParameters(hdCamera);
                builder.ReadTexture(depthBuffer);
                passData.depthBuffer = builder.WriteTexture(depthBuffer);
                passData.smaaEdgeTex = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R8G8B8A8_UNorm, enableRandomWrite = true, name = "SMAA Edge Texture"
                });
                passData.smaaBlendTex = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R8G8B8A8_UNorm, enableRandomWrite = true, name = "SMAA Blend Texture"
                });

                TextureHandle dest = GetPostprocessOutputHandle(renderGraph, "SMAA Destination");
                passData.destination = builder.WriteTexture(dest);

                builder.SetRenderFunc(
                    (SMAAData data, RenderGraphContext ctx) =>
                {
                    DoSMAA(data.parameters, ctx.cmd, data.source,
                           data.smaaEdgeTex,
                           data.smaaBlendTex,
                           data.destination,
                           data.depthBuffer);
                });

                source = passData.destination;
            }

            return(source);
        }
Example 6
        TextureHandle ContrastAdaptiveSharpeningPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle source)
        {
            var dynResHandler = DynamicResolutionHandler.instance;

            if (dynResHandler.DynamicResolutionEnabled() &&
                dynResHandler.filter == DynamicResUpscaleFilter.ContrastAdaptiveSharpen)
            {
                using (var builder = renderGraph.AddRenderPass <CASData>("Contrast Adaptive Sharpen", out var passData, ProfilingSampler.Get(HDProfileId.ContrastAdaptiveSharpen)))
                {
                    passData.source     = builder.ReadTexture(source);
                    passData.parameters = PrepareContrastAdaptiveSharpeningParameters(hdCamera);
                    TextureHandle dest = GetPostprocessOutputHandle(renderGraph, "Contrast Adaptive Sharpen Destination");
                    passData.destination = builder.WriteTexture(dest);

                    builder.SetRenderFunc(
                        (CASData data, RenderGraphContext ctx) =>
                    {
                        DoContrastAdaptiveSharpening(data.parameters, ctx.cmd, data.source, data.destination);
                    });

                    source = passData.destination;
                }
            }
            return(source);
        }
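Note: Examples 1 to 6 all follow the same shape: read the incoming source, write a destination obtained from GetPostprocessOutputHandle, and return the new handle so the next pass can consume it. A hypothetical chaining sketch (the real HDRP post-process pipeline decides ordering and inputs internally, so this is only illustrative) is:

        // Illustrative only: chains the pass helpers shown above; actual HDRP ordering may differ.
        TextureHandle RunPostProcessChain(RenderGraph renderGraph, HDCamera hdCamera,
                                           TextureHandle source, TextureHandle bloomTexture)
        {
            TextureHandle logLut = ColorGradingPass(renderGraph, hdCamera);           // build the 3D LUT
            source = PaniniProjectionPass(renderGraph, hdCamera, source);             // optional distortion
            source = UberPass(renderGraph, hdCamera, logLut, bloomTexture, source);   // grading + bloom composite
            source = FXAAPass(renderGraph, hdCamera, source);                         // cheap anti-aliasing
            source = ContrastAdaptiveSharpeningPass(renderGraph, hdCamera, source);   // sharpen after upscale
            return source;
        }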
Example 7
        public void EvaluateClusterDebugView(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthStencilBuffer, TextureHandle depthPyramid)
        {
            TextureHandle debugTexture;

            using (var builder = renderGraph.AddRenderPass <LightClusterDebugPassData>("Debug Texture for the Light Cluster", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingDebugCluster)))
            {
                builder.EnableAsyncCompute(false);

                passData.texWidth                = hdCamera.actualWidth;
                passData.texHeight               = hdCamera.actualHeight;
                passData.clusterCellSize         = clusterCellSize;
                passData.lightCluster            = m_LightCluster;
                passData.lightClusterDebugCS     = m_RenderPipelineRayTracingResources.lightClusterDebugCS;
                passData.lightClusterDebugKernel = passData.lightClusterDebugCS.FindKernel("DebugLightCluster");
                passData.debugMaterial           = m_DebugMaterial;
                passData.depthStencilBuffer      = builder.UseDepthBuffer(depthStencilBuffer, DepthAccess.Read);
                passData.depthPyramid            = builder.ReadTexture(depthStencilBuffer);
                passData.outputBuffer            = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Light Cluster Debug Texture"
                }));

                builder.SetRenderFunc(
                    (LightClusterDebugPassData data, RenderGraphContext ctx) =>
                {
                    var debugMaterialProperties = ctx.renderGraphPool.GetTempMaterialPropertyBlock();

                    // Bind the output texture
                    CoreUtils.SetRenderTarget(ctx.cmd, data.outputBuffer, data.depthStencilBuffer, clearFlag: ClearFlag.Color, clearColor: Color.black);

                    // Inject all the parameters to the debug compute
                    ctx.cmd.SetComputeBufferParam(data.lightClusterDebugCS, data.lightClusterDebugKernel, HDShaderIDs._RaytracingLightCluster, data.lightCluster);
                    ctx.cmd.SetComputeVectorParam(data.lightClusterDebugCS, _ClusterCellSize, data.clusterCellSize);
                    ctx.cmd.SetComputeTextureParam(data.lightClusterDebugCS, data.lightClusterDebugKernel, HDShaderIDs._CameraDepthTexture, data.depthStencilBuffer);

                    // Target output texture
                    ctx.cmd.SetComputeTextureParam(data.lightClusterDebugCS, data.lightClusterDebugKernel, _DebutLightClusterTexture, data.outputBuffer);

                    // Dispatch the compute
                    int lightVolumesTileSize = 8;
                    int numTilesX            = (data.texWidth + (lightVolumesTileSize - 1)) / lightVolumesTileSize;
                    int numTilesY            = (data.texHeight + (lightVolumesTileSize - 1)) / lightVolumesTileSize;

                    ctx.cmd.DispatchCompute(data.lightClusterDebugCS, data.lightClusterDebugKernel, numTilesX, numTilesY, 1);

                    // Bind the parameters
                    debugMaterialProperties.SetBuffer(HDShaderIDs._RaytracingLightCluster, data.lightCluster);
                    debugMaterialProperties.SetVector(_ClusterCellSize, data.clusterCellSize);
                    debugMaterialProperties.SetTexture(HDShaderIDs._CameraDepthTexture, data.depthPyramid);

                    // Draw the faces
                    ctx.cmd.DrawProcedural(Matrix4x4.identity, data.debugMaterial, 1, MeshTopology.Lines, 48, 64 * 64 * 32, debugMaterialProperties);
                    ctx.cmd.DrawProcedural(Matrix4x4.identity, data.debugMaterial, 0, MeshTopology.Triangles, 36, 64 * 64 * 32, debugMaterialProperties);
                });

                debugTexture = passData.outputBuffer;
            }

            m_RenderPipeline.PushFullScreenDebugTexture(renderGraph, debugTexture, FullScreenDebugMode.LightCluster);
        }
        public void EvaluateClusterDebugView(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthStencilBuffer, TextureHandle depthPyramid)
        {
            TextureHandle debugTexture;

            using (var builder = renderGraph.AddRenderPass <LightClusterDebugPassData>("Debug Texture for the Light Cluster", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingDebugCluster)))
            {
                builder.EnableAsyncCompute(false);

                passData.parameters         = PrepareLightClusterDebugParameters(hdCamera);
                passData.depthStencilBuffer = builder.UseDepthBuffer(depthStencilBuffer, DepthAccess.Read);
                passData.depthPyramid       = builder.ReadTexture(depthStencilBuffer);
                passData.outputBuffer       = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Light Cluster Debug Texture"
                }));

                builder.SetRenderFunc(
                    (LightClusterDebugPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    LightClusterDebugResources resources = new LightClusterDebugResources();
                    resources.depthStencilBuffer         = data.depthStencilBuffer;
                    resources.depthTexture             = data.depthPyramid;
                    resources.debugLightClusterTexture = data.outputBuffer;
                    ExecuteLightClusterDebug(ctx.cmd, data.parameters, resources);
                });

                debugTexture = passData.outputBuffer;
            }

            m_RenderPipeline.PushFullScreenDebugTexture(renderGraph, debugTexture, FullScreenDebugMode.LightCluster);
        }
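Note: the two EvaluateClusterDebugView variants above differ only in how they carry state: the first copies every field into the pass data and issues the commands inline, while the second packs them into parameter and resource structs consumed by ExecuteLightClusterDebug. A sketch of the resources struct implied by the second variant (the field names come from the assignments above; the type itself is not shown on this page, and the fields could equally be declared as TextureHandle instead of RTHandle) is:

        // Hypothetical layout of LightClusterDebugResources as implied by the render function above.
        struct LightClusterDebugResources
        {
            public RTHandle depthStencilBuffer;        // bound as the depth target
            public RTHandle depthTexture;              // depth pyramid sampled by the debug material
            public RTHandle debugLightClusterTexture;  // output written by the compute and draw calls
        }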
        TextureHandle UpscaleSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination giSettings, HDUtils.PackedMipChainInfo info, TextureHandle depthPyramid, TextureHandle inputBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <UpscaleSSGIPassData>("Upscale SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGIUpscale)))
            {
                builder.EnableAsyncCompute(false);

                passData.parameters   = PrepareSSGIUpscaleParameters(hdCamera, giSettings, info);
                passData.depthTexture = builder.ReadTexture(depthPyramid);
                passData.inputBuffer  = builder.ReadTexture(inputBuffer);
                passData.outputBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Final"
                }));

                builder.SetRenderFunc(
                    (UpscaleSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSGIUpscaleResources resources = new SSGIUpscaleResources();
                    resources.depthTexture         = data.depthTexture;
                    resources.inputBuffer          = data.inputBuffer;
                    resources.outputBuffer         = data.outputBuffer;
                    ExecuteSSGIUpscale(ctx.cmd, data.parameters, resources);
                });
                return(passData.outputBuffer);
            }
        }
Example 10
        TextureHandle DoCopyAlpha(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle source)
        {
            // Save the alpha and apply it back into the final pass if rendering in fp16 and post-processing in r11g11b10
            if (m_KeepAlpha)
            {
                using (var builder = renderGraph.AddRenderPass <AlphaCopyPassData>("Alpha Copy", out var passData, ProfilingSampler.Get(HDProfileId.AlphaCopy)))
                {
                    passData.parameters  = PrepareCopyAlphaParameters(hdCamera);
                    passData.source      = builder.ReadTexture(source);
                    passData.outputAlpha = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                    {
                        name = "Alpha Channel Copy", colorFormat = GraphicsFormat.R16_SFloat, enableRandomWrite = true
                    }));

                    builder.SetRenderFunc(
                        (AlphaCopyPassData data, RenderGraphContext ctx) =>
                    {
                        DoCopyAlpha(data.parameters, data.source, data.outputAlpha, ctx.cmd);
                    });

                    return(passData.outputAlpha);
                }
            }

            return(renderGraph.defaultResources.whiteTextureXR);
        }
Example 11
        internal void PushCameraTexture(
            RenderGraph renderGraph,
            AOVBuffers aovBufferId,
            HDCamera camera,
            TextureHandle source,
            List <RTHandle> targets
            )
        {
            if (!isValid || m_RequestedAOVBuffers == null)
            {
                return;
            }

            Assert.IsNotNull(m_RequestedAOVBuffers);
            Assert.IsNotNull(targets);

            var index = Array.IndexOf(m_RequestedAOVBuffers, aovBufferId);

            if (index == -1)
            {
                return;
            }

            using (var builder = renderGraph.AddRenderPass <PushCameraTexturePassData>("Push AOV Camera Texture", out var passData, ProfilingSampler.Get(HDProfileId.AOVOutput + (int)aovBufferId)))
            {
                passData.source = builder.ReadTexture(source);
                passData.target = targets[index];

                builder.SetRenderFunc(
                    (PushCameraTexturePassData data, RenderGraphContext ctx) =>
                {
                    HDUtils.BlitCameraTexture(ctx.cmd, data.source, data.target);
                });
            }
        }
Example 12
        public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, DiffuseDenoiserParameters tfParameters, TextureHandle noisyBuffer, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle outputBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <DiffuseDenoiserPassData>("DiffuseDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.DiffuseFilter)))
            {
                // Cannot run in async
                builder.EnableAsyncCompute(false);

                // Fetch all the resources
                passData.parameters         = tfParameters;
                passData.depthStencilBuffer = builder.ReadTexture(depthPyramid);
                passData.normalBuffer       = builder.ReadTexture(normalBuffer);
                passData.noisyBuffer        = builder.ReadTexture(noisyBuffer);
                passData.intermediateBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "DiffuseDenoiserIntermediate"
                });
                passData.outputBuffer = builder.WriteTexture(outputBuffer);

                builder.SetRenderFunc(
                    (DiffuseDenoiserPassData data, RenderGraphContext ctx) =>
                {
                    DiffuseDenoiserResources ddResources = new DiffuseDenoiserResources();
                    ddResources.depthStencilBuffer       = data.depthStencilBuffer;
                    ddResources.normalBuffer             = data.normalBuffer;
                    ddResources.noisyBuffer        = data.noisyBuffer;
                    ddResources.intermediateBuffer = data.intermediateBuffer;
                    ddResources.outputBuffer       = data.outputBuffer;
                    DenoiseBuffer(ctx.cmd, data.parameters, ddResources);
                });
                return(passData.outputBuffer);
            }
        }
Example 13
        TextureHandle CombineRTSSS(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle rayTracedSSS, TextureHandle depthStencilBuffer, TextureHandle sssColor, TextureHandle ssgiBuffer, TextureHandle diffuseLightingBuffer, TextureHandle colorBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <ComposeRTSSSPassData>("Composing the result of RTSSS", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingSSSCompose)))
            {
                builder.EnableAsyncCompute(false);

                passData.parameters            = PrepareSSSCombineParameters(hdCamera);
                passData.depthStencilBuffer    = builder.UseDepthBuffer(depthStencilBuffer, DepthAccess.Read);
                passData.sssColor              = builder.ReadTexture(sssColor);
                passData.ssgiBuffer            = passData.parameters.validSSGI ? builder.ReadTexture(ssgiBuffer) : renderGraph.defaultResources.blackTextureXR;
                passData.diffuseLightingBuffer = builder.ReadTexture(diffuseLightingBuffer);
                passData.subsurfaceBuffer      = builder.ReadTexture(rayTracedSSS);
                passData.colorBuffer           = builder.ReadWriteTexture(colorBuffer);

                builder.SetRenderFunc(
                    (ComposeRTSSSPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSSCombineResources ssscResources   = new SSSCombineResources();
                    ssscResources.depthStencilBuffer    = data.depthStencilBuffer;
                    ssscResources.sssColor              = data.sssColor;
                    ssscResources.ssgiBuffer            = data.ssgiBuffer;
                    ssscResources.diffuseLightingBuffer = data.diffuseLightingBuffer;
                    ssscResources.subsurfaceBuffer      = data.subsurfaceBuffer;
                    ssscResources.outputColorBuffer     = data.colorBuffer;
                    ExecuteCombineSubsurfaceScattering(ctx.cmd, data.parameters, ssscResources);
                });

                return(passData.colorBuffer);
            }
        }
        TextureHandle RenderPostProcess(RenderGraph renderGraph,
                                        TextureHandle inputColor,
                                        TextureHandle depthBuffer,
                                        TextureHandle backBuffer,
                                        CullingResults cullResults,
                                        HDCamera hdCamera)
        {
            PostProcessParameters parameters = PreparePostProcess(cullResults, hdCamera);

            TextureHandle afterPostProcessBuffer = renderGraph.ImportTexture(TextureXR.GetBlackTexture());
            TextureHandle dest = HDUtils.PostProcessIsFinalPass(parameters.hdCamera) ? backBuffer : renderGraph.CreateTexture(
                new TextureDesc(Vector2.one, true, true)
            {
                colorFormat = GetColorBufferFormat(), name = "Intermediate Postprocess buffer"
            });

            if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.AfterPostprocess))
            {
                // We render AfterPostProcess objects first into a separate buffer that will be composited in the final post process pass
                using (var builder = renderGraph.AddRenderPass <AfterPostProcessPassData>("After Post-Process", out var passData, ProfilingSampler.Get(HDProfileId.AfterPostProcessing)))
                {
                    passData.parameters             = parameters;
                    passData.afterPostProcessBuffer = builder.UseColorBuffer(renderGraph.CreateTexture(
                                                                                 new TextureDesc(Vector2.one, true, true)
                    {
                        colorFormat = GraphicsFormat.R8G8B8A8_SRGB, clearBuffer = true, clearColor = Color.black, name = "OffScreen AfterPostProcess"
                    }), 0);
                    if (passData.parameters.useDepthBuffer)
                    {
                        passData.depthStencilBuffer = builder.UseDepthBuffer(depthBuffer, DepthAccess.ReadWrite);
                    }
                    passData.opaqueAfterPostprocessRL      = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.opaqueAfterPPDesc));
                    passData.transparentAfterPostprocessRL = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.transparentAfterPPDesc));

                    builder.SetRenderFunc(
                        (AfterPostProcessPassData data, RenderGraphContext ctx) =>
                    {
                        RenderAfterPostProcess(data.parameters
                                               , ctx.resources.GetRendererList(data.opaqueAfterPostprocessRL)
                                               , ctx.resources.GetRendererList(data.transparentAfterPostprocessRL)
                                               , ctx.renderContext, ctx.cmd);
                    });

                    afterPostProcessBuffer = passData.afterPostProcessBuffer;
                }
            }

            m_PostProcessSystem.Render(
                renderGraph,
                parameters.hdCamera,
                parameters.blueNoise,
                inputColor,
                afterPostProcessBuffer,
                depthBuffer,
                dest,
                parameters.flipYInPostProcess
                );

            return(dest);
        }
        TextureHandle ConvertSSGI(RenderGraph renderGraph, HDCamera hdCamera, bool halfResolution, TextureHandle depthPyramid, TextureHandle stencilBuffer, TextureHandle normalBuffer, TextureHandle inoutputBuffer0, TextureHandle inoutputBuffer1)
        {
            using (var builder = renderGraph.AddRenderPass <ConvertSSGIPassData>("Upscale SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGIConvert)))
            {
                builder.EnableAsyncCompute(false);

                passData.parameters      = PrepareSSGIConvertParameters(hdCamera, halfResolution);
                passData.depthTexture    = builder.ReadTexture(depthPyramid);
                passData.stencilBuffer   = builder.ReadTexture(stencilBuffer);
                passData.normalBuffer    = builder.ReadTexture(normalBuffer);
                passData.inoutputBuffer0 = builder.ReadWriteTexture(inoutputBuffer0);
                passData.inoutputBuffer1 = builder.ReadWriteTexture(inoutputBuffer1);

                builder.SetRenderFunc(
                    (ConvertSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSGIConvertResources resources = new SSGIConvertResources();
                    resources.depthTexture         = data.depthTexture;
                    resources.stencilBuffer        = data.stencilBuffer;
                    resources.normalBuffer         = data.normalBuffer;
                    resources.inoutBuffer0         = data.inoutputBuffer0;
                    resources.inputBufer1          = data.inoutputBuffer1;
                    ExecuteSSGIConversion(ctx.cmd, data.parameters, resources);
                });
                return(passData.inoutputBuffer0);
            }
        }
Example 16
        RenderGraphResource CopyStencilBufferIfNeeded(RenderGraph renderGraph, HDCamera hdCamera, RenderGraphResource depthStencilBuffer, Material copyStencil, Material copyStencilForSSR)
        {
            // TODO: Move the early out outside of the rendering function, otherwise we add a pass for nothing.
            using (var builder = renderGraph.AddRenderPass <CopyStencilBufferPassData>("Copy Stencil", out var passData))
            {
                passData.hdCamera           = hdCamera;
                passData.depthStencilBuffer = builder.ReadTexture(depthStencilBuffer);
                passData.stencilBufferCopy  = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R8_UNorm, enableRandomWrite = true, name = "CameraStencilCopy"
                }));
                passData.copyStencil       = copyStencil;
                passData.copyStencilForSSR = copyStencilForSSR;

                builder.SetRenderFunc(
                    (CopyStencilBufferPassData data, RenderGraphContext context) =>
                {
                    RTHandle depthBuffer = context.resources.GetTexture(data.depthStencilBuffer);
                    RTHandle stencilCopy = context.resources.GetTexture(data.stencilBufferCopy);
                    CopyStencilBufferIfNeeded(context.cmd, data.hdCamera, depthBuffer, stencilCopy, data.copyStencil, data.copyStencilForSSR);
                });

                return(passData.stencilBufferCopy);
            }
        }
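Note: this older-style example fetches RTHandles explicitly through context.resources.GetTexture, whereas most of the other examples on this page pass the handles straight into the execute call, relying on an implicit conversion. Under that assumption, the body of the render function could presumably also be written as:

        // Hypothetical equivalent of the render function above, assuming the implicit
        // TextureHandle-to-RTHandle conversion relied on by the other examples on this page.
        builder.SetRenderFunc(
            (CopyStencilBufferPassData data, RenderGraphContext context) =>
        {
            CopyStencilBufferIfNeeded(context.cmd, data.hdCamera, data.depthStencilBuffer,
                                      data.stencilBufferCopy, data.copyStencil, data.copyStencilForSSR);
        });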
        void RenderFullScreenDebug(RenderGraph renderGraph, TextureHandle colorBuffer, TextureHandle depthBuffer, CullingResults cull, HDCamera hdCamera)
        {
            TextureHandle       fullscreenDebugOutput = TextureHandle.nullHandle;
            ComputeBufferHandle fullscreenDebugBuffer = ComputeBufferHandle.nullHandle;

            using (var builder = renderGraph.AddRenderPass <FullScreenDebugPassData>("FullScreen Debug", out var passData))
            {
                passData.parameters   = PrepareFullScreenDebugParameters(hdCamera, cull);
                passData.output       = builder.WriteTexture(colorBuffer);
                passData.depthBuffer  = builder.ReadTexture(depthBuffer);
                passData.debugBuffer  = builder.WriteComputeBuffer(renderGraph.CreateComputeBuffer(new ComputeBufferDesc(hdCamera.actualWidth * hdCamera.actualHeight * hdCamera.viewCount, sizeof(uint))));
                passData.rendererList = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.rendererList));

                builder.SetRenderFunc(
                    (FullScreenDebugPassData data, RenderGraphContext ctx) =>
                {
                    RenderFullScreenDebug(data.parameters,
                                          data.output,
                                          data.depthBuffer,
                                          data.debugBuffer,
                                          data.rendererList,
                                          ctx.renderContext, ctx.cmd);
                });

                fullscreenDebugOutput = passData.output;
                fullscreenDebugBuffer = passData.debugBuffer;
            }

            m_DebugFullScreenComputeBuffer = fullscreenDebugBuffer;
            PushFullScreenDebugTexture(renderGraph, ResolveMSAAColor(renderGraph, hdCamera, fullscreenDebugOutput));
        }
Example 18
        TextureHandle StopNaNsPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle source)
        {
            // Optional NaN killer before post-processing kicks in
            bool stopNaNs = hdCamera.stopNaNs && m_StopNaNFS;

#if UNITY_EDITOR
            bool isSceneView = hdCamera.camera.cameraType == CameraType.SceneView;
            if (isSceneView)
            {
                stopNaNs = HDAdditionalSceneViewSettings.sceneViewStopNaNs;
            }
#endif
            if (stopNaNs)
            {
                using (var builder = renderGraph.AddRenderPass <StopNaNPassData>("Stop NaNs", out var passData, ProfilingSampler.Get(HDProfileId.StopNaNs)))
                {
                    passData.source     = builder.ReadTexture(source);
                    passData.parameters = PrepareStopNaNParameters(hdCamera);
                    TextureHandle dest = GetPostprocessOutputHandle(renderGraph, "Stop NaNs Destination");
                    passData.destination = builder.WriteTexture(dest);

                    builder.SetRenderFunc(
                        (StopNaNPassData data, RenderGraphContext ctx) =>
                    {
                        DoStopNaNs(data.parameters, ctx.cmd, data.source, data.destination);
                    });

                    return(passData.destination);
                }
            }

            return(source);
        }
Example 19
        public SSGIDenoiserOutput Denoise(RenderGraph renderGraph, HDCamera hdCamera,
                                          TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer, TextureHandle inputOutputBuffer0, TextureHandle inputOutputBuffer1,
                                          HDUtils.PackedMipChainInfo depthMipInfo, bool halfResolution = false, float historyValidity = 1.0f)
        {
            using (var builder = renderGraph.AddRenderPass <DenoiseSSGIPassData>("Denoise SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGIDenoise)))
            {
                builder.EnableAsyncCompute(false);

                // Input buffers
                passData.depthTexture        = builder.ReadTexture(depthPyramid);
                passData.normalBuffer        = builder.ReadTexture(normalBuffer);
                passData.motionVectorsBuffer = builder.ReadTexture(motionVectorsBuffer);


                // History buffer
                bool     historyRequireClear     = false;
                RTHandle indirectDiffuseHistory0 = RequestIndirectDiffuseHistory0(hdCamera, out historyRequireClear);
                passData.indirectDiffuseHistory0 = builder.ReadWriteTexture(renderGraph.ImportTexture(indirectDiffuseHistory0));
                RTHandle indirectDiffuseHistory1 = RequestIndirectDiffuseHistory1(hdCamera, out historyRequireClear);
                passData.indirectDiffuseHistory1 = builder.ReadWriteTexture(renderGraph.ImportTexture(indirectDiffuseHistory1));
                var historyDepthBuffer = halfResolution ? hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth1) : hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                passData.historyDepthBuffer = historyDepthBuffer != null ? builder.ReadTexture(renderGraph.ImportTexture(historyDepthBuffer)) : renderGraph.defaultResources.blackTextureXR;

                passData.intermediateBuffer0 = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Denoiser Intermediate0"
                });
                passData.intermediateBuffer1 = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Denoiser Intermediate1"
                });
                passData.inputOutputBuffer0 = builder.ReadWriteTexture(inputOutputBuffer0);
                passData.inputOutputBuffer1 = builder.ReadWriteTexture(inputOutputBuffer1);

                passData.parameters = PrepareSSGIDenoiserParameters(hdCamera, halfResolution, historyValidity, historyRequireClear, depthMipInfo);

                builder.SetRenderFunc(
                    (DenoiseSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSGIDenoiserResources resources   = new SSGIDenoiserResources();
                    resources.depthTexture            = data.depthTexture;
                    resources.normalBuffer            = data.normalBuffer;
                    resources.motionVectorsBuffer     = data.motionVectorsBuffer;
                    resources.indirectDiffuseHistory0 = data.indirectDiffuseHistory0;
                    resources.indirectDiffuseHistory1 = data.indirectDiffuseHistory1;
                    resources.historyDepthBuffer      = data.historyDepthBuffer;
                    resources.intermediateBuffer0     = data.intermediateBuffer0;
                    resources.intermediateBuffer1     = data.intermediateBuffer1;
                    resources.inputOutputBuffer0      = data.inputOutputBuffer0;
                    resources.inputOutputBuffer1      = data.inputOutputBuffer1;
                    Denoise(ctx.cmd, data.parameters, resources);
                });

                SSGIDenoiserOutput denoiserOutput = new SSGIDenoiserOutput();
                denoiserOutput.outputBuffer0 = inputOutputBuffer0;
                denoiserOutput.outputBuffer1 = inputOutputBuffer1;
                return(denoiserOutput);
            }
        }
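Note: the denoiser filters inputOutputBuffer0 and inputOutputBuffer1 in place and simply hands the same handles back. The shape of SSGIDenoiserOutput implied by the assignments above (the type itself is not shown on this page) is:

        // Hypothetical layout of the denoiser output, mirroring the assignments in the example.
        struct SSGIDenoiserOutput
        {
            public TextureHandle outputBuffer0;
            public TextureHandle outputBuffer1;
        }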
        TextureHandle EvaluateShadowDebugView(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle screenSpaceShadowArray)
        {
            // If this is the right debug mode and the index we are asking for is in the range
            if (!rayTracingSupported || (m_ScreenSpaceShadowChannelSlot <= m_CurrentDebugDisplaySettings.data.screenSpaceShadowIndex))
            {
                return(m_RenderGraph.defaultResources.blackTextureXR);
            }

            using (var builder = renderGraph.AddRenderPass <ScreenSpaceShadowDebugPassData>("Screen Space Shadows Debug", out var passData, ProfilingSampler.Get(HDProfileId.ScreenSpaceShadowsDebug)))
            {
                passData.parameters             = PrepareSSShadowDebugParameters(hdCamera, (int)m_CurrentDebugDisplaySettings.data.screenSpaceShadowIndex);
                passData.screenSpaceShadowArray = builder.ReadTexture(screenSpaceShadowArray);
                passData.outputBuffer           = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "EvaluateShadowDebug"
                }));

                builder.SetRenderFunc(
                    (ScreenSpaceShadowDebugPassData data, RenderGraphContext context) =>
                {
                    SSShadowDebugResources resources = new SSShadowDebugResources();
                    resources.screenSpaceShadowArray = data.screenSpaceShadowArray;
                    resources.outputBuffer           = data.outputBuffer;
                    ExecuteShadowDebugView(context.cmd, data.parameters, resources);
                });
                return(passData.outputBuffer);
            }
        }
        void RenderTransparencyOverdraw(RenderGraph renderGraph, TextureHandle depthBuffer, CullingResults cull, HDCamera hdCamera)
        {
            if (m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() && m_CurrentDebugDisplaySettings.data.fullScreenDebugMode == FullScreenDebugMode.TransparencyOverdraw)
            {
                TextureHandle transparencyOverdrawOutput = TextureHandle.nullHandle;
                using (var builder = renderGraph.AddRenderPass <TransparencyOverdrawPassData>("Transparency Overdraw", out var passData))
                {
                    passData.parameters = PrepareTransparencyOverdrawParameters(hdCamera, cull);
                    passData.output     = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                    {
                        colorFormat = GetColorBufferFormat()
                    }));
                    passData.depthBuffer             = builder.ReadTexture(depthBuffer);
                    passData.transparencyRL          = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.transparencyRL));
                    passData.transparencyAfterPostRL = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.transparencyAfterPostRL));
                    passData.transparencyLowResRL    = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.transparencyLowResRL));

                    builder.SetRenderFunc(
                        (TransparencyOverdrawPassData data, RenderGraphContext ctx) =>
                    {
                        RenderTransparencyOverdraw(data.parameters,
                                                   data.output,
                                                   data.depthBuffer,
                                                   data.transparencyRL,
                                                   data.transparencyAfterPostRL,
                                                   data.transparencyLowResRL,
                                                   ctx.renderContext, ctx.cmd);
                    });

                    transparencyOverdrawOutput = passData.output;
                }

                PushFullScreenDebugTexture(renderGraph, transparencyOverdrawOutput, FullScreenDebugMode.TransparencyOverdraw);
            }
        }
Example 22
        void RenderDirectionalLightScreenSpaceShadow(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer, TextureHandle rayCountTexture, TextureHandle screenSpaceShadowArray)
        {
            // Should we be executing anything really?
            bool screenSpaceShadowRequired = m_CurrentSunLightAdditionalLightData != null && m_CurrentSunLightAdditionalLightData.WillRenderScreenSpaceShadow();

            // Render directional screen space shadow if required
            if (screenSpaceShadowRequired)
            {
                bool rayTracedDirectionalRequired = m_CurrentSunLightAdditionalLightData.WillRenderRayTracedShadow();
                // If the shadow is flagged as ray traced, we need to evaluate it completely
                if (rayTracedDirectionalRequired)
                {
                    RenderRayTracedDirectionalScreenSpaceShadow(renderGraph, hdCamera, depthBuffer, normalBuffer, motionVectorsBuffer, rayCountTexture, screenSpaceShadowArray);
                }
                else
                {
                    using (var builder = renderGraph.AddRenderPass <SSSDirectionalTracePassData>("Directional RT Shadow", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingDirectionalLightShadow)))
                    {
                        passData.parameters             = PrepareSSShadowDirectionalParameters();
                        passData.normalBuffer           = builder.ReadTexture(normalBuffer);
                        passData.screenSpaceShadowArray = builder.ReadTexture(builder.WriteTexture(screenSpaceShadowArray));

                        builder.SetRenderFunc(
                            (SSSDirectionalTracePassData data, RenderGraphContext context) =>
                        {
                            ExecuteSSShadowDirectional(context.cmd, data.parameters, context.renderGraphPool.GetTempMaterialPropertyBlock(), data.normalBuffer, data.screenSpaceShadowArray);
                        });
                    }
                }
            }
        }
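Note: the nested builder.ReadTexture(builder.WriteTexture(screenSpaceShadowArray)) call marks the shadow array as both read and written by the pass. Other examples in this collection (CombineRTSSS, ConvertSSGI) declare the same intent with a single call, which would presumably also work here:

                        // Hypothetical equivalent using the combined read-write declaration seen in other examples.
                        passData.screenSpaceShadowArray = builder.ReadWriteTexture(screenSpaceShadowArray);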
Example 23
        TextureHandle AdjustWeightRTR(RenderGraph renderGraph, HDCamera hdCamera, ScreenSpaceReflection settings,
                                      TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle clearCoatTexture, TextureHandle lightingTexture, TextureHandle directionTexture)
        {
            using (var builder = renderGraph.AddRenderPass <AdjustWeightRTRPassData>("Adjust Weight RTR", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingReflectionAdjustWeight)))
            {
                builder.EnableAsyncCompute(false);

                passData.texWidth  = settings.fullResolution ? hdCamera.actualWidth : hdCamera.actualWidth / 2;
                passData.texHeight = settings.fullResolution ? hdCamera.actualHeight : hdCamera.actualHeight / 2;
                passData.viewCount = hdCamera.viewCount;

                // Requires parameters
                passData.minSmoothness       = settings.minSmoothness;
                passData.smoothnessFadeStart = settings.smoothnessFadeStart;

                // Other parameters
                passData.reflectionFilterCS          = m_Asset.renderPipelineRayTracingResources.reflectionBilateralFilterCS;
                passData.adjustWeightKernel          = settings.fullResolution ? m_ReflectionAdjustWeightKernel : m_ReflectionRescaleAndAdjustWeightKernel;
                passData.shaderVariablesRayTracingCB = m_ShaderVariablesRayTracingCB;

                passData.depthStencilBuffer   = builder.ReadTexture(depthPyramid);
                passData.normalBuffer         = builder.ReadTexture(normalBuffer);
                passData.clearCoatMaskTexture = builder.ReadTexture(clearCoatTexture);
                passData.lightingTexture      = builder.ReadTexture(lightingTexture);
                passData.directionTexture     = builder.ReadTexture(directionTexture);
                passData.outputTexture        = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Reflection Ray Reflections"
                }));

                builder.SetRenderFunc(
                    (AdjustWeightRTRPassData data, RenderGraphContext ctx) =>
                {
                    // Bind all the required scalars to the CB
                    data.shaderVariablesRayTracingCB._RaytracingReflectionMinSmoothness       = data.minSmoothness;
                    data.shaderVariablesRayTracingCB._RaytracingReflectionSmoothnessFadeStart = data.smoothnessFadeStart;
                    ConstantBuffer.PushGlobal(ctx.cmd, data.shaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                    // Source input textures
                    ctx.cmd.SetComputeTextureParam(data.reflectionFilterCS, data.adjustWeightKernel, HDShaderIDs._DepthTexture, data.depthStencilBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionFilterCS, data.adjustWeightKernel, HDShaderIDs._SsrClearCoatMaskTexture, data.clearCoatMaskTexture);
                    ctx.cmd.SetComputeTextureParam(data.reflectionFilterCS, data.adjustWeightKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.reflectionFilterCS, data.adjustWeightKernel, HDShaderIDs._DirectionPDFTexture, data.directionTexture);

                    // Lighting textures
                    ctx.cmd.SetComputeTextureParam(data.reflectionFilterCS, data.adjustWeightKernel, HDShaderIDs._SsrLightingTextureRW, data.lightingTexture);

                    // Output texture
                    ctx.cmd.SetComputeTextureParam(data.reflectionFilterCS, data.adjustWeightKernel, HDShaderIDs._RaytracingReflectionTexture, data.outputTexture);

                    // Compute the texture
                    int numTilesXHR = (data.texWidth + (rtReflectionsComputeTileSize - 1)) / rtReflectionsComputeTileSize;
                    int numTilesYHR = (data.texHeight + (rtReflectionsComputeTileSize - 1)) / rtReflectionsComputeTileSize;
                    ctx.cmd.DispatchCompute(data.reflectionFilterCS, data.adjustWeightKernel, numTilesXHR, numTilesYHR, data.viewCount);
                });

                return(passData.outputTexture);
            }
        }
        TextureHandle UpscaleSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination giSettings, HDUtils.PackedMipChainInfo info, TextureHandle depthPyramid, TextureHandle inputBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <UpscaleSSGIPassData>("Upscale SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGIUpscale)))
            {
                builder.EnableAsyncCompute(false);

                // Set the camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                passData.shaderVariablesBilateralUpsampleCB._HalfScreenSize = new Vector4(passData.texWidth / 2, passData.texHeight / 2, 1.0f / (passData.texWidth * 0.5f), 1.0f / (passData.texHeight * 0.5f));
                unsafe
                {
                    for (int i = 0; i < 16; ++i)
                    {
                        passData.shaderVariablesBilateralUpsampleCB._DistanceBasedWeights[i] = BilateralUpsample.distanceBasedWeights_2x2[i];
                    }

                    for (int i = 0; i < 32; ++i)
                    {
                        passData.shaderVariablesBilateralUpsampleCB._TapOffsets[i] = BilateralUpsample.tapOffsets_2x2[i];
                    }
                }

                // Grab the right kernel
                passData.bilateralUpsampleCS = m_Asset.renderPipelineResources.shaders.bilateralUpsampleCS;
                passData.upscaleKernel       = m_BilateralUpSampleColorKernel;

                passData.depthTexture = builder.ReadTexture(depthPyramid);
                passData.inputBuffer  = builder.ReadTexture(inputBuffer);
                passData.outputBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.B10G11R11_UFloatPack32, enableRandomWrite = true, name = "SSGI Final"
                }));

                builder.SetRenderFunc(
                    (UpscaleSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // Re-evaluate the dispatch parameters (we are evaluating the upsample in full resolution)
                    int ssgiTileSize = 8;
                    int numTilesXHR  = (data.texWidth + (ssgiTileSize - 1)) / ssgiTileSize;
                    int numTilesYHR  = (data.texHeight + (ssgiTileSize - 1)) / ssgiTileSize;

                    ConstantBuffer.PushGlobal(ctx.cmd, data.shaderVariablesBilateralUpsampleCB, HDShaderIDs._ShaderVariablesBilateralUpsample);

                    // Inject all the input buffers
                    ctx.cmd.SetComputeTextureParam(data.bilateralUpsampleCS, data.upscaleKernel, HDShaderIDs._DepthTexture, data.depthTexture);
                    ctx.cmd.SetComputeTextureParam(data.bilateralUpsampleCS, data.upscaleKernel, HDShaderIDs._LowResolutionTexture, data.inputBuffer);

                    // Inject the output textures
                    ctx.cmd.SetComputeTextureParam(data.bilateralUpsampleCS, data.upscaleKernel, HDShaderIDs._OutputUpscaledTexture, data.outputBuffer);

                    // Upscale the buffer to full resolution
                    ctx.cmd.DispatchCompute(data.bilateralUpsampleCS, data.upscaleKernel, numTilesXHR, numTilesYHR, data.viewCount);
                });
                return(passData.outputBuffer);
            }
        }
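Note: the (size + (tileSize - 1)) / tileSize expression used for the dispatch sizes here and in several earlier examples is integer ceiling division, so partially covered tiles at the right and bottom edges still get a thread group. A small worked sketch:

        // Ceiling division used for compute dispatch sizes (8x8 thread groups in the example above).
        // 1920 x 1080: (1920 + 7) / 8 = 240 groups, (1080 + 7) / 8 = 135 groups.
        // 1921 x 1081: (1921 + 7) / 8 = 241 groups, (1081 + 7) / 8 = 136 groups.
        static int DispatchGroupCount(int size, int tileSize) => (size + (tileSize - 1)) / tileSize;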
Example 25
        // RenderDepthPrepass renders both opaque and alpha-tested opaque objects, depending on the engine configuration.
        // Lit Forward only: we always render all materials.
        // Lit Deferred: we always render the depth prepass for alpha-tested materials (optimization); other deferred materials are rendered based on the engine configuration.
        // Forward opaque with deferred renderer (DepthForwardOnly pass): we always render all materials.
        // Returns true if motion vectors must be rendered after the GBuffer pass.
        bool RenderDepthPrepass(RenderGraph renderGraph, CullingResults cull, HDCamera hdCamera, ref PrepassOutput output)
        {
            var depthPrepassParameters = PrepareDepthPrepass(cull, hdCamera);

            bool msaa = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA);

            using (var builder = renderGraph.AddRenderPass <DepthPrepassData>(depthPrepassParameters.passName, out var passData, ProfilingSampler.Get(depthPrepassParameters.profilingId)))
            {
                passData.frameSettings       = hdCamera.frameSettings;
                passData.msaaEnabled         = msaa;
                passData.hasDepthOnlyPrepass = depthPrepassParameters.hasDepthOnlyPass;

                passData.depthBuffer  = builder.UseDepthBuffer(output.depthBuffer, DepthAccess.ReadWrite);
                passData.normalBuffer = builder.WriteTexture(CreateNormalBuffer(renderGraph, msaa));
                // This texture must be used because reading directly from an MSAA depth buffer is way too expensive.
                // The solution we went for is writing the depth into an additional color buffer (10x cheaper to resolve on PS4).
                if (msaa)
                {
                    passData.depthAsColorBuffer = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                    {
                        colorFormat = GraphicsFormat.R32_SFloat, clearBuffer = true, clearColor = Color.black, bindTextureMS = true, enableMSAA = true, name = "DepthAsColorMSAA"
                    }, HDShaderIDs._DepthTextureMS));
                }

                if (passData.hasDepthOnlyPrepass)
                {
                    passData.rendererListDepthOnly = builder.UseRendererList(renderGraph.CreateRendererList(depthPrepassParameters.depthOnlyRendererListDesc));
                }

                passData.rendererListMRT = builder.UseRendererList(renderGraph.CreateRendererList(depthPrepassParameters.mrtRendererListDesc));

                output.depthBuffer  = passData.depthBuffer;
                output.depthAsColor = passData.depthAsColorBuffer;
                output.normalBuffer = passData.normalBuffer;

                builder.SetRenderFunc(
                    (DepthPrepassData data, RenderGraphContext context) =>
                {
                    var mrt = context.renderGraphPool.GetTempArray <RenderTargetIdentifier>(data.msaaEnabled ? 2 : 1);
                    mrt[0]  = context.resources.GetTexture(data.normalBuffer);
                    if (data.msaaEnabled)
                    {
                        mrt[1] = context.resources.GetTexture(data.depthAsColorBuffer);
                    }

                    bool useRayTracing = data.frameSettings.IsEnabled(FrameSettingsField.RayTracing);

                    RenderDepthPrepass(context.renderContext, context.cmd, data.frameSettings
                                       , mrt
                                       , context.resources.GetTexture(data.depthBuffer)
                                       , context.resources.GetRendererList(data.rendererListDepthOnly)
                                       , context.resources.GetRendererList(data.rendererListMRT)
                                       , data.hasDepthOnlyPrepass
                                       );
                });
            }

            return(depthPrepassParameters.shouldRenderMotionVectorAfterGBuffer);
        }
        TextureHandle UpscaleRTGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination settings,
                                  TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle indirectDiffuseBuffer, TextureHandle directionBuffer, bool fullResolution)
        {
            using (var builder = renderGraph.AddRenderPass <UpscaleRTGIPassData>("Upscale the RTGI result", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingIndirectDiffuseUpscale)))
            {
                builder.EnableAsyncCompute(false);

                // Set the camera parameters
                passData.texWidth  = hdCamera.actualWidth;
                passData.texHeight = hdCamera.actualHeight;
                passData.viewCount = hdCamera.viewCount;

                // Grab the right kernel
                passData.upscaleCS     = m_GlobalSettings.renderPipelineRayTracingResources.indirectDiffuseRaytracingCS;
                passData.upscaleKernel = fullResolution ? m_IndirectDiffuseUpscaleFullResKernel : m_IndirectDiffuseUpscaleHalfResKernel;

                // Grab the additional parameters
                passData.blueNoiseTexture  = GetBlueNoiseManager().textureArray16RGB;
                passData.scramblingTexture = m_Asset.renderPipelineResources.textures.scramblingTex;

                passData.depthBuffer           = builder.ReadTexture(depthPyramid);
                passData.normalBuffer          = builder.ReadTexture(normalBuffer);
                passData.indirectDiffuseBuffer = builder.ReadTexture(indirectDiffuseBuffer);
                passData.directionBuffer       = builder.ReadTexture(directionBuffer);
                passData.outputBuffer          = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Reflection Ray Indirect Diffuse"
                }));

                builder.SetRenderFunc(
                    (UpscaleRTGIPassData data, RenderGraphContext ctx) =>
                {
                    // Inject all the parameters for the compute
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._DepthTexture, data.depthBuffer);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._NormalBufferTexture, data.normalBuffer);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._IndirectDiffuseTexture, data.indirectDiffuseBuffer);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._RaytracingDirectionBuffer, data.directionBuffer);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._BlueNoiseTexture, data.blueNoiseTexture);
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._ScramblingTexture, data.scramblingTexture);

                    // Output buffer
                    ctx.cmd.SetComputeTextureParam(data.upscaleCS, data.upscaleKernel, HDShaderIDs._UpscaledIndirectDiffuseTextureRW, data.outputBuffer);

                    // Texture dimensions
                    int texWidth  = data.texWidth;
                    int texHeight = data.texHeight;

                    // Evaluate the dispatch parameters
                    int areaTileSize = 8;
                    int numTilesXHR  = (texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesYHR  = (texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Compute the texture
                    ctx.cmd.DispatchCompute(data.upscaleCS, data.upscaleKernel, numTilesXHR, numTilesYHR, data.viewCount);
                });

                return(passData.outputBuffer);
            }
        }
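The dispatch above rounds the target resolution up to whole 8x8 thread groups so the last, partially covered row and column of pixels still get a group; the kernel is then expected to bounds-check its pixel coordinate. A self-contained sketch of that arithmetic (the helper name DivRoundUp is illustrative, though HDRP ships a similar utility):

static class DispatchMathSketch
{
    // Ceiling division: one extra group whenever the size is not a multiple of the tile.
    public static int DivRoundUp(int value, int divisor) => (value + divisor - 1) / divisor;
}

// e.g. a 1927x1087 target with 8x8 groups dispatches 241x136 groups,
// since DivRoundUp(1927, 8) == 241 and DivRoundUp(1087, 8) == 136.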
Example n. 27
        internal void EvaluateRTASDebugView(RenderGraph renderGraph, HDCamera hdCamera)
        {
            // If the ray tracing state is not valid, we cannot evaluate the debug view
            if (!m_ValidRayTracingState)
            {
                return;
            }

            using (var builder = renderGraph.AddRenderPass <RTASDebugPassData>("Debug view of the RTAS", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingBuildAccelerationStructureDebug)))
            {
                builder.EnableAsyncCompute(false);

                // Camera data
                passData.actualWidth  = hdCamera.actualWidth;
                passData.actualHeight = hdCamera.actualHeight;
                passData.viewCount    = hdCamera.viewCount;

                // Evaluation parameters
                passData.debugMode             = (int)m_CurrentDebugDisplaySettings.data.rtasDebugMode;
                passData.layerMask             = LayerFromRTASDebugView(m_CurrentDebugDisplaySettings.data.rtasDebugView, hdCamera);
                passData.pixelCoordToViewDirWS = hdCamera.mainViewConstants.pixelCoordToViewDirWS;

                // Other parameters
                passData.debugRTASRT = m_GlobalSettings.renderPipelineRayTracingResources.rtasDebug;
                passData.rayTracingAccelerationStructure = RequestAccelerationStructure(hdCamera);

                // Allocate the output texture that will receive the debug view
                passData.outputTexture = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "RTAS Debug"
                }));

                builder.SetRenderFunc(
                    (RTASDebugPassData data, RenderGraphContext ctx) =>
                {
                    // Define the shader pass to use for the debug view
                    ctx.cmd.SetRayTracingShaderPass(data.debugRTASRT, "DebugDXR");

                    // Set the acceleration structure for the pass
                    ctx.cmd.SetRayTracingAccelerationStructure(data.debugRTASRT, HDShaderIDs._RaytracingAccelerationStructureName, data.rayTracingAccelerationStructure);

                    // Debug mode, layer mask and pixel-to-view-direction matrix
                    ctx.cmd.SetRayTracingIntParam(data.debugRTASRT, "_DebugMode", data.debugMode);
                    ctx.cmd.SetRayTracingIntParam(data.debugRTASRT, "_LayerMask", (int)data.layerMask);
                    ctx.cmd.SetRayTracingMatrixParam(data.debugRTASRT, HDShaderIDs._PixelCoordToViewDirWS, data.pixelCoordToViewDirWS);

                    // Set the output texture
                    ctx.cmd.SetRayTracingTextureParam(data.debugRTASRT, "_OutputDebugBuffer", data.outputTexture);

                    // Evaluate the debug view
                    ctx.cmd.DispatchRays(data.debugRTASRT, m_RTASDebugRTKernel, (uint)data.actualWidth, (uint)data.actualHeight, (uint)data.viewCount);
                });

                // Use the debug texture to do the full screen debug
                PushFullScreenDebugTexture(renderGraph, passData.outputTexture, FullScreenDebugMode.RayTracingAccelerationStructure);
            }
        }
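Note that _DebugMode, _LayerMask and _OutputDebugBuffer are bound by string here, while the other parameters go through pre-hashed HDShaderIDs integers. Assuming the nameID overloads of the ray tracing setters are available (as they are for the compute equivalents), the string lookups could be hoisted out of the per-frame callback; the class and field names below are illustrative, not HDRP symbols.

using UnityEngine;

static class RTASDebugShaderIDsSketch
{
    // Shader.PropertyToID hashes each name once; the cached ints can then be passed
    // to SetRayTracingIntParam / SetRayTracingTextureParam instead of raw strings.
    public static readonly int _DebugMode         = Shader.PropertyToID("_DebugMode");
    public static readonly int _LayerMask         = Shader.PropertyToID("_LayerMask");
    public static readonly int _OutputDebugBuffer = Shader.PropertyToID("_OutputDebugBuffer");
}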
        TraceOutput TraceSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumination giSettings, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorsBuffer)
        {
            using (var builder = renderGraph.AddRenderPass <TraceSSGIPassData>("Trace SSGI", out var passData, ProfilingSampler.Get(HDProfileId.SSGITrace)))
            {
                builder.EnableAsyncCompute(false);

                passData.parameters   = PrepareSSGITraceParameters(hdCamera, giSettings);
                passData.depthTexture = builder.ReadTexture(depthPyramid);
                passData.normalBuffer = builder.ReadTexture(normalBuffer);
                if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.ObjectMotionVectors))
                {
                    passData.motionVectorsBuffer = builder.ReadTexture(renderGraph.defaultResources.blackTextureXR);
                }
                else
                {
                    passData.motionVectorsBuffer = builder.ReadTexture(motionVectorsBuffer);
                }

                var colorPyramid = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);
                passData.colorPyramid = colorPyramid != null ? builder.ReadTexture(renderGraph.ImportTexture(colorPyramid)) : renderGraph.defaultResources.blackTextureXR;

                var historyDepth = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
                passData.historyDepth = historyDepth != null ? builder.ReadTexture(renderGraph.ImportTexture(historyDepth)) : renderGraph.defaultResources.blackTextureXR;

                passData.hitPointBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Hit Point"
                });
                passData.outputBuffer0 = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Signal0"
                }));
                passData.outputBuffer1 = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true)
                {
                    colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "SSGI Signal1"
                }));

                builder.SetRenderFunc(
                    (TraceSSGIPassData data, RenderGraphContext ctx) =>
                {
                    // We need to fill the structure that holds the various resources
                    SSGITraceResources resources  = new SSGITraceResources();
                    resources.depthTexture        = data.depthTexture;
                    resources.normalBuffer        = data.normalBuffer;
                    resources.motionVectorsBuffer = data.motionVectorsBuffer;
                    resources.colorPyramid        = data.colorPyramid;
                    resources.historyDepth        = data.historyDepth;
                    resources.hitPointBuffer      = data.hitPointBuffer;
                    resources.outputBuffer0       = data.outputBuffer0;
                    resources.outputBuffer1       = data.outputBuffer1;
                    ExecuteSSGITrace(ctx.cmd, data.parameters, resources);
                });
                TraceOutput traceOutput = new TraceOutput();
                traceOutput.outputBuffer0 = passData.outputBuffer0;
                traceOutput.outputBuffer1 = passData.outputBuffer1;
                return(traceOutput);
            }
        }
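Both history reads above follow the same fallback: when the camera has no history RT yet (for example on the first frame), the pass binds the render graph's default black XR texture instead of importing a null handle. A hedged sketch of that pattern, assuming the RenderGraphBuilder type exposed by this RenderGraph version; the helper name ReadHistoryOrBlack is illustrative.

using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering.RenderGraphModule; // assumed namespace for the RenderGraph types

static class HistoryFallbackSketch
{
    // Import the history RTHandle if it exists, otherwise fall back to the default
    // black texture so the shader always has a valid binding.
    public static TextureHandle ReadHistoryOrBlack(RenderGraph renderGraph, RenderGraphBuilder builder, RTHandle history)
    {
        return history != null
            ? builder.ReadTexture(renderGraph.ImportTexture(history))
            : renderGraph.defaultResources.blackTextureXR;
    }
}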
Example n. 29
        public void Resolve(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle input)
        {
            if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.VirtualTexturing))
            {
                if (m_DownSampleCS == null)
                {
                    m_DownSampleCS         = HDRenderPipeline.currentAsset.renderPipelineResources.shaders.VTFeedbackDownsample;
                    m_DownsampleKernel     = m_DownSampleCS.FindKernel("KMain");
                    m_DownsampleKernelMSAA = m_DownSampleCS.FindKernel("KMainMSAA");
                }

                using (var builder = renderGraph.AddRenderPass <ResolveVTData>("Resolve VT", out var passData, ProfilingSampler.Get(HDProfileId.VTFeedbackDownsample)))
                {
                    // The output is never read outside the pass but is still useful for the VT system so we can't cull this pass.
                    builder.AllowPassCulling(false);

                    bool msaa = hdCamera.msaaEnabled;
                    passData.width        = hdCamera.actualWidth;
                    passData.height       = hdCamera.actualHeight;
                    passData.lowresWidth  = passData.width;
                    passData.lowresHeight = passData.height;
                    GetResolveDimensions(ref passData.lowresWidth, ref passData.lowresHeight);
                    passData.resolver         = msaa ? m_ResolverMsaa : m_Resolver;
                    passData.downsampleCS     = m_DownSampleCS;
                    passData.downsampleKernel = msaa ? m_DownsampleKernelMSAA : m_DownsampleKernel;

                    passData.input  = builder.ReadTexture(input);
                    passData.lowres = builder.WriteTexture(renderGraph.ImportTexture(m_LowresResolver));

                    builder.SetRenderFunc(
                        (ResolveVTData data, RenderGraphContext ctx) =>
                    {
                        RTHandle lowresBuffer = data.lowres;
                        RTHandle buffer       = data.input;

                        Debug.Assert(data.lowresWidth <= data.resolver.CurrentWidth && data.lowresHeight <= data.resolver.CurrentHeight);
                        Debug.Assert(data.lowresWidth <= lowresBuffer.referenceSize.x && data.lowresHeight <= lowresBuffer.referenceSize.y);

                        int inputID = (buffer.isMSAAEnabled) ? HDShaderIDs._InputTextureMSAA : HDShaderIDs._InputTexture;

                        ctx.cmd.SetComputeTextureParam(data.downsampleCS, data.downsampleKernel, inputID, buffer);
                        ctx.cmd.SetComputeTextureParam(data.downsampleCS, data.downsampleKernel, HDShaderIDs._OutputTexture, lowresBuffer);
                        var resolveCounter = 0;
                        var startOffsetX   = (resolveCounter % kResolveScaleFactor);
                        var startOffsetY   = (resolveCounter / kResolveScaleFactor) % kResolveScaleFactor;
                        ctx.cmd.SetComputeVectorParam(data.downsampleCS, HDShaderIDs._Params, new Vector4(kResolveScaleFactor, startOffsetX, startOffsetY, /*unused*/ -1));
                        ctx.cmd.SetComputeVectorParam(data.downsampleCS, HDShaderIDs._Params1, new Vector4(data.width, data.height, data.lowresWidth, data.lowresHeight));
                        var TGSize = 8;     // Must match the thread group size declared in the downsample shader
                        ctx.cmd.DispatchCompute(data.downsampleCS, data.downsampleKernel, ((int)data.lowresWidth + (TGSize - 1)) / TGSize, ((int)data.lowresHeight + (TGSize - 1)) / TGSize, 1);

                        data.resolver.Process(ctx.cmd, lowresBuffer, 0, data.lowresWidth, 0, data.lowresHeight, 0, 0);

                        VirtualTexturing.System.Update();
                    });
                }
            }
        }
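The downsample compute shader and its two kernels are resolved lazily the first time Resolve runs, so the FindKernel string lookups are paid once rather than every frame. A small sketch of that lazy-initialization pattern, with illustrative field and method names:

using UnityEngine;

class FeedbackDownsampleInitSketch
{
    ComputeShader m_CS;           // illustrative fields, not the HDRP ones
    int m_Kernel;
    int m_KernelMSAA;

    // Assumed to receive the downsample shader from the pipeline resources.
    void EnsureKernels(ComputeShader downsampleShader)
    {
        if (m_CS != null)
            return;               // already initialized, skip the string lookups
        m_CS         = downsampleShader;
        m_Kernel     = m_CS.FindKernel("KMain");
        m_KernelMSAA = m_CS.FindKernel("KMainMSAA");
    }
}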
Example n. 30
        internal void PushCustomPassTexture(
            RenderGraph renderGraph,
            CustomPassInjectionPoint injectionPoint,
            TextureHandle cameraSource,
            Lazy <RTHandle> customPassSource,
            List <RTHandle> targets
            )
        {
            if (!isValid || m_CustomPassAOVBuffers == null)
            {
                return;
            }

            Assert.IsNotNull(targets);

            int index = -1;

            for (int i = 0; i < m_CustomPassAOVBuffers.Length; ++i)
            {
                if (m_CustomPassAOVBuffers[i].injectionPoint == injectionPoint)
                {
                    index = i;
                    break;
                }
            }

            if (index == -1)
            {
                return;
            }

            using (var builder = renderGraph.AddRenderPass <PushCustomPassTexturePassData>("Push Custom Pass Texture", out var passData))
            {
                if (m_CustomPassAOVBuffers[index].outputType == CustomPassAOVBuffers.OutputType.Camera)
                {
                    passData.source           = builder.ReadTexture(cameraSource);
                    passData.customPassSource = null;
                }
                else
                {
                    passData.customPassSource = customPassSource.Value;
                }
                passData.target = targets[index];

                builder.SetRenderFunc(
                    (PushCustomPassTexturePassData data, RenderGraphContext ctx) =>
                {
                    if (data.customPassSource != null)
                    {
                        HDUtils.BlitCameraTexture(ctx.cmd, data.customPassSource, data.target);
                    }
                    else
                    {
                        HDUtils.BlitCameraTexture(ctx.cmd, data.source, data.target);
                    }
                });
            }
        }
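customPassSource is passed as Lazy<RTHandle> so the custom-pass color buffer is only allocated when an AOV request actually targets a custom-pass output; reading .Value in the non-camera branch triggers the allocation on first use. A hedged sketch of how such a lazy handle could be built; the RTHandles.Alloc arguments and the helper name are illustrative, not the values HDRP uses.

using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering;

static class LazyAOVSourceSketch
{
    // The factory runs only when .Value is first read, so no render texture is
    // allocated for cameras that never push a custom-pass AOV.
    public static Lazy<RTHandle> CreateLazyCustomPassSource(int width, int height)
    {
        return new Lazy<RTHandle>(() =>
            RTHandles.Alloc(width, height,
                colorFormat: GraphicsFormat.R16G16B16A16_SFloat,   // illustrative format
                name: "CustomPassAOVSource"));
    }
}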