Example #1
 public virtual void RenderDeferredLighting(HDCamera hdCamera, ScriptableRenderContext renderContext,
                                            DebugDisplaySettings debugDisplaySettings,
                                            RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthStencilBuffer, RenderTargetIdentifier depthStencilTexture,
                                            bool outputSplitLightingForSSS)
 {
 }
Example #2
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
 {
     SetRenderTarget(cmd, camera, colorBuffer, depthBuffer, ClearFlag.None, CoreUtils.clearColorAllBlack, miplevel, cubemapFace, depthSlice);
 }
Example #3
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer)
 {
     CoreUtils.SetRenderTarget(cmd, colorBuffers, depthBuffer, ClearFlag.None, CoreUtils.clearColorAllBlack);
     SetViewport(cmd, camera, depthBuffer);
 }
Example #4
 // In this case, both source and destination are camera-scaled, but we want to override the scale/bias parameter.
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
 {
     // Will set the correct camera viewport as well.
     SetRenderTarget(cmd, camera, destination);
     BlitTexture(cmd, source, destination, scaleBias, mipLevel, bilinear);
 }
Example #5
 // This particular case is for blitting a camera-scaled texture into a non-scaling texture. So we set up the full viewport (implicit in cmd.Blit) but have to scale the input UVs.
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RenderTargetIdentifier destination)
 {
     cmd.Blit(source, destination, new Vector2(camera.viewportScale.x, camera.viewportScale.y), Vector2.zero);
 }
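
The Vector2 scale passed to cmd.Blit above rescales the source UVs: the destination covers its full viewport, while only the camera-used sub-rectangle of the (possibly larger) camera-scaled source should be sampled. A minimal sketch of that UV remapping, assuming viewportScale is the used-over-allocated resolution ratio per axis:

using UnityEngine;

static class BlitUVSketch
{
    // Maps a full-viewport destination UV in [0,1] to the source UV that stays
    // inside the camera-used region of the (possibly larger) source RT.
    public static Vector2 ScaleSourceUV(Vector2 destUV, Vector2 viewportScale)
    {
        return new Vector2(destUV.x * viewportScale.x, destUV.y * viewportScale.y);
    }
}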
Example #6
        public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType != FogType.Volumetric)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volumetric Lighting"))
            {
                bool enableClustered    = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
                // Reprojection is only available in Play Mode because all the frame counters in Edit Mode are broken.
                bool enableReprojection = Application.isPlaying && hdCamera.camera.cameraType == CameraType.Game;

                int kernel;

                if (enableReprojection)
                {
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj"
                                                                           : "VolumetricLightingBruteforceReproj");
                }
                else
                {
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered"
                                                                           : "VolumetricLightingBruteforce");
                }

                var     frameParams = hdCamera.vBufferParams[0];
                Vector4 resolution  = frameParams.resolution;
                float   vFoV        = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);

                Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

                // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
                // Each of them is the centroid of the interval of length 2/14.
                // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
                // That way, the running average position is close to 0.5.
                // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
                // |   |   |   | o |   |   |   |
                // |   | o |   | x |   |   |   |
                // |   | x |   | x |   | o |   |
                // |   | x | o | x |   | x |   |
                // |   | x | x | x | o | x |   |
                // | o | x | x | x | x | x |   |
                // | x | x | x | x | x | x | o |
                // | x | x | x | x | x | x | x |
                float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

                int sampleIndex = (int)frameIndex % 7;

                // TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
                // Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
                Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], frameIndex);

                // Get the interpolated anisotropy value.
                var fog = VolumeManager.instance.stack.GetComponent <VolumetricFog>();

                // TODO: set 'm_VolumetricLightingPreset'.
                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
                cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.anisotropy));
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);           // Read
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, m_LightingBufferHandle); // Write
                if (enableReprojection)
                {
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting)); // Read
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting)); // Write
                }

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
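
The zSeq layout described in the comment above is easy to verify: after the leading 7/14, consecutive entries form {small, large} pairs summing to 1, so every prefix average stays close to 0.5. A standalone check (plain C#, no Unity dependency):

using System;

static class ZSeqCheck
{
    static void Main()
    {
        float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

        // Pairs after the leading 7/14 sum to 1: 3/14 + 11/14, 5/14 + 9/14, 1/14 + 13/14.
        float sum = 0.0f;
        for (int i = 0; i < zSeq.Length; i++)
        {
            sum += zSeq[i];
            // Prefix averages: 0.500, 0.357, 0.500, 0.464, 0.500, 0.429, 0.500.
            Console.WriteLine($"n = {i + 1}: running average = {sum / (i + 1):F3}");
        }
    }
}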
Example #7
        // GC.Alloc
        // VolumeParameter`1.op_Equality()
        public bool UpdateEnvironment(SkyUpdateContext skyContext, HDCamera camera, Light sunLight, bool updateRequired, CommandBuffer cmd)
        {
            bool result = false;

            if (skyContext.IsValid())
            {
                skyContext.currentUpdateTime += Time.deltaTime;

                m_BuiltinParameters.commandBuffer = cmd;
                m_BuiltinParameters.sunLight      = sunLight;
                m_BuiltinParameters.screenSize    = m_CubemapScreenSize;
                m_BuiltinParameters.cameraPosWS   = camera.camera.transform.position;
                m_BuiltinParameters.hdCamera      = null;
                m_BuiltinParameters.debugSettings = null; // We don't want any debug when updating the environment.

                int sunHash = 0;
                if (sunLight != null)
                {
                    sunHash = GetSunLightHashCode(sunLight);
                }
                int skyHash = sunHash * 23 + skyContext.skySettings.GetHashCode();

                bool forceUpdate = (updateRequired || skyContext.updatedFramesRequired > 0 || m_NeedUpdate);
                if (forceUpdate ||
                    (skyContext.skySettings.updateMode == EnvironementUpdateMode.OnChanged && skyHash != skyContext.skyParametersHash) ||
                    (skyContext.skySettings.updateMode == EnvironementUpdateMode.Realtime && skyContext.currentUpdateTime > skyContext.skySettings.updatePeriod))
                {
                    using (new ProfilingSample(cmd, "Sky Environment Pass"))
                    {
                        using (new ProfilingSample(cmd, "Update Env: Generate Lighting Cubemap"))
                        {
                            RenderSkyToCubemap(skyContext);
                        }

                        if (m_SupportsConvolution)
                        {
                            using (new ProfilingSample(cmd, "Update Env: Convolve Lighting Cubemap"))
                            {
                                RenderCubemapGGXConvolution(skyContext);
                            }
                        }

                        result = true;
                        skyContext.skyParametersHash = skyHash;
                        skyContext.currentUpdateTime = 0.0f;
                        skyContext.updatedFramesRequired--;
                        m_NeedUpdate = false;

#if UNITY_EDITOR
                        // In the editor, when we change the sky, we want to make the GI dirty so that the new sky is taken into account when baking again.
                        // Changing the hash of the render target lets us mark the GI as dirty.
                        m_SkyboxCubemapRT.rt.imageContentsHash = new Hash128((uint)skyContext.skySettings.GetHashCode(), 0, 0, 0);
#endif
                    }
                }
            }
            else
            {
                if (skyContext.skyParametersHash != 0)
                {
                    using (new ProfilingSample(cmd, "Clear Sky Environment Pass"))
                    {
                        CoreUtils.ClearCubemap(cmd, m_SkyboxCubemapRT, Color.black, true);
                        if (m_SupportsConvolution)
                        {
                            CoreUtils.ClearCubemap(cmd, m_SkyboxBSDFCubemapIntermediate, Color.black, true);
                            for (int bsdfIdx = 0; bsdfIdx < m_IBLFilterArray.Length; ++bsdfIdx)
                            {
                                cmd.CopyTexture(m_SkyboxBSDFCubemapIntermediate, 0, m_SkyboxBSDFCubemapArray, bsdfIdx);
                            }
                        }
                    }

                    skyContext.skyParametersHash = 0;
                    result = true;
                }
            }

            return(result);
        }
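
The change detection above rests on one line: skyHash = sunHash * 23 + skySettings.GetHashCode(), a standard multiply-add hash combine where a change to either input almost always changes the result. A minimal sketch of the caching pattern, with illustrative names:

static class SkyHashSketch
{
    static int s_CachedHash;

    static int CombineHash(int sunHash, int skySettingsHash)
    {
        return sunHash * 23 + skySettingsHash; // small odd multiplier mixes the two hashes
    }

    // Returns true when either the sun or the sky settings changed since the last call.
    static bool NeedsUpdate(int sunHash, int skySettingsHash)
    {
        int newHash = CombineHash(sunHash, skySettingsHash);
        if (newHash == s_CachedHash)
            return false;
        s_CachedHash = newHash;
        return true;
    }
}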
Example #8
 // Draws a full screen triangle as a faster alternative to drawing a full screen quad.
 public static void DrawFullScreen(CommandBuffer commandBuffer, Material material, HDCamera camera,
                                   RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthStencilBuffer,
                                   MaterialPropertyBlock properties = null, int shaderPassID = 0)
 {
     SetupMaterialHDCamera(camera, material);
     commandBuffer.SetRenderTarget(colorBuffers, depthStencilBuffer);
     commandBuffer.DrawProcedural(Matrix4x4.identity, material, shaderPassID, MeshTopology.Triangles, 3, 1, properties);
 }
Example #9
 // Draws a full screen triangle as a faster alternative to drawing a full screen quad.
 // Important: the first RenderTarget must be created with 0 depth bits!
 public static void DrawFullScreen(CommandBuffer commandBuffer, Material material, HDCamera camera,
                                   RenderTargetIdentifier[] colorBuffers,
                                   MaterialPropertyBlock properties = null, int shaderPassID = 0)
 {
     // It is currently not possible to have MRT without also setting a depth target.
     // To work around this deficiency of the CommandBuffer.SetRenderTarget() API,
     // we pass the first color target as the depth target. If it has 0 depth bits,
     // no depth target ends up being bound.
     DrawFullScreen(commandBuffer, material, camera, colorBuffers, colorBuffers[0], properties, shaderPassID);
 }
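
A hypothetical call site for this overload (buffer names are illustrative, not from the source); the only requirement is the one stated above, that the first color target was allocated with 0 depth bits:

// Sketch only: assumes gbuffer0 was created with 0 depth bits.
var mrt = new RenderTargetIdentifier[] { gbuffer0, gbuffer1 };
DrawFullScreen(cmd, material, hdCamera, mrt); // gbuffer0 doubles as the (absent) depth target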
Example #10
 public abstract void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd);
Example #11
        public bool RenderIndirectDiffuse(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, uint frameCount)
        {
            // Bind the indirect diffuse texture
            BindIndirectDiffuseTexture(cmd);

            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironement              = m_RaytracingManager.CurrentEnvironment();
            RaytracingShader        indirectDiffuseShader       = m_PipelineAsset.renderPipelineResources.shaders.indirectDiffuseRaytracing;
            ComputeShader           indirectDiffuseAccumulation = m_PipelineAsset.renderPipelineResources.shaders.indirectDiffuseAccumulation;

            bool invalidState = rtEnvironement == null || !rtEnvironement.raytracedIndirectDiffuse ||
                                indirectDiffuseShader == null || indirectDiffuseAccumulation == null ||
                                m_PipelineResources.textures.owenScrambledTex == null || m_PipelineResources.textures.scramblingTex == null;

            // If any required resource or setting is missing, end it now
            if (invalidState)
            {
                return(false);
            }

            // Grab the acceleration structures and the light cluster to use
            RaytracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironement.indirectDiffuseLayerMask);
            HDRaytracingLightCluster        lightCluster          = m_RaytracingManager.RequestLightCluster(rtEnvironement.indirectDiffuseLayerMask);

            // Grab the name of the ray generation shader to use
            string targetRayGen = m_RayGenIndirectDiffuseName;

            // Define the shader pass to use for the indirect diffuse pass
            cmd.SetRaytracingShaderPass(indirectDiffuseShader, "IndirectDXR");

            // Set the acceleration structure for the pass
            cmd.SetRaytracingAccelerationStructure(indirectDiffuseShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
            cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironement.rayBias);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, rtEnvironement.indirectDiffuseRayLength);
            cmd.SetRaytracingIntParams(indirectDiffuseShader, HDShaderIDs._RaytracingNumSamples, rtEnvironement.indirectDiffuseNumSamples);
            int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;

            cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

            // Set the data for the ray generation
            cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._IndirectDiffuseTextureRW, m_IndirectDiffuseTexture);
            cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());

            // Set the indirect diffuse parameters
            cmd.SetRaytracingFloatParams(indirectDiffuseShader, HDShaderIDs._RaytracingIntensityClamp, rtEnvironement.indirectDiffuseClampValue);

            // Set ray count tex
            cmd.SetRaytracingIntParam(indirectDiffuseShader, HDShaderIDs._RayCountEnabled, m_RaytracingManager.rayCountManager.RayCountIsEnabled());
            cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._RayCountTexture, m_RaytracingManager.rayCountManager.rayCountTexture);

            // Compute the pixel spread value
            float pixelSpreadAngle = Mathf.Atan(2.0f * Mathf.Tan(hdCamera.camera.fieldOfView * Mathf.PI / 360.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight));

            cmd.SetRaytracingFloatParam(indirectDiffuseShader, HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

            // LightLoop data
            cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
            cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
            cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
            cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
            cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, rtEnvironement.maxNumLightsPercell);
            cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
            cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

            // Set the data for the ray miss
            cmd.SetRaytracingTextureParam(indirectDiffuseShader, m_MissShaderName, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

            // Compute the actual resolution that is needed based on the quality
            int widthResolution  = hdCamera.actualWidth;
            int heightResolution = hdCamera.actualHeight;

            // Run the computation
            CoreUtils.SetKeyword(cmd, "DIFFUSE_LIGHTING_ONLY", true);
            cmd.DispatchRays(indirectDiffuseShader, targetRayGen, (uint)widthResolution, (uint)heightResolution, 1);
            CoreUtils.SetKeyword(cmd, "DIFFUSE_LIGHTING_ONLY", false);

            switch (rtEnvironement.indirectDiffuseFilterMode)
            {
            case HDRaytracingEnvironment.IndirectDiffuseFilterMode.SpatioTemporal:
            {
                // Grab the history buffer
                RTHandleSystem.RTHandle indirectDiffuseHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedIndirectDiffuse)
                                                                 ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedIndirectDiffuse, IndirectDiffuseHistoryBufferAllocatorFunction, 1);

                // Texture dimensions
                int texWidth  = hdCamera.actualWidth;
                int texHeight = hdCamera.actualHeight;

                // Evaluate the dispatch parameters
                int areaTileSize = 8;
                int numTilesX    = (texWidth + (areaTileSize - 1)) / areaTileSize;
                int numTilesY    = (texHeight + (areaTileSize - 1)) / areaTileSize;

                int m_KernelFilter = indirectDiffuseAccumulation.FindKernel("RaytracingIndirectDiffuseTAA");

                // Compute the combined TAA frame
                var historyScale = new Vector2(hdCamera.actualWidth / (float)indirectDiffuseHistory.rt.width, hdCamera.actualHeight / (float)indirectDiffuseHistory.rt.height);
                cmd.SetComputeVectorParam(indirectDiffuseAccumulation, HDShaderIDs._ScreenToTargetScaleHistory, historyScale);
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, m_IndirectDiffuseTexture);
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, m_DenoiseBuffer0);
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._IndirectDiffuseHistorybufferRW, indirectDiffuseHistory);
                cmd.DispatchCompute(indirectDiffuseAccumulation, m_KernelFilter, numTilesX, numTilesY, 1);

                // Output the new history
                HDUtils.BlitCameraTexture(cmd, hdCamera, m_DenoiseBuffer0, indirectDiffuseHistory);

                m_KernelFilter = indirectDiffuseAccumulation.FindKernel("IndirectDiffuseFilterH");

                // Horizontal pass of the bilateral filter
                cmd.SetComputeIntParam(indirectDiffuseAccumulation, HDShaderIDs._RaytracingDenoiseRadius, rtEnvironement.indirectDiffuseFilterRadius);
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, indirectDiffuseHistory);
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, m_DenoiseBuffer0);
                cmd.DispatchCompute(indirectDiffuseAccumulation, m_KernelFilter, numTilesX, numTilesY, 1);

                m_KernelFilter = indirectDiffuseAccumulation.FindKernel("IndirectDiffuseFilterV");

                // Vertical pass of the bilateral filter
                cmd.SetComputeIntParam(indirectDiffuseAccumulation, HDShaderIDs._RaytracingDenoiseRadius, rtEnvironement.indirectDiffuseFilterRadius);
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, m_DenoiseBuffer0);
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, m_IndirectDiffuseTexture);
                cmd.DispatchCompute(indirectDiffuseAccumulation, m_KernelFilter, numTilesX, numTilesY, 1);
            }
            break;
            }

            // If we are in deferred mode, we need to make sure to add the indirect diffuse (that we intentionally ignored during the gbuffer pass).
            // Note that this discards the texture/object ambient occlusion, but we consider that okay given that the raytraced indirect diffuse
            // is a physically correct evaluation of that quantity.
            if (hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred)
            {
                int indirectDiffuseKernel = indirectDiffuseAccumulation.FindKernel("IndirectDiffuseAccumulation");

                // Bind the source texture
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, indirectDiffuseKernel, HDShaderIDs._IndirectDiffuseTexture, m_IndirectDiffuseTexture);

                // Bind the output texture
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, indirectDiffuseKernel, HDShaderIDs._GBufferTexture[0], m_GBufferManager.GetBuffer(0));
                cmd.SetComputeTextureParam(indirectDiffuseAccumulation, indirectDiffuseKernel, HDShaderIDs._GBufferTexture[3], m_GBufferManager.GetBuffer(3));

                // Evaluate the dispatch parameters
                int areaTileSize = 8;
                int numTilesX    = (widthResolution + (areaTileSize - 1)) / areaTileSize;
                int numTilesY    = (heightResolution + (areaTileSize - 1)) / areaTileSize;

                // Add the indirect diffuse to the gbuffer
                cmd.DispatchCompute(indirectDiffuseAccumulation, indirectDiffuseKernel, numTilesX, numTilesY, 1);
            }

            return(true);
        }
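
The pixel spread angle computed above is the angle a single pixel subtends at the image center: 2 * tan(fov / 2) is the frustum's vertical extent at unit distance, dividing by the smaller resolution axis gives one pixel's extent, and atan converts that back to an angle. A standalone worked example with illustrative values:

using System;

static class PixelSpreadSketch
{
    static void Main()
    {
        double fovDeg = 60.0, width = 1920.0, height = 1080.0;
        double halfFovRad = fovDeg * Math.PI / 360.0; // same conversion as fieldOfView * PI / 360
        double angle = Math.Atan(2.0 * Math.Tan(halfFovRad) / Math.Min(width, height));
        Console.WriteLine(angle); // ~0.00107 rad, i.e. about 0.061 degrees per pixel
    }
}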
Example #12
        public void RenderShadows(ScriptableRenderContext renderContext, CommandBuffer cmd, CullingResults cullResults, HDCamera hdCamera)
        {
            // Avoid issuing any commands if there are no shadows to draw
            if (m_ShadowRequestCount == 0)
            {
                return;
            }

            // TODO remove DrawShadowSettings, lightIndex and splitData when scriptable culling is available
            ShadowDrawingSettings dss = new ShadowDrawingSettings(cullResults, 0);

            dss.useRenderingLayerMaskTest = hdCamera.frameSettings.IsEnabled(FrameSettingsField.LightLayers);

            // Clear atlas render targets and draw shadows
            using (new ProfilingSample(cmd, "Punctual Lights Shadows rendering", CustomSamplerId.RenderShadows.GetSampler()))
            {
                m_Atlas.RenderShadows(renderContext, cmd, dss);
            }

            using (new ProfilingSample(cmd, "Directional Light Shadows rendering", CustomSamplerId.RenderShadows.GetSampler()))
            {
                m_CascadeAtlas.RenderShadows(renderContext, cmd, dss);
            }

            using (new ProfilingSample(cmd, "Area Light Shadows rendering", CustomSamplerId.RenderShadows.GetSampler()))
            {
                m_AreaLightShadowAtlas.RenderShadows(renderContext, cmd, dss);
                if (m_AreaLightShadowAtlas.HasBlurredEVSM())
                {
                    m_AreaLightShadowAtlas.AreaShadowBlurMoments(cmd, hdCamera);
                }
            }

            // If the shadow algorithm is the improved moment shadows (IMS)
            if (GetDirectionaShadowAlgorithm() == DirectionalShadowAlgorithm.IMS)
            {
                m_CascadeAtlas.ComputeMomentShadows(cmd, hdCamera);
            }
        }
Example #13
        public void Render(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, CullingResults cull)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironment  = m_RaytracingManager.CurrentEnvironment();
            HDRenderPipeline        renderPipeline = m_RaytracingManager.GetRenderPipeline();
            RaytracingShader        forwardShader  = m_PipelineAsset.renderPipelineRayTracingResources.forwardRaytracing;
            Shader raytracingMask = m_PipelineAsset.renderPipelineRayTracingResources.raytracingFlagMask;

            RecursiveRendering recursiveSettings    = VolumeManager.instance.stack.GetComponent <RecursiveRendering>();
            LightCluster       lightClusterSettings = VolumeManager.instance.stack.GetComponent <LightCluster>();

            // Check the validity of the state before computing the effect
            bool invalidState = rtEnvironment == null || !recursiveSettings.enable.value ||
                                forwardShader == null || raytracingMask == null ||
                                m_PipelineResources.textures.owenScrambledTex == null || m_PipelineResources.textures.scramblingTex == null;

            // If any resource or game object is missing, we stop right away
            if (invalidState)
            {
                return;
            }

            // Grab the acceleration structure and the list of HD lights for the target camera
            RaytracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironment.raytracedLayerMask);
            HDRaytracingLightCluster        lightCluster          = m_RaytracingManager.RequestLightCluster(rtEnvironment.raytracedLayerMask);

            if (m_RaytracingFlagMaterial == null)
            {
                m_RaytracingFlagMaterial = CoreUtils.CreateEngineMaterial(raytracingMask);
            }

            // Before going into ray tracing, we need to flag which pixels need to be ray traced
            EvaluateRaytracingMask(cull, hdCamera, cmd, renderContext);

            // Define the shader pass to use for the recursive rendering pass
            cmd.SetRaytracingShaderPass(forwardShader, "ForwardDXR");

            // Set the acceleration structure for the pass
            cmd.SetRaytracingAccelerationStructure(forwardShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetRaytracingTextureParam(forwardShader, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
            cmd.SetRaytracingTextureParam(forwardShader, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, recursiveSettings.rayLength.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingMaxRecursion, recursiveSettings.maxDepth.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingCameraNearPlane, hdCamera.camera.nearClipPlane);

            // Set the data for the ray generation
            cmd.SetRaytracingTextureParam(forwardShader, HDShaderIDs._RaytracingFlagMask, m_RaytracingFlagTarget);
            cmd.SetRaytracingTextureParam(forwardShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRaytracingTextureParam(forwardShader, HDShaderIDs._CameraColorTextureRW, outputTexture);

            // Compute an approximate pixel spread angle value (in radians)
            float pixelSpreadAngle = hdCamera.camera.fieldOfView * (Mathf.PI / 180.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight);

            cmd.SetGlobalFloat(HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

            // LightLoop data
            cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
            cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
            cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
            cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
            cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, lightClusterSettings.maxNumLightsPercell.value);
            cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
            cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

            // Note: just in case, we rebind the directional light data (in case it was not already bound)
            cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, renderPipeline.directionalLightDatas);
            cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, renderPipeline.m_lightList.directionalLights.Count);

            // Set the data for the ray miss
            cmd.SetRaytracingTextureParam(forwardShader, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

            // Grab the current pipeline to output the ray-traced primary visibility debug texture
            HDRenderPipeline hdrp = (RenderPipelineManager.currentPipeline as HDRenderPipeline);

            cmd.SetRaytracingTextureParam(forwardShader, HDShaderIDs._RaytracingPrimaryDebug, m_DebugRaytracingTexture);
            hdrp.PushFullScreenDebugTexture(hdCamera, cmd, m_DebugRaytracingTexture, FullScreenDebugMode.PrimaryVisibility);

            // Run the computation
            cmd.DispatchRays(forwardShader, m_RayGenShaderName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
        }
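
Note that this pass uses a cheaper pixel spread estimate than Example #11: fovRad / min(width, height) instead of atan(2 * tan(fov / 2) / min(width, height)). Since atan(x) ≈ x for the tiny per-pixel angles involved, the two differ only through the 2 * tan(fov / 2) versus fovRad factor; a quick comparison at 60 degrees and 1080 rows:

using System;

static class PixelSpreadCompare
{
    static void Main()
    {
        double exact  = Math.Atan(2.0 * Math.Tan(60.0 * Math.PI / 360.0) / 1080.0); // ~0.00107 rad
        double approx = (60.0 * Math.PI / 180.0) / 1080.0;                          // ~0.00097 rad, ~10% lower
        Console.WriteLine($"{exact:F5} vs {approx:F5}");
    }
}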
Example #14
        public void ResizeVBufferAndUpdateProperties(HDCamera camera, uint frameIndex)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }
            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment == null || visualEnvironment.fogType != FogType.Volumetric)
            {
                return;
            }

            var controller = camera.camera.GetComponent <VolumetricLightingController>();

            if (camera.camera.cameraType == CameraType.SceneView)
            {
                // HACK: since it's not possible to add a component to a scene camera,
                // we take one from the "main" camera (if present).
                Camera mainCamera = Camera.main;

                if (mainCamera != null)
                {
                    controller = mainCamera.GetComponent <VolumetricLightingController>();
                }
            }

            if (controller == null)
            {
                return;
            }

            int  screenWidth  = (int)camera.screenSize.x;
            int  screenHeight = (int)camera.screenSize.y;
            long viewID       = camera.GetViewID();

            Debug.Assert(viewID > 0);

            int w = 0, h = 0, d = 0;

            ComputeVBufferResolutionAndScale(preset, screenWidth, screenHeight, ref w, ref h, ref d);

            VBuffer vBuffer = FindVBuffer(viewID);

            if (vBuffer != null)
            {
                VBuffer.Parameters frameParams = vBuffer.GetParameters(frameIndex);

                // Found, check resolution.
                if (w == frameParams.resolution.x &&
                    h == frameParams.resolution.y &&
                    d == frameParams.sliceCount.x)
                {
                    // The resolution matches.
                    // Depth parameters may have changed, so update those.
                    frameParams.Update(controller.parameters);
                    vBuffer.SetParameters(frameParams, frameIndex);

                    return;
                }
            }
            else
            {
                // Not found - allocate a new VBuffer and add it to the list.
                vBuffer = new VBuffer();
                m_VBuffers.Add(vBuffer);
            }

            vBuffer.Create(viewID, w, h, d, controller.parameters);
        }
Example #15
        public DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera hdCamera, CommandBuffer cmd)
        {
            DensityVolumeList densityVolumes = new DensityVolumeList();

            if (preset == VolumetricLightingPreset.Off)
            {
                return(densityVolumes);
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType != FogType.Volumetric)
            {
                return(densityVolumes);
            }

            using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
            {
                Vector3 camPosition = hdCamera.camera.transform.position;
                Vector3 camOffset   = Vector3.zero; // World-origin-relative

                if (ShaderConfig.s_CameraRelativeRendering != 0)
                {
                    camOffset = camPosition; // Camera-relative
                }

                m_VisibleVolumeBounds.Clear();
                m_VisibleVolumeData.Clear();

                // Collect all visible finite volume data, and upload it to the GPU.
                DensityVolume[] volumes = DensityVolumeManager.manager.PrepareDensityVolumeData(cmd);

                for (int i = 0; i < Math.Min(volumes.Length, k_MaxVisibleVolumeCount); i++)
                {
                    DensityVolume volume = volumes[i];

                    // TODO: cache these?
                    var obb = OrientedBBox.Create(volume.transform);

                    // Handle camera-relative rendering.
                    obb.center -= camOffset;

                    // Frustum cull on the CPU for now. TODO: do it on the GPU.
                    // TODO: account for custom near and far planes of the V-Buffer's frustum.
                    // It's typically much shorter (along the Z axis) than the camera's frustum.
                    if (GeometryUtils.Overlap(obb, hdCamera.frustum, 6, 8))
                    {
                        // TODO: cache these?
                        var data = volume.parameters.GetData();

                        m_VisibleVolumeBounds.Add(obb);
                        m_VisibleVolumeData.Add(data);
                    }
                }

                s_VisibleVolumeBoundsBuffer.SetData(m_VisibleVolumeBounds);
                s_VisibleVolumeDataBuffer.SetData(m_VisibleVolumeData);

                // Fill the struct with pointers in order to share the data with the light loop.
                densityVolumes.bounds  = m_VisibleVolumeBounds;
                densityVolumes.density = m_VisibleVolumeData;

                return(densityVolumes);
            }
        }
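
A minimal sketch of the camera-relative convention used above: when ShaderConfig.s_CameraRelativeRendering is non-zero, world-space data uploaded to the GPU is rebased so the camera sits at the origin, keeping coordinates small and preserving floating-point precision far from the world origin (Example #18 applies the same rebasing to directional shadow culling spheres):

using UnityEngine;

static class CameraRelativeSketch
{
    // Rebase a world-space position into camera-relative space; a pure
    // translation, so directions and extents are unaffected.
    public static Vector3 ToCameraRelative(Vector3 positionWS, Vector3 cameraPosWS, bool cameraRelativeRendering)
    {
        return cameraRelativeRendering ? positionWS - cameraPosWS : positionWS;
    }
}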
Example #16
        // Combines specular lighting and diffuse lighting with subsurface scattering.
        // If the frame is MSAA then, because we do not currently have read/write access to the stencil
        // buffer of the MSAA target, we need to keep this pass MSAA as well. However, a compute shader
        // cannot output to an MSAA target, so we blend the non-MSAA target into the MSAA one.
        public void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle colorBufferRT,
                                             RTHandleSystem.RTHandle diffuseBufferRT, RTHandleSystem.RTHandle depthStencilBufferRT, RTHandleSystem.RTHandle depthTextureRT)
        {
            if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.SubsurfaceScattering))
            {
                return;
            }

            // TODO: For MSAA, at least initially, we can only support Jimenez, because we can't
            // create MSAA + UAV render targets.

            using (new ProfilingSample(cmd, "Subsurface Scattering", CustomSamplerId.SubsurfaceScattering.GetSampler()))
            {
                // For Jimenez we always need an extra buffer, for Disney it depends on platform
                if (NeedTemporarySubsurfaceBuffer() || hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
                {
                    // Clear the SSS filtering target
                    using (new ProfilingSample(cmd, "Clear SSS filtering target", CustomSamplerId.ClearSSSFilteringTarget.GetSampler()))
                    {
                        HDUtils.SetRenderTarget(cmd, m_CameraFilteringBuffer, ClearFlag.Color, Color.clear);
                    }
                }

                using (new ProfilingSample(cmd, "HTile for SSS", CustomSamplerId.HTileForSSS.GetSampler()))
                {
                    // Currently, Unity does not offer a way to access the GCN HTile even on PS4 and Xbox One.
                    // Therefore, it's computed in a pixel shader, and optimized to only contain the SSS bit.

                    // Clear the HTile texture. TODO: move this to ClearBuffers(). Clear operations must be batched!
                    HDUtils.SetRenderTarget(cmd, m_HTile, ClearFlag.Color, Color.clear);

                    HDUtils.SetRenderTarget(cmd, depthStencilBufferRT); // No need for color buffer here
                    cmd.SetRandomWriteTarget(1, m_HTile);               // This needs to be done AFTER SetRenderTarget
                    // Generate HTile for the split lighting stencil usage. Don't write into the stencil texture.
                    // Use shader pass 2 => "Pass 2 - Export HTILE for stencilRef to output"
                    CoreUtils.DrawFullScreen(cmd, m_CopyStencilForSplitLighting, null, 2);
                    cmd.ClearRandomWriteTargets();
                }

                unsafe
                {
                    // Warning: Unity is not able to losslessly transfer integers larger than 2^24 to the shader system.
                    // Therefore, we bitcast uint to float in C#, and bitcast back to uint in the shader.
                    uint texturingModeFlags = this.texturingModeFlags;
                    cmd.SetComputeFloatParam(m_SubsurfaceScatteringCS, HDShaderIDs._TexturingModeFlags, *(float *)&texturingModeFlags);
                }

                cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._WorldScales, worldScales);
                cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._FilterKernels, filterKernels);
                cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._ShapeParams, shapeParams);
                cmd.SetComputeFloatParams(m_SubsurfaceScatteringCS, HDShaderIDs._DiffusionProfileHashTable, diffusionProfileHashes);

                int sssKernel = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA) ? m_SubsurfaceScatteringKernelMSAA : m_SubsurfaceScatteringKernel;

                cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._DepthTexture, depthTextureRT);
                cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._SSSHTile, m_HTile);
                cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._IrradianceSource, diffuseBufferRT);

                for (int i = 0; i < sssBufferCount; ++i)
                {
                    cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._SSSBufferTexture[i], GetSSSBuffer(i));
                }

                int numTilesX = ((int)hdCamera.screenSize.x + 15) / 16;
                int numTilesY = ((int)hdCamera.screenSize.y + 15) / 16;
                int numTilesZ = hdCamera.viewCount;

                if (NeedTemporarySubsurfaceBuffer() || hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
                {
                    cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._CameraFilteringBuffer, m_CameraFilteringBuffer);

                    // Perform the SSS filtering pass which fills 'm_CameraFilteringBufferRT'.
                    cmd.DispatchCompute(m_SubsurfaceScatteringCS, sssKernel, numTilesX, numTilesY, numTilesZ);

                    cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, m_CameraFilteringBuffer);  // Cannot set a RT on a material

                    // Additively blend diffuse and specular lighting into 'm_CameraColorBufferRT'.
                    HDUtils.DrawFullScreen(cmd, m_CombineLightingPass, colorBufferRT, depthStencilBufferRT);
                }
                else
                {
                    cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._CameraColorTexture, colorBufferRT);

                    // Perform the SSS filtering pass which performs an in-place update of 'colorBuffer'.
                    cmd.DispatchCompute(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, numTilesX, numTilesY, numTilesZ);
                }
            }
        }
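
The unsafe block above bitcasts the flag bits into a float so they survive the trip to the shader, which bitcasts them back. The same round trip can be sketched without unsafe code via BitConverter, contrasted with a plain numeric cast that rounds once the value exceeds 2^24:

using System;

static class BitcastSketch
{
    static void Main()
    {
        uint flags = 0x89ABCDEFu; // well above 2^24

        // Reinterpret the bits (what the *(float*)& cast does): lossless round trip.
        float asFloat   = BitConverter.ToSingle(BitConverter.GetBytes(flags), 0);
        uint  roundTrip = BitConverter.ToUInt32(BitConverter.GetBytes(asFloat), 0);
        Console.WriteLine(flags == roundTrip);          // True: every bit preserved

        // A numeric conversion instead rounds to the float's 24-bit mantissa.
        Console.WriteLine((uint)(float)flags == flags); // False: precision lost
    }
}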
Example #17
        public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType != FogType.Volumetric)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volume Voxelization"))
            {
                int numVisibleVolumes = m_VisibleVolumeBounds.Count;

                if (numVisibleVolumes == 0)
                {
                    // Clear the render target instead of running the shader.
                    // Note: the clear must take the global fog into account!
                    // CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
                    // return;

                    // Clearing 3D textures does not seem to work!
                    // Work around it by running the full shader with 0 density.
                }

                bool enableClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;

                int kernel = m_VolumeVoxelizationCS.FindKernel(enableClustered
                    ? "VolumeVoxelizationClustered"
                    : "VolumeVoxelizationBruteforce");

                var     frameParams = hdCamera.vBufferParams[0];
                Vector4 resolution  = frameParams.resolution;
                float   vFoV        = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;

                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);

                Texture3D volumeAtlas           = DensityVolumeManager.manager.volumeAtlas.volumeAtlas;
                Vector2   volumeAtlasDimensions = new Vector2(0.0f, 0.0f);

                if (volumeAtlas != null)
                {
                    volumeAtlasDimensions.x = volumeAtlas.width / volumeAtlas.depth; // 1 / number of textures
                    volumeAtlasDimensions.y = 1.0f / volumeAtlas.width;
                }

                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeMaskAtlas, volumeAtlas);

                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
                cmd.SetComputeVectorParam(m_VolumeVoxelizationCS, HDShaderIDs._VolumeMaskDimensions, volumeAtlasDimensions);

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
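
The (w + 7) / 8 in the dispatch above (and in the other compute dispatches throughout this listing) is integer ceiling division: it requests enough 8-wide thread groups to cover every pixel, with the last group only partially occupied. The general form as a small helper:

static class DispatchMath
{
    // Smallest number of groups of size groupSize covering x items.
    public static int CeilDiv(int x, int groupSize)
    {
        return (x + groupSize - 1) / groupSize;
    }
}
// e.g. CeilDiv(1920, 8) == 240 and CeilDiv(1921, 8) == 241.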
Example #18
        // Must return the first executed shadow request
        public int UpdateShadowRequest(HDCamera hdCamera, HDShadowManager manager, VisibleLight visibleLight, CullingResults cullResults, int lightIndex, out int shadowRequestCount)
        {
            int     firstShadowRequestIndex = -1;
            Vector3 cameraPos = hdCamera.camera.transform.position;

            shadowRequestCount = 0;

            int count = GetShadowRequestCount();

            for (int index = 0; index < count; index++)
            {
                var       shadowRequest      = shadowRequests[index];
                Matrix4x4 invViewProjection  = Matrix4x4.identity;
                int       shadowRequestIndex = m_ShadowRequestIndices[index];
                Vector2   viewportSize       = manager.GetReservedResolution(shadowRequestIndex);

                if (shadowRequestIndex == -1)
                {
                    continue;
                }

                // Write per-light-type matrices, split data and culling parameters
                switch (m_Light.type)
                {
                case LightType.Point:
                    HDShadowUtils.ExtractPointLightData(
                        hdCamera, m_Light.type, visibleLight, viewportSize, shadowNearPlane,
                        m_ShadowData.normalBiasMax, (uint)index, out shadowRequest.view,
                        out invViewProjection, out shadowRequest.projection,
                        out shadowRequest.deviceProjection, out shadowRequest.splitData
                        );
                    break;

                case LightType.Spot:
                    HDShadowUtils.ExtractSpotLightData(
                        hdCamera, m_Light.type, spotLightShape, shadowNearPlane, aspectRatio, shapeWidth,
                        shapeHeight, visibleLight, viewportSize, m_ShadowData.normalBiasMax,
                        out shadowRequest.view, out invViewProjection, out shadowRequest.projection,
                        out shadowRequest.deviceProjection, out shadowRequest.splitData
                        );
                    break;

                case LightType.Directional:
                    Vector4 cullingSphere;
                    float   nearPlaneOffset = QualitySettings.shadowNearPlaneOffset;

                    HDShadowUtils.ExtractDirectionalLightData(
                        visibleLight, viewportSize, (uint)index, m_ShadowSettings.cascadeShadowSplitCount,
                        m_ShadowSettings.cascadeShadowSplits, nearPlaneOffset, cullResults, lightIndex,
                        out shadowRequest.view, out invViewProjection, out shadowRequest.projection,
                        out shadowRequest.deviceProjection, out shadowRequest.splitData
                        );

                    cullingSphere = shadowRequest.splitData.cullingSphere;

                    // Camera relative for directional light culling sphere
                    if (ShaderConfig.s_CameraRelativeRendering != 0)
                    {
                        cullingSphere.x -= cameraPos.x;
                        cullingSphere.y -= cameraPos.y;
                        cullingSphere.z -= cameraPos.z;
                    }

                    manager.UpdateCascade(index, cullingSphere, m_ShadowSettings.cascadeShadowBorders[index]);
                    break;

                case LightType.Area:
                    HDShadowUtils.ExtractAreaLightData(visibleLight, lightTypeExtent, out shadowRequest.view, out invViewProjection, out shadowRequest.projection, out shadowRequest.deviceProjection, out shadowRequest.splitData);
                    break;
                }

                // Assign all settings common to every light
                SetCommonShadowRequestSettings(shadowRequest, cameraPos, invViewProjection, viewportSize, lightIndex);

                manager.UpdateShadowRequest(shadowRequestIndex, shadowRequest);

                // Store the first shadow request id to return it
                if (firstShadowRequestIndex == -1)
                {
                    firstShadowRequestIndex = shadowRequestIndex;
                }

                shadowRequestCount++;
            }

            return(firstShadowRequestIndex);
        }
Example #19
 public override void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd)
 {
     PushShaderParametersCommon(hdCamera, cmd, FogType.Linear);
     cmd.SetGlobalVector(m_LinearFogParam, new Vector4(fogStart, 1.0f / (fogEnd - fogStart), fogHeightEnd, 1.0f / (fogHeightEnd - fogHeightStart)));
 }
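
The Vector4 above packs (fogStart, 1 / (fogEnd - fogStart), fogHeightEnd, 1 / (fogHeightEnd - fogHeightStart)); storing the reciprocals keeps the per-pixel shader cost to a multiply and a saturate. The HLSL side is not part of this listing, but a typical linear fog factor computed from such packed parameters would look like this CPU-side sketch (an assumption, not the actual shader):

using UnityEngine;

static class LinearFogSketch
{
    // 0 at fogStart, ramping linearly to 1 at fogEnd.
    public static float DistanceFogFactor(float distance, Vector4 fogParam)
    {
        // fogParam.x = fogStart, fogParam.y = 1 / (fogEnd - fogStart)
        return Mathf.Clamp01((distance - fogParam.x) * fogParam.y);
    }
}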
Example #20
        public void RenderLightVolumes(CommandBuffer cmd, HDCamera hdCamera, CullingResults cullResults, LightingDebugSettings lightDebugSettings)
        {
            // Clear the buffers
            HDUtils.SetRenderTarget(cmd, hdCamera, m_ColorAccumulationBuffer, ClearFlag.Color, Color.black);
            HDUtils.SetRenderTarget(cmd, hdCamera, m_LightCountBuffer, ClearFlag.Color, Color.black);
            HDUtils.SetRenderTarget(cmd, hdCamera, m_DebugLightVolumesTexture, ClearFlag.Color, Color.black);

            // Set the render target array
            cmd.SetRenderTarget(m_RTIDs, m_DepthBuffer);

            // First of all let's do the regions for the light sources (we only support Punctual and Area)
            int numLights = cullResults.visibleLights.Length;

            for (int lightIdx = 0; lightIdx < numLights; ++lightIdx)
            {
                // Let's build the light's bounding sphere matrix
                Light currentLegacyLight = cullResults.visibleLights[lightIdx].light;
                if (currentLegacyLight == null)
                {
                    continue;
                }
                HDAdditionalLightData currentHDRLight = currentLegacyLight.GetComponent <HDAdditionalLightData>();
                if (currentHDRLight == null)
                {
                    continue;
                }

                Matrix4x4 positionMat = Matrix4x4.Translate(currentLegacyLight.transform.position);

                if (currentLegacyLight.type == LightType.Point || currentLegacyLight.type == LightType.Area)
                {
                    m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentLegacyLight.range, currentLegacyLight.range, currentLegacyLight.range));
                    switch (currentHDRLight.lightTypeExtent)
                    {
                    case LightTypeExtent.Punctual:
                    {
                        m_MaterialProperty.SetColor(_ColorShaderID, new Color(0.0f, 0.5f, 0.0f, 1.0f));
                        m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                        cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                    }
                    break;

                    case LightTypeExtent.Rectangle:
                    {
                        m_MaterialProperty.SetColor(_ColorShaderID, new Color(0.0f, 1.0f, 1.0f, 1.0f));
                        m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                        cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                    }
                    break;

                    case LightTypeExtent.Tube:
                    {
                        m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.0f, 0.5f, 1.0f));
                        m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                        cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                    }
                    break;

                    default:
                        break;
                    }
                }
                else if (currentLegacyLight.type == LightType.Spot)
                {
                    if (currentHDRLight.spotLightShape == SpotLightShape.Cone)
                    {
                        float bottomRadius = Mathf.Tan(currentLegacyLight.spotAngle * Mathf.PI / 360.0f) * currentLegacyLight.range;
                        m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                        m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(bottomRadius, bottomRadius, currentLegacyLight.range));
                        m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                        cmd.DrawMesh(DebugShapes.instance.RequestConeMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
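                        // Worked example of the math above: spotAngle is the full apex angle in
                        // degrees, so the half angle in radians is spotAngle * PI / 360. For
                        // spotAngle = 90 and range = 10, bottomRadius = tan(45 deg) * 10 = 10,
                        // i.e. the debug cone's base radius equals its height.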
                    }
                    else if (currentHDRLight.spotLightShape == SpotLightShape.Box)
                    {
                        m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                        m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDRLight.shapeWidth, currentHDRLight.shapeHeight, currentLegacyLight.range));
                        m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, currentLegacyLight.range / 2.0f));
                        cmd.DrawMesh(DebugShapes.instance.RequestBoxMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                    }
                    else if (currentHDRLight.spotLightShape == SpotLightShape.Pyramid)
                    {
                        float bottomWidth = Mathf.Tan(currentLegacyLight.spotAngle * Mathf.PI / 360.0f) * currentLegacyLight.range;
                        m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                        m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDRLight.aspectRatio * bottomWidth * 2, bottomWidth * 2, currentLegacyLight.range));
                        m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                        cmd.DrawMesh(DebugShapes.instance.RequestPyramidMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                    }
                }
            }

            // Now let's do the same but for reflection probes
            int numProbes = cullResults.visibleReflectionProbes.Length;

            for (int probeIdx = 0; probeIdx < numProbes; ++probeIdx)
            {
                // Let's build the light's bounding sphere matrix
                ReflectionProbe            currentLegacyProbe = cullResults.visibleReflectionProbes[probeIdx].reflectionProbe;
                HDAdditionalReflectionData currentHDProbe     = currentLegacyProbe.GetComponent <HDAdditionalReflectionData>();

                if (!currentHDProbe)
                {
                    continue;
                }

                // Use a fresh property block per probe; renamed so it doesn't shadow the m_MaterialProperty used above and after the loop.
                MaterialPropertyBlock probeMaterialProperty = new MaterialPropertyBlock();
                Mesh targetMesh = null;
                if (currentHDProbe.influenceVolume.shape == InfluenceShape.Sphere)
                {
                    probeMaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDProbe.influenceVolume.sphereRadius, currentHDProbe.influenceVolume.sphereRadius, currentHDProbe.influenceVolume.sphereRadius));
                    targetMesh = DebugShapes.instance.RequestSphereMesh();
                }
                else
                {
                    probeMaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDProbe.influenceVolume.boxSize.x, currentHDProbe.influenceVolume.boxSize.y, currentHDProbe.influenceVolume.boxSize.z));
                    targetMesh = DebugShapes.instance.RequestBoxMesh();
                }

                probeMaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 1.0f, 0.0f, 1.0f));
                probeMaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                Matrix4x4 positionMat = Matrix4x4.Translate(currentLegacyProbe.transform.position);
                cmd.DrawMesh(targetMesh, positionMat, m_DebugLightVolumeMaterial, 0, 0, probeMaterialProperty);
            }

            // Define which kernel to use based on the lightloop options
            int targetKernel = lightDebugSettings.lightVolumeDebugByCategory == LightLoop.LightVolumeDebug.ColorAndEdge ? m_DebugLightVolumeColorsKernel : m_DebugLightVolumeGradientKernel;

            // Set the input params for the compute
            cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugLightCountBufferShaderID, m_LightCountBuffer);
            cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugColorAccumulationBufferShaderID, m_ColorAccumulationBuffer);
            cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugLightVolumesTextureShaderID, m_DebugLightVolumesTexture);
            cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _ColorGradientTextureShaderID, m_ColorGradientTexture);
            cmd.SetComputeIntParam(m_DebugLightVolumeCompute, _MaxDebugLightCountShaderID, (int)lightDebugSettings.maxDebugLightCount);

            // Texture dimensions
            int texWidth  = m_ColorAccumulationBuffer.rt.width;
            int texHeight = m_ColorAccumulationBuffer.rt.height;

            // Dispatch the compute
            int lightVolumesTileSize = 8;
            int numTilesX            = (texWidth + (lightVolumesTileSize - 1)) / lightVolumesTileSize;
            int numTilesY            = (texHeight + (lightVolumesTileSize - 1)) / lightVolumesTileSize;

            cmd.DispatchCompute(m_DebugLightVolumeCompute, targetKernel, numTilesX, numTilesY, 1);

            // Blit this into the camera target
            cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget);
            m_MaterialProperty.SetTexture(HDShaderIDs._BlitTexture, m_DebugLightVolumesTexture);
            cmd.DrawProcedural(Matrix4x4.identity, m_DebugLightVolumeMaterial, 1, MeshTopology.Triangles, 3, 1, m_MaterialProperty);
        }
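
The compute dispatch above sizes its thread-group grid with integer ceiling division, so partially covered tiles along the right and bottom edges still receive a thread group. A minimal standalone sketch of the pattern (the resolutions below are illustrative, not taken from the snippet):

// Ceiling division in integer math: tiles = ceil(size / tileSize).
static int TileCount(int size, int tileSize)
{
    return (size + tileSize - 1) / tileSize;
}

// For an 8x8 tile size:
//   TileCount(1921, 8) == 241  (241 * 8 = 1928 >= 1921, the edge tile is partially covered)
//   TileCount(1080, 8) == 135  (135 * 8 = 1080, exact fit)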
Example #21
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor)
 {
     cmd.SetRenderTarget(colorBuffers, depthBuffer);
     SetViewportAndClear(cmd, camera, depthBuffer, clearFlag, clearColor);
 }
 public void RenderSky(HDCamera camera, Light sunLight, RTHandle colorBuffer, RTHandle depthBuffer, CommandBuffer cmd)
 {
     m_SkyRenderingContext.RenderSky(m_VisualSky, camera, sunLight, colorBuffer, depthBuffer, cmd);
 }
Example #23
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
 {
     SetRenderTarget(cmd, camera, destination);
     cmd.SetViewport(destViewport);
     BlitTexture(cmd, source, destination, camera.viewportScale, mipLevel, bilinear);
 }
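
A hypothetical call site for the overload above, blitting a camera-scaled texture into the lower-left quadrant of the destination. The names cmd, hdCamera, m_Source, and m_Dest are assumptions for illustration, and the helper is assumed to live on HDUtils alongside the other HDCamera-aware utilities:

// All names here are illustrative; only BlitCameraTexture itself comes from the snippet above.
Rect lowerLeftQuadrant = new Rect(0, 0, m_Dest.rt.width * 0.5f, m_Dest.rt.height * 0.5f);
HDUtils.BlitCameraTexture(cmd, hdCamera, m_Source, m_Dest, lowerLeftQuadrant);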
Example #24
        public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings frameSettings)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volumetric Lighting"))
            {
                VBuffer vBuffer = FindVBuffer(camera.GetViewID());
                Debug.Assert(vBuffer != null);

                if (HomogeneousFog.GetGlobalFogComponent() == null)
                {
                    // Ideally, we would clear the render target and skip the pass:
                    // CoreUtils.SetRenderTarget(cmd, GetVBufferLightingIntegral(viewOffset), ClearFlag.Color, CoreUtils.clearColorAllBlack);
                    // return;

                    // However, clearing 3D textures does not seem to work!
                    // As a workaround, we fall through and run the full shader with no volume.
                }

                bool enableClustered    = frameSettings.lightLoopSettings.enableTileAndCluster;
                bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

                int kernel;

                if (enableReprojection)
                {
                    // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj"
                                                                           : "VolumetricLightingAllLightsReproj");
                }
                else
                {
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered"
                                                                           : "VolumetricLightingAllLights");
                }

                int     w = 0, h = 0, d = 0;
                Vector2 scale = ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
                float   vFoV  = camera.camera.fieldOfView * Mathf.Deg2Rad;

                // Compose the matrix which allows us to compute the world space view direction.
                // Compute it using the scaled resolution to account for the visible area of the VBuffer.
                Vector4   scaledRes = new Vector4(w * scale.x, h * scale.y, 1.0f / (w * scale.x), 1.0f / (h * scale.y));
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, scaledRes, camera.viewMatrix, false);

                camera.SetupComputeShader(m_VolumetricLightingCS, cmd);

                Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

                // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
                // Each of them is the centroid of the interval of length 2/14.
                // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
                // That way, the running average position is close to 0.5.
                // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
                // |   |   |   | o |   |   |   |
                // |   | o |   | x |   |   |   |
                // |   | x |   | x |   | o |   |
                // |   | x | o | x |   | x |   |
                // |   | x | x | x | o | x |   |
                // | o | x | x | x | x | x |   |
                // | x | x | x | x | x | x | o |
                // | x | x | x | x | x | x | x |
                float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

                int     rfc         = Time.renderedFrameCount;
                int     sampleIndex = rfc % 7;
                Vector4 offset      = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);

                // TODO: set 'm_VolumetricLightingPreset'.
                cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
                cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
                if (enableReprojection)
                {
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer()); // Write
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer());   // Read
                }

                // The shader defines GROUP_SIZE_1D = 16.
                cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 15) / 16, (h + 15) / 16, 1);
            }
        }
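
The comment block above the zSeq table claims two properties: after the leading 7/14, the samples come in {small, large} pairs that sum to 1, which keeps the running average of the sequence pinned near 0.5. A small standalone check (plain C#, no Unity dependency):

// Verifies the pairing and running-average properties claimed for zSeq.
float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

// 1) Consecutive pairs after the first element sum to 1: (3+11)/14, (5+9)/14, (1+13)/14.
for (int i = 1; i + 1 < zSeq.Length; i += 2)
    System.Diagnostics.Debug.Assert(System.Math.Abs(zSeq[i] + zSeq[i + 1] - 1.0f) < 1e-6f);

// 2) The running average returns to exactly 0.5 after every odd sample:
//    0.5, 0.357, 0.5, 0.464, 0.5, 0.429, 0.5
float sum = 0.0f;
for (int i = 0; i < zSeq.Length; ++i)
{
    sum += zSeq[i];
    System.Console.WriteLine(sum / (i + 1));
}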
Example #25
        // Returns mouse click coordinates: (x,y) in pixels and (z,w) normalized inside the render target (not the viewport)
        public static Vector4 GetMouseClickCoordinates(HDCamera camera)
        {
            Vector2 mousePixelCoord = MousePositionDebug.instance.GetMouseClickPosition(camera.screenSize.y);

            return new Vector4(mousePixelCoord.x, mousePixelCoord.y, camera.viewportScale.x * mousePixelCoord.x / camera.screenSize.x, camera.viewportScale.y * mousePixelCoord.y / camera.screenSize.y);
        }
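
A hypothetical consumer of the helper above, forwarding the coordinates to shaders. The property name _MouseClickCoordinates is an assumption for illustration, not part of the snippet:

// xy: pixel coordinates; zw: coordinates normalized against the render target size.
Vector4 mouseCoords = GetMouseClickCoordinates(hdCamera);
cmd.SetGlobalVector("_MouseClickCoordinates", mouseCoords); // hypothetical shader property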
Example #26
        public void AreaShadowBlurMoments(CommandBuffer cmd, HDCamera hdCamera)
        {
            ComputeShader shadowBlurMomentsCS = m_RenderPipelineResources.shaders.evsmBlurCS;

            if (!m_isBlurredEVSM || shadowBlurMomentsCS == null)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Render & Blur Moment Shadows", CustomSamplerId.RenderShadows.GetSampler()))
            {
                int generateAndBlurMomentsKernel = shadowBlurMomentsCS.FindKernel("ConvertAndBlur");
                int blurMomentsKernel            = shadowBlurMomentsCS.FindKernel("Blur");
                int copyMomentsKernel            = shadowBlurMomentsCS.FindKernel("CopyMoments");

                Vector4[] blurWeights = new Vector4[2];

                // This is a 9-tap filter: a Gaussian with a standard deviation of 3. That standard deviation with this few taps
                // probably cuts the tail of the Gaussian a bit too much, and it is a very wide curve, but it seems to work fine for our use case.
                blurWeights[0].x = 0.1531703f;
                blurWeights[0].y = 0.1448929f;
                blurWeights[0].z = 0.1226492f;
                blurWeights[0].w = 0.0929025f;
                blurWeights[1].x = 0.06297021f;

                cmd.SetComputeTextureParam(shadowBlurMomentsCS, generateAndBlurMomentsKernel, HDShaderIDs._DepthTexture, m_Atlas);
                cmd.SetComputeVectorArrayParam(shadowBlurMomentsCS, HDShaderIDs._BlurWeightsStorage, blurWeights);

                // Record which of the two moment textures holds the final version of each request, for the patch-up pass at the end.
                int[] finalAtlasTexture = new int[m_ShadowRequests.Count];

                int requestIdx = 0;
                foreach (var shadowRequest in m_ShadowRequests)
                {
                    using (new ProfilingSample(cmd, "EVSM conversion and blur", CustomSamplerId.RenderShadows.GetSampler()))
                    {
                        int downsampledWidth  = Mathf.CeilToInt(shadowRequest.atlasViewport.width * 0.5f);
                        int downsampledHeight = Mathf.CeilToInt(shadowRequest.atlasViewport.height * 0.5f);

                        Vector2 DstRectOffset = new Vector2(shadowRequest.atlasViewport.min.x * 0.5f, shadowRequest.atlasViewport.min.y * 0.5f);

                        cmd.SetComputeTextureParam(shadowBlurMomentsCS, generateAndBlurMomentsKernel, HDShaderIDs._OutputTexture, GetMomentRT());
                        cmd.SetComputeVectorParam(shadowBlurMomentsCS, HDShaderIDs._SrcRect, new Vector4(shadowRequest.atlasViewport.min.x, shadowRequest.atlasViewport.min.y, shadowRequest.atlasViewport.width, shadowRequest.atlasViewport.height));
                        cmd.SetComputeVectorParam(shadowBlurMomentsCS, HDShaderIDs._DstRect, new Vector4(DstRectOffset.x, DstRectOffset.y, 1.0f / width, 1.0f / height)); // 'width' and 'height' are the atlas dimensions (class members not shown in this snippet)
                        cmd.SetComputeFloatParam(shadowBlurMomentsCS, HDShaderIDs._EVSMExponent, shadowRequest.evsmParams.x);

                        int dispatchSizeX = (downsampledWidth + 7) / 8;
                        int dispatchSizeY = (downsampledHeight + 7) / 8;

                        cmd.DispatchCompute(shadowBlurMomentsCS, generateAndBlurMomentsKernel, dispatchSizeX, dispatchSizeY, 1);

                        cmd.SetComputeVectorParam(shadowBlurMomentsCS, HDShaderIDs._SrcRect, new Vector4(DstRectOffset.x, DstRectOffset.y, downsampledWidth, downsampledHeight));
                        for (int i = 0; i < shadowRequest.evsmParams.w; ++i)
                        {
                            m_CurrentAtlasMomentSurface = (m_CurrentAtlasMomentSurface + 1) & 1;
                            cmd.SetComputeTextureParam(shadowBlurMomentsCS, blurMomentsKernel, HDShaderIDs._InputTexture, GetMomentRTCopy());
                            cmd.SetComputeTextureParam(shadowBlurMomentsCS, blurMomentsKernel, HDShaderIDs._OutputTexture, GetMomentRT());

                            cmd.DispatchCompute(shadowBlurMomentsCS, blurMomentsKernel, dispatchSizeX, dispatchSizeY, 1);
                        }

                        finalAtlasTexture[requestIdx++] = m_CurrentAtlasMomentSurface;
                    }
                }

                // Patch up the atlas for the requests that, due to a different number of blur passes, remained in the copy texture.
                for (int i = 0; i < m_ShadowRequests.Count; ++i)
                {
                    if (m_CurrentAtlasMomentSurface != finalAtlasTexture[i])
                    {
                        using (new ProfilingSample(cmd, "Copy into main atlas.", CustomSamplerId.RenderShadows.GetSampler()))
                        {
                            var shadowRequest     = m_ShadowRequests[i];
                            int downsampledWidth  = Mathf.CeilToInt(shadowRequest.atlasViewport.width * 0.5f);
                            int downsampledHeight = Mathf.CeilToInt(shadowRequest.atlasViewport.height * 0.5f);

                            cmd.SetComputeVectorParam(shadowBlurMomentsCS, HDShaderIDs._SrcRect, new Vector4(shadowRequest.atlasViewport.min.x * 0.5f, shadowRequest.atlasViewport.min.y * 0.5f, downsampledWidth, downsampledHeight));
                            cmd.SetComputeTextureParam(shadowBlurMomentsCS, copyMomentsKernel, HDShaderIDs._InputTexture, GetMomentRTCopy());
                            cmd.SetComputeTextureParam(shadowBlurMomentsCS, copyMomentsKernel, HDShaderIDs._OutputTexture, GetMomentRT());

                            int dispatchSizeX = (downsampledWidth + 7) / 8;
                            int dispatchSizeY = (downsampledHeight + 7) / 8;

                            cmd.DispatchCompute(shadowBlurMomentsCS, copyMomentsKernel, dispatchSizeX, dispatchSizeY, 1);
                        }
                    }
                }
            }
        }
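
The hard-coded blurWeights above correspond to a 9-tap Gaussian with a standard deviation of 3, normalized so all nine taps sum to 1; only the center tap and the four one-sided taps are stored, since the kernel is symmetric. A minimal sketch that reproduces the five values:

// Reproduces the stored weights: w(x) = exp(-x^2 / (2 * sigma^2)), normalized over taps x = -4..4.
double sigma = 3.0;
double norm = 0.0;
for (int x = -4; x <= 4; ++x)
    norm += System.Math.Exp(-(x * x) / (2.0 * sigma * sigma));

double[] oneSided = new double[5];
for (int x = 0; x <= 4; ++x)
    oneSided[x] = System.Math.Exp(-(x * x) / (2.0 * sigma * sigma)) / norm;

// oneSided == { 0.1531703, 0.1448929, 0.1226492, 0.0929025, 0.0629702 } (to seven digits),
// matching blurWeights[0].xyzw and blurWeights[1].x above.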
Example #27
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
 {
     CoreUtils.SetRenderTarget(cmd, colorBuffer, depthBuffer, miplevel, cubemapFace, depthSlice);
     SetViewportAndClear(cmd, camera, colorBuffer, clearFlag, clearColor);
 }
 public void RenderSky(HDCamera camera, Light sunLight, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
 {
     m_SkyRenderingContext.RenderSky(m_VisualSky, camera, sunLight, colorBuffer, depthBuffer, debugSettings, cmd);
 }
Example #29
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag = ClearFlag.None)
 {
     CoreUtils.SetRenderTarget(cmd, colorBuffers, depthBuffer); // Don't clear here, viewport needs to be set before we do.
     SetViewportAndClear(cmd, camera, depthBuffer, clearFlag, CoreUtils.clearColorAllBlack);
 }
Example #30
        public void UpdateEnvironment(HDCamera camera, Light sunLight, CommandBuffer cmd)
        {
            // We need one frame of delay for this update to work, since DynamicGI.UpdateEnvironment executes directly while the render loop does not (so we need to wait for the sky texture to be rendered first)
            if (m_NeedLowLevelUpdateEnvironment)
            {
                using (new Utilities.ProfilingSample("DynamicGI.UpdateEnvironment", cmd))
                {
                    // TODO: Properly send the cubemap to Enlighten. Currently workaround is to set the cubemap in a Skybox/cubemap material
                    m_StandardSkyboxMaterial.SetTexture("_Tex", m_SkyboxCubemapRT);
                    RenderSettings.skybox              = m_StandardSkyboxMaterial;                 // Set this material as the default to be used by RenderSettings
                    RenderSettings.ambientIntensity    = 1.0f;                                     // fix this to 1, this parameter should not exist!
                    RenderSettings.ambientMode         = UnityEngine.Rendering.AmbientMode.Skybox; // Force skybox for our HDRI
                    RenderSettings.reflectionIntensity = 1.0f;
                    RenderSettings.customReflection    = null;
                    DynamicGI.UpdateEnvironment();

                    m_NeedLowLevelUpdateEnvironment = false;
                }
            }

            if (IsSkyValid())
            {
                m_CurrentUpdateTime += Time.deltaTime;

                m_BuiltinParameters.commandBuffer = cmd;
                m_BuiltinParameters.sunLight      = sunLight;

                if (
                    m_UpdatedFramesRequired > 0 ||
                    (skySettings.updateMode == EnvironementUpdateMode.OnChanged && skySettings.GetHash() != m_SkyParametersHash) ||
                    (skySettings.updateMode == EnvironementUpdateMode.Realtime && m_CurrentUpdateTime > skySettings.updatePeriod)
                    )
                {
                    using (new Utilities.ProfilingSample("Sky Environment Pass", cmd))
                    {
                        using (new Utilities.ProfilingSample("Update Env: Generate Lighting Cubemap", cmd))
                        {
                            // Render the sky into a cubemap - this doesn't happen every frame and can be controlled.
                            // Note that m_SkyboxCubemapRT is created with auto-generated mipmaps, which means our mipmaps are also correctly box-filtered for importance sampling.
                            if (m_SkySettings.lightingOverride == null)
                            {
                                RenderSkyToCubemap(m_BuiltinParameters, skySettings, m_SkyboxCubemapRT);
                            }
                            // If the user overrides the lighting, we already have a cubemap ready, but we still need to blit it to handle a potential resize and to generate proper mipmaps for Enlighten.
                            else
                            {
                                BlitCubemap(cmd, m_SkySettings.lightingOverride, m_SkyboxCubemapRT);
                            }
                        }

                        // Convolve downsampled cubemap
                        RenderCubemapGGXConvolution(cmd, m_BuiltinParameters, skySettings, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);

                        m_NeedLowLevelUpdateEnvironment = true;
                        m_UpdatedFramesRequired--;
                        m_SkyParametersHash = skySettings.GetHash();
                        m_CurrentUpdateTime = 0.0f;
                    }
                }
            }
            else
            {
                if (m_SkyParametersHash != 0)
                {
                    using (new Utilities.ProfilingSample("Reset Sky Environment", cmd))
                    {
                        // Clear the temp cubemap, redo the GGX convolution from black, and feed the result to Enlighten as the default light probe.
                        Utilities.ClearCubemap(cmd, m_SkyboxCubemapRT, Color.black);
                        RenderCubemapGGXConvolution(cmd, m_BuiltinParameters, skySettings, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);

                        m_SkyParametersHash             = 0;
                        m_NeedLowLevelUpdateEnvironment = true;
                    }
                }
            }
        }
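
The three-way condition in the middle of UpdateEnvironment is the heart of the update policy: re-render while forced refresh frames remain, when an OnChanged sky's hash differs from the cached one, or when a Realtime sky's update period has elapsed. Restated as a standalone predicate (a sketch; the helper name is hypothetical, while the fields and enum are the ones used above):

// Hypothetical helper mirroring the update condition in UpdateEnvironment.
bool SkyEnvironmentNeedsUpdate()
{
    return m_UpdatedFramesRequired > 0
        || (skySettings.updateMode == EnvironementUpdateMode.OnChanged && skySettings.GetHash() != m_SkyParametersHash)
        || (skySettings.updateMode == EnvironementUpdateMode.Realtime && m_CurrentUpdateTime > skySettings.updatePeriod);
}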