        public void RenderSky(SkyUpdateContext skyContext, HDCamera hdCamera, Light sunLight, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
        {
            if (skyContext.IsValid() && hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky)
            {
                using (new ProfilingSample(cmd, "Sky Pass"))
                {
                    m_BuiltinParameters.commandBuffer             = cmd;
                    m_BuiltinParameters.sunLight                  = sunLight;
                    m_BuiltinParameters.pixelCoordToViewDirMatrix = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(hdCamera.camera.fieldOfView * Mathf.Deg2Rad, hdCamera.screenSize, hdCamera.viewMatrix, false);
                    m_BuiltinParameters.invViewProjMatrix         = hdCamera.viewProjMatrix.inverse;
                    m_BuiltinParameters.screenSize                = hdCamera.screenSize;
                    m_BuiltinParameters.cameraPosWS               = hdCamera.camera.transform.position;
                    m_BuiltinParameters.colorBuffer               = colorBuffer;
                    m_BuiltinParameters.depthBuffer               = depthBuffer;
                    m_BuiltinParameters.hdCamera                  = hdCamera;
                    m_BuiltinParameters.debugSettings             = debugSettings;

                    skyContext.renderer.SetRenderTargets(m_BuiltinParameters);

                    // If the luxmeter is enabled, we don't render the sky
                    if (debugSettings.lightingDebugSettings.debugLightingMode != DebugLightingMode.LuxMeter)
                    {
                        // When rendering the visual sky for reflection probes, we need to remove the sun disk if skySettings.includeSunInBaking is false.
                        skyContext.renderer.RenderSky(m_BuiltinParameters, false, hdCamera.camera.cameraType != CameraType.Reflection || skyContext.skySettings.includeSunInBaking);
                    }
                }
            }
        }
        void FilterCubemapCommon(CommandBuffer cmd,
                                 Texture source, RenderTexture target,
                                 Matrix4x4[] worldToViewMatrices)
        {
            int mipCount = 1 + (int)Mathf.Log(source.width, 2.0f);

            if (mipCount < ((int)EnvConstants.SpecCubeLodStep + 1))
            {
                Debug.LogWarning("RenderCubemapGGXConvolution: Cubemap size is too small for GGX convolution, needs at least " + ((int)EnvConstants.SpecCubeLodStep + 1) + " mip levels");
                return;
            }

            // Copy the first mip
            using (new ProfilingSample(cmd, "Copy Original Mip"))
            {
                for (int f = 0; f < 6; f++)
                {
                    cmd.CopyTexture(source, f, 0, target, f, 0);
                }
            }

            // Reciprocal of the solid angle associated with a texel of the cubemap.
            float invOmegaP = (6.0f * source.width * source.width) / (4.0f * Mathf.PI);

            if (!m_GgxIblSampleData.IsCreated())
            {
                m_GgxIblSampleData.Create();
                InitializeGgxIblSampleData(cmd);
            }

            m_convolveMaterial.SetTexture("_GgxIblSamples", m_GgxIblSampleData);

            var props = new MaterialPropertyBlock();

            props.SetTexture("_MainTex", source);
            props.SetFloat("_InvOmegaP", invOmegaP);

            for (int mip = 1; mip < ((int)EnvConstants.SpecCubeLodStep + 1); ++mip)
            {
                props.SetFloat("_Level", mip);

                using (new ProfilingSample(cmd, "Filter Cubemap Mip {0}", mip))
                {
                    for (int face = 0; face < 6; ++face)
                    {
                        var faceSize  = new Vector4(source.width >> mip, source.height >> mip, 1.0f / (source.width >> mip), 1.0f / (source.height >> mip));
                        var transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(0.5f * Mathf.PI, Vector2.zero, faceSize, worldToViewMatrices[face], true);

                        props.SetMatrix(HDShaderIDs._PixelCoordToViewDirWS, transform);

                        CoreUtils.SetRenderTarget(cmd, target, ClearFlag.None, mip, (CubemapFace)face);
                        CoreUtils.DrawFullScreen(cmd, m_convolveMaterial, props);
                    }
                }
            }
        }
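The reciprocal computed above comes from the average solid angle of a cubemap texel: the six faces cover the full 4*pi steradians with 6 * width * width texels. A minimal sketch of that relation (illustrative only, not part of the original listing):

        // Average solid angle covered by one texel of a cubemap with the given face width.
        // The six faces together span 4*pi steradians over 6 * width * width texels,
        // so invOmegaP above is simply 1 / TexelSolidAngleApprox(source.width).
        static float TexelSolidAngleApprox(int cubemapFaceWidth)
        {
            return (4.0f * Mathf.PI) / (6.0f * cubemapFaceWidth * cubemapFaceWidth);
        }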
        public void RebuildSkyMatrices(int resolution)
        {
            var cubeProj = Matrix4x4.Perspective(90.0f, 1.0f, 0.01f, 1.0f);

            for (int i = 0; i < 6; ++i)
            {
                var lookAt      = Matrix4x4.LookAt(Vector3.zero, CoreUtils.lookAtList[i], CoreUtils.upVectorList[i]);
                var worldToView = lookAt * Matrix4x4.Scale(new Vector3(1.0f, 1.0f, -1.0f)); // Need to scale -1.0 on Z to match what is being done in the camera.worldToCameraMatrix API. ...

                m_facePixelCoordToViewDirMatrices[i] = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(0.5f * Mathf.PI, Vector2.zero, m_CubemapScreenSize, worldToView, true);
            }
        }
Example #4
        public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volume Voxelization"))
            {
                int numVisibleVolumes = m_VisibleVolumeBounds.Count;

                if (numVisibleVolumes == 0)
                {
                    // Clear the render target instead of running the shader.
                    // CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
                    // return;

                    // Clearing 3D textures does not seem to work!
                    // Use the workaround by running the full shader with 0 density.
                }

                VBuffer vBuffer = FindVBuffer(camera.GetViewID());
                Debug.Assert(vBuffer != null);

                int w = 0, h = 0, d = 0;
                vBuffer.GetResolution(ref w, ref h, ref d);

                bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;

                int kernel           = m_VolumeVoxelizationCS.FindKernel(enableClustered ? "VolumeVoxelizationClustered"
                                                                           : "VolumeVoxelizationBruteforce");

                float     vFoV       = camera.camera.fieldOfView * Mathf.Deg2Rad;
                Vector4   resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
                Matrix4x4 transform  = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);

                camera.SetupComputeShader(m_VolumeVoxelizationCS, cmd);
                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeProperties, s_VisibleVolumePropertiesBuffer);

                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
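The dispatch call above rounds the viewport size up to whole 8x8 thread groups with the "(w + 7) / 8" idiom. A tiny helper makes that intent explicit (DivRoundUp here is a hypothetical utility, not an HDRP API):

        // Integer ceiling division: the number of thread groups needed to cover 'value'
        // pixels along one axis with groups of 'groupSize' threads.
        static int DivRoundUp(int value, int groupSize)
        {
            return (value + groupSize - 1) / groupSize;
        }

        // Equivalent dispatch: cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, DivRoundUp(w, 8), DivRoundUp(h, 8), 1);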
Example #5
        void RebuildSkyMatrices(float nearPlane, float farPlane)
        {
            if (!m_SkySettings)
            {
                return;
            }

            var cubeProj = Matrix4x4.Perspective(90.0f, 1.0f, nearPlane, farPlane);

            for (int i = 0; i < 6; ++i)
            {
                var lookAt      = Matrix4x4.LookAt(Vector3.zero, CoreUtils.lookAtList[i], CoreUtils.upVectorList[i]);
                var worldToView = lookAt * Matrix4x4.Scale(new Vector3(1.0f, 1.0f, -1.0f)); // Need to scale -1.0 on Z to match what is being done in the camera.worldToCameraMatrix API. ...
                var screenSize  = new Vector4((int)m_SkySettings.resolution, (int)m_SkySettings.resolution, 1.0f / (int)m_SkySettings.resolution, 1.0f / (int)m_SkySettings.resolution);

                m_facePixelCoordToViewDirMatrices[i]   = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(0.5f * Mathf.PI, screenSize, worldToView, true);
                m_faceCameraInvViewProjectionMatrix[i] = HDUtils.GetViewProjectionMatrix(lookAt, cubeProj).inverse;
            }
        }
        public void RenderSky(SkyUpdateContext skyContext, HDCamera camera, Light sunLight, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, CommandBuffer cmd)
        {
            if (skyContext.IsValid())
            {
                using (new ProfilingSample(cmd, "Sky Pass"))
                {
                    m_BuiltinParameters.commandBuffer             = cmd;
                    m_BuiltinParameters.sunLight                  = sunLight;
                    m_BuiltinParameters.pixelCoordToViewDirMatrix = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(camera.camera.fieldOfView * Mathf.Deg2Rad, camera.screenSize, camera.viewMatrix, false);
                    m_BuiltinParameters.invViewProjMatrix         = camera.viewProjMatrix.inverse;
                    m_BuiltinParameters.screenSize                = camera.screenSize;
                    m_BuiltinParameters.cameraPosWS               = camera.camera.transform.position;
                    m_BuiltinParameters.colorBuffer               = colorBuffer;
                    m_BuiltinParameters.depthBuffer               = depthBuffer;

                    skyContext.renderer.SetRenderTargets(m_BuiltinParameters);
                    skyContext.renderer.RenderSky(m_BuiltinParameters, false);
                }
            }
        }
        public void RenderSky(SkyUpdateContext skyContext, HDCamera hdCamera, Light sunLight, RTHandle colorBuffer, RTHandle depthBuffer, CommandBuffer cmd)
        {
            if (skyContext.IsValid() && hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky)
            {
                using (new ProfilingSample(cmd, "Sky Pass"))
                {
                    m_BuiltinParameters.commandBuffer             = cmd;
                    m_BuiltinParameters.sunLight                  = sunLight;
                    m_BuiltinParameters.pixelCoordToViewDirMatrix = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(hdCamera.camera.fieldOfView * Mathf.Deg2Rad, hdCamera.screenSize, hdCamera.viewMatrix, false);
                    m_BuiltinParameters.invViewProjMatrix         = hdCamera.viewProjMatrix.inverse;
                    m_BuiltinParameters.screenSize                = hdCamera.screenSize;
                    m_BuiltinParameters.cameraPosWS               = hdCamera.camera.transform.position;
                    m_BuiltinParameters.colorBuffer               = colorBuffer;
                    m_BuiltinParameters.depthBuffer               = depthBuffer;
                    m_BuiltinParameters.hdCamera                  = hdCamera;

                    skyContext.renderer.SetRenderTargets(m_BuiltinParameters);
                    skyContext.renderer.RenderSky(m_BuiltinParameters, false);
                }
            }
        }
Example #8
        public void RenderSky(HDCamera camera, Light sunLight, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, CommandBuffer cmd, FrameSettings m_frameSettings)
        {
            using (new ProfilingSample(cmd, "Sky Pass"))
            {
                if (IsSkyValid())
                {
                    // Rendering the sky is the first point in the frame where fog parameters are needed, so we push them here for the whole frame.
                    m_SkySettings.atmosphericScatteringSettings.PushShaderParameters(cmd, m_frameSettings);

                    m_BuiltinParameters.commandBuffer             = cmd;
                    m_BuiltinParameters.sunLight                  = sunLight;
                    m_BuiltinParameters.pixelCoordToViewDirMatrix = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(camera.camera.fieldOfView * Mathf.Deg2Rad, camera.screenSize, camera.viewMatrix, false);
                    m_BuiltinParameters.invViewProjMatrix         = camera.viewProjMatrix.inverse;
                    m_BuiltinParameters.screenSize                = camera.screenSize;
                    m_BuiltinParameters.cameraPosWS               = camera.camera.transform.position;
                    m_BuiltinParameters.colorBuffer               = colorBuffer;
                    m_BuiltinParameters.depthBuffer               = depthBuffer;

                    m_Renderer.SetRenderTargets(m_BuiltinParameters);
                    m_Renderer.RenderSky(m_BuiltinParameters, false);
                }
            }
        }
        public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes, LightLoop lightLoop)
        {
            if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.Volumetrics))
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType.value != FogType.Volumetric)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volume Voxelization"))
            {
                int  numVisibleVolumes = m_VisibleVolumeBounds.Count;
                bool tiledLighting     = lightLoop.HasLightToCull() && hdCamera.frameSettings.IsEnabled(FrameSettingsField.BigTilePrepass);
                bool highQuality       = preset == VolumetricLightingPreset.High;

                int kernel = (tiledLighting ? 1 : 0) | (highQuality ? 2 : 0);

                var currFrameParams = hdCamera.vBufferParams[0];
                var cvp             = currFrameParams.viewportSize;

                Vector4 resolution = new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y);
#if UNITY_2019_1_OR_NEWER
                var vFoV      = hdCamera.camera.GetGateFittedFieldOfView() * Mathf.Deg2Rad;
                var lensShift = hdCamera.camera.GetGateFittedLensShift();
#else
                var vFoV      = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
                var lensShift = Vector2.zero;
#endif

                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, lensShift, resolution, hdCamera.mainViewConstants.viewMatrix, false);

                // Compute texel spacing at the depth of 1 meter.
                float unitDepthTexelSpacing = HDUtils.ComputZPlaneTexelSpacing(1.0f, vFoV, resolution.y);

                Texture3D volumeAtlas           = DensityVolumeManager.manager.volumeAtlas.GetAtlas();
                Vector4   volumeAtlasDimensions = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);

                if (volumeAtlas != null)
                {
                    volumeAtlasDimensions.x = (float)volumeAtlas.width / volumeAtlas.depth; // 1 / number of textures
                    volumeAtlasDimensions.y = volumeAtlas.width;
                    volumeAtlasDimensions.z = volumeAtlas.depth;
                    volumeAtlasDimensions.w = Mathf.Log(volumeAtlas.width, 2);              // Max LoD
                }
                else
                {
                    volumeAtlas = CoreUtils.blackVolumeTexture;
                }

                if (hdCamera.frameSettings.VolumeVoxelizationRunsAsync())
                {
                    // We explicitly set the big tile info even though it is set globally, since this could run asynchronously before PushGlobalParams.
                    cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumTileBigTileX, lightLoop.GetNumTileBigTileX(hdCamera));
                    cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumTileBigTileY, lightLoop.GetNumTileBigTileY(hdCamera));
                    if (tiledLighting)
                    {
                        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs.g_vBigTileLightList, lightLoop.GetBigTileLightList());
                    }
                }

                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeMaskAtlas, volumeAtlas);

                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeFloatParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferUnitDepthTexelSpacing, unitDepthTexelSpacing);
                cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
                cmd.SetComputeVectorParam(m_VolumeVoxelizationCS, HDShaderIDs._VolumeMaskDimensions, volumeAtlasDimensions);

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, hdCamera.computePassCount);
            }
        }
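The "texel spacing at the depth of 1 meter" above is the world-space size of one V-buffer texel on a plane one meter in front of the camera. A sketch of the underlying math, assuming it mirrors what HDUtils.ComputZPlaneTexelSpacing evaluates (verify against the HDRP version in use):

        // The frustum is 2 * d * tan(vFoV / 2) meters tall at depth d; dividing that by the
        // vertical resolution gives the height of a single texel at that depth.
        static float ZPlaneTexelSpacingSketch(float planeDepth, float verticalFoV, float resolutionY)
        {
            return 2.0f * planeDepth * Mathf.Tan(0.5f * verticalFoV) / resolutionY;
        }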
Example #10
        public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
        {
            if (!hdCamera.frameSettings.enableVolumetrics)
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType.value != FogType.Volumetric)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volumetric Lighting"))
            {
                // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
                bool highQuality        = preset == VolumetricLightingPreset.High;
                bool enableClustered    = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
                bool enableReprojection = Application.isPlaying && hdCamera.camera.cameraType == CameraType.Game &&
                                          hdCamera.frameSettings.enableReprojectionForVolumetrics;

                int kernel;

                if (highQuality)
                {
                    if (enableReprojection)
                    {
                        kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReprojHQ"
                                                                                   : "VolumetricLightingBruteforceReprojHQ");
                    }
                    else
                    {
                        kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredHQ"
                                                                                   : "VolumetricLightingBruteforceHQ");
                    }
                }
                else
                {
                    if (enableReprojection)
                    {
                        kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReprojMQ"
                                                                                   : "VolumetricLightingBruteforceReprojMQ");
                    }
                    else
                    {
                        kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredMQ"
                                                                                   : "VolumetricLightingBruteforceMQ");
                    }
                }

                var currFrameParams = hdCamera.vBufferParams[0];
                var cvp             = currFrameParams.viewportSize;

                Vector4 resolution = new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y);
                float   vFoV       = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);

                GetHexagonalClosePackedSpheres7(m_xySeq);

                int sampleIndex = (int)frameIndex % 7;

                // TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
                // Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
                m_xySeqOffset.Set(m_xySeq[sampleIndex].x, m_xySeq[sampleIndex].y, m_zSeq[sampleIndex], frameIndex);

                // Get the interpolated anisotropy value.
                var fog = VolumeManager.instance.stack.GetComponent <VolumetricFog>();

                // TODO: set 'm_VolumetricLightingPreset'.
                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, m_xySeqOffset);
                cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.anisotropy));
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);           // Read
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, m_LightingBufferHandle); // Write
                if (enableReprojection)
                {
                    var historyRT  = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);
                    var feedbackRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);

                    // Detect if the history buffer has been recreated or resized.
                    Vector3Int currentResolutionOfHistoryBuffer = new Vector3Int();
                    currentResolutionOfHistoryBuffer.x = historyRT.rt.width;
                    currentResolutionOfHistoryBuffer.y = historyRT.rt.height;
                    currentResolutionOfHistoryBuffer.z = historyRT.rt.volumeDepth;

                    cmd.SetComputeIntParam(m_VolumetricLightingCS, HDShaderIDs._VBufferLightingHistoryIsValid, hdCamera.volumetricHistoryIsValid ? 1 : 0);
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, historyRT);   // Read
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, feedbackRT); // Write

                    hdCamera.volumetricHistoryIsValid = true;                                                                     // For the next frame...
                }

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
Example #11
        public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes)
        {
            if (!hdCamera.frameSettings.enableVolumetrics)
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType.value != FogType.Volumetric)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volume Voxelization"))
            {
                int numVisibleVolumes = m_VisibleVolumeBounds.Count;

                bool highQuality     = preset == VolumetricLightingPreset.High;
                bool enableClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;

                int kernel;

                if (highQuality)
                {
                    kernel = m_VolumeVoxelizationCS.FindKernel(enableClustered ? "VolumeVoxelizationClusteredHQ"
                                                                               : "VolumeVoxelizationBruteforceHQ");
                }
                else
                {
                    kernel = m_VolumeVoxelizationCS.FindKernel(enableClustered ? "VolumeVoxelizationClusteredMQ"
                                                                               : "VolumeVoxelizationBruteforceMQ");
                }

                var currFrameParams = hdCamera.vBufferParams[0];
                var cvp             = currFrameParams.viewportSize;

                Vector4 resolution = new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y);
                float   vFoV       = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;

                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);

                Texture3D volumeAtlas           = DensityVolumeManager.manager.volumeAtlas.GetAtlas();
                Vector4   volumeAtlasDimensions = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);

                if (volumeAtlas != null)
                {
                    volumeAtlasDimensions.x = (float)volumeAtlas.width / volumeAtlas.depth; // 1 / number of textures
                    volumeAtlasDimensions.y = volumeAtlas.width;
                    volumeAtlasDimensions.z = volumeAtlas.depth;
                    volumeAtlasDimensions.w = Mathf.Log(volumeAtlas.width, 2);              // Max LoD
                }
                else
                {
                    volumeAtlas = CoreUtils.blackVolumeTexture;
                }

                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeMaskAtlas, volumeAtlas);

                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
                cmd.SetComputeVectorParam(m_VolumeVoxelizationCS, HDShaderIDs._VolumeMaskDimensions, volumeAtlasDimensions);

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
        public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType != FogType.Volumetric)
            {
                return;
            }

            VBuffer vBuffer = FindVBuffer(camera.GetViewID());

            if (vBuffer == null)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volumetric Lighting"))
            {
                // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
                bool enableClustered    = settings.lightLoopSettings.enableTileAndCluster;
                bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

                int kernel;

                if (enableReprojection)
                {
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj"
                                                                           : "VolumetricLightingBruteforceReproj");
                }
                else
                {
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered"
                                                                           : "VolumetricLightingBruteforce");
                }

                var     frameParams = vBuffer.GetParameters(frameIndex);
                Vector4 resolution  = frameParams.resolution;
                float   vFoV        = camera.camera.fieldOfView * Mathf.Deg2Rad;
                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);

                Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

                // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
                // Each of them is the centroid of the interval of length 2/14.
                // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
                // That way, the running average position is close to 0.5.
                // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
                // |   |   |   | o |   |   |   |
                // |   | o |   | x |   |   |   |
                // |   | x |   | x |   | o |   |
                // |   | x | o | x |   | x |   |
                // |   | x | x | x | o | x |   |
                // | o | x | x | x | x | x |   |
                // | x | x | x | x | x | x | o |
                // | x | x | x | x | x | x | x |
                float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

                int sampleIndex = (int)frameIndex % 7;

                // TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
                // Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
                Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], frameIndex);

                // Get the interpolated asymmetry value.
                var fog = VolumeManager.instance.stack.GetComponent <VolumetricFog>();

                // TODO: set 'm_VolumetricLightingPreset'.
                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
                cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.asymmetry));
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());                   // Read
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
                if (enableReprojection)
                {
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer(frameIndex)); // Write
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer(frameIndex));   // Read
                }

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
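The ASCII diagram above illustrates the property of zSeq: the first value is 0.5 and every following {small, large} pair sums to 1, so the running average of the jitter stays near 0.5. A quick standalone check (illustrative only, not part of the original listing):

        // Prints the running average of the z-jitter sequence; it is exactly 0.5 after an
        // odd number of samples (1, 3, 5, 7) and stays close to 0.5 in between.
        static void CheckZSequenceAverage()
        {
            float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };
            float sum = 0.0f;
            for (int i = 0; i < zSeq.Length; i++)
            {
                sum += zSeq[i];
                Debug.Log($"samples: {i + 1}, running average: {sum / (i + 1)}");
            }
        }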
Example #13
        public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings frameSettings)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volumetric Lighting"))
            {
                VBuffer vBuffer = FindVBuffer(camera.GetViewID());
                Debug.Assert(vBuffer != null);

                if (HomogeneousFog.GetGlobalFogComponent() == null)
                {
                    // Clear the render target instead of running the shader.
                    // CoreUtils.SetRenderTarget(cmd, GetVBufferLightingIntegral(viewOffset), ClearFlag.Color, CoreUtils.clearColorAllBlack);
                    // return;

                    // Clearing 3D textures does not seem to work!
                    // Use the workaround by running the full shader with no volume.
                }

                bool enableClustered    = frameSettings.lightLoopSettings.enableTileAndCluster;
                bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

                int kernel;

                if (enableReprojection)
                {
                    // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj"
                                                                           : "VolumetricLightingAllLightsReproj");
                }
                else
                {
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered"
                                                                           : "VolumetricLightingAllLights");
                }

                int     w = 0, h = 0, d = 0;
                Vector2 scale = ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
                float   vFoV  = camera.camera.fieldOfView * Mathf.Deg2Rad;

                // Compose the matrix which allows us to compute the world space view direction.
                // Compute it using the scaled resolution to account for the visible area of the VBuffer.
                Vector4   scaledRes = new Vector4(w * scale.x, h * scale.y, 1.0f / (w * scale.x), 1.0f / (h * scale.y));
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, scaledRes, camera.viewMatrix, false);

                camera.SetupComputeShader(m_VolumetricLightingCS, cmd);

                Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

                // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
                // Each of them is the centroid of the interval of length 2/14.
                // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
                // That way, the running average position is close to 0.5.
                // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
                // |   |   |   | o |   |   |   |
                // |   | o |   | x |   |   |   |
                // |   | x |   | x |   | o |   |
                // |   | x | o | x |   | x |   |
                // |   | x | x | x | o | x |   |
                // | o | x | x | x | x | x |   |
                // | x | x | x | x | x | x | o |
                // | x | x | x | x | x | x | x |
                float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

                int     rfc         = Time.renderedFrameCount;
                int     sampleIndex = rfc % 7;
                Vector4 offset      = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);

                // TODO: set 'm_VolumetricLightingPreset'.
                cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
                cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
                if (enableReprojection)
                {
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer()); // Write
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer());   // Read
                }

                // The shader defines GROUP_SIZE_1D = 16.
                cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 15) / 16, (h + 15) / 16, 1);
            }
        }
Example #14
        public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volumetric Lighting"))
            {
                var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

                if (visualEnvironment.fogType != FogType.Volumetric)
                {
                    // Clear the render target instead of running the shader.
                    // CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
                    // return;

                    // Clearing 3D textures does not seem to work!
                    // Use the workaround by running the full shader with 0 density.
                }

                VBuffer vBuffer = FindVBuffer(camera.GetViewID());
                Debug.Assert(vBuffer != null);

                // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
                bool enableClustered    = settings.lightLoopSettings.enableTileAndCluster;
                bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

                int kernel;

                if (enableReprojection)
                {
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj"
                                                                           : "VolumetricLightingBruteforceReproj");
                }
                else
                {
                    kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered"
                                                                           : "VolumetricLightingBruteforce");
                }

                int w = 0, h = 0, d = 0;
                vBuffer.GetResolution(ref w, ref h, ref d);

                // Compose the matrix which allows us to compute the world space view direction.
                float     vFoV       = camera.camera.fieldOfView * Mathf.Deg2Rad;
                Vector4   resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
                Matrix4x4 transform  = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);

                Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

                // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
                // Each of them is the centroid of the interval of length 2/14.
                // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
                // That way, the running average position is close to 0.5.
                // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
                // |   |   |   | o |   |   |   |
                // |   | o |   | x |   |   |   |
                // |   | x |   | x |   | o |   |
                // |   | x | o | x |   | x |   |
                // |   | x | x | x | o | x |   |
                // | o | x | x | x | x | x |   |
                // | x | x | x | x | x | x | o |
                // | x | x | x | x | x | x | x |
                float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

                int     rfc         = Time.renderedFrameCount;
                int     sampleIndex = rfc % 7;
                Vector4 offset      = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);

                // Get the interpolated asymmetry value.
                var fog = VolumeManager.instance.stack.GetComponent <VolumetricFog>();

                // TODO: set 'm_VolumetricLightingPreset'.
                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
                cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.asymmetry));
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());                   // Read
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
                if (enableReprojection)
                {
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer()); // Write
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer());   // Read
                }

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
        public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
        {
            if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.Volumetrics))
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType.value != FogType.Volumetric)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volumetric Lighting"))
            {
                // Get the interpolated anisotropy value.
                var fog = VolumeManager.instance.stack.GetComponent <VolumetricFog>();

                // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
                bool tiledLighting      = hdCamera.frameSettings.IsEnabled(FrameSettingsField.BigTilePrepass);
                bool enableReprojection = Application.isPlaying && hdCamera.camera.cameraType == CameraType.Game &&
                                          hdCamera.frameSettings.IsEnabled(FrameSettingsField.ReprojectionForVolumetrics);
                bool enableAnisotropy = fog.anisotropy.value != 0;
                bool highQuality      = preset == VolumetricLightingPreset.High;

                int kernel = (tiledLighting ? 1 : 0) | (enableReprojection ? 2 : 0) | (enableAnisotropy ? 4 : 0) | (highQuality ? 8 : 0);

                var currFrameParams = hdCamera.vBufferParams[0];
                var cvp             = currFrameParams.viewportSize;

                Vector4 resolution = new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y);
#if UNITY_2019_1_OR_NEWER
                var vFoV      = hdCamera.camera.GetGateFittedFieldOfView() * Mathf.Deg2Rad;
                var lensShift = hdCamera.camera.GetGateFittedLensShift();
#else
                var vFoV      = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
                var lensShift = Vector2.zero;
#endif
                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, lensShift, resolution, hdCamera.mainViewConstants.viewMatrix, false);

                // Compute texel spacing at the depth of 1 meter.
                float unitDepthTexelSpacing = HDUtils.ComputZPlaneTexelSpacing(1.0f, vFoV, resolution.y);

                GetHexagonalClosePackedSpheres7(m_xySeq);

                int sampleIndex = (int)frameIndex % 7;

                // TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
                // Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
                m_xySeqOffset.Set(m_xySeq[sampleIndex].x, m_xySeq[sampleIndex].y, m_zSeq[sampleIndex], frameIndex);


                // TODO: set 'm_VolumetricLightingPreset'.
                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._VBufferUnitDepthTexelSpacing, unitDepthTexelSpacing);
                cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.anisotropy.value));
                cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, m_xySeqOffset);
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);                // Read
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, m_LightingBufferHandle);      // Write

                if (enableReprojection)
                {
                    var historyRT  = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);
                    var feedbackRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);

                    cmd.SetComputeIntParam(m_VolumetricLightingCS, HDShaderIDs._VBufferLightingHistoryIsValid, hdCamera.volumetricHistoryIsValid ? 1 : 0);
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, historyRT);   // Read
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, feedbackRT); // Write

                    hdCamera.volumetricHistoryIsValid = true;                                                                     // For the next frame...
                }

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, hdCamera.computePassCount);
            }
        }
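The bit-packed kernel index above assumes the compute shader declares its kernel variants in an order that matches the flag bits. A named mapping makes that assumption explicit (hypothetical helper, not HDRP code; the actual kernel order must be confirmed in the volumetric lighting compute shader):

        // Flag-to-bit mapping implied by the expression
        // (tiledLighting ? 1 : 0) | (enableReprojection ? 2 : 0) | (enableAnisotropy ? 4 : 0) | (highQuality ? 8 : 0).
        [System.Flags]
        enum VolumetricLightingKernelFlags
        {
            None          = 0,
            TiledLighting = 1 << 0,
            Reprojection  = 1 << 1,
            Anisotropy    = 1 << 2,
            HighQuality   = 1 << 3,
        }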
        public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
        {
            if (preset == VolumetricLightingPreset.Off)
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType != FogType.Volumetric)
            {
                return;
            }

            VBuffer vBuffer = FindVBuffer(camera.GetViewID());

            if (vBuffer == null)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volume Voxelization"))
            {
                int numVisibleVolumes = m_VisibleVolumeBounds.Count;

                if (numVisibleVolumes == 0)
                {
                    // Clear the render target instead of running the shader.
                    // Note: the clear must take the global fog into account!
                    // CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
                    // return;

                    // Clearing 3D textures does not seem to work!
                    // Use the workaround by running the full shader with 0 density.
                }

                bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;

                int kernel = m_VolumeVoxelizationCS.FindKernel(enableClustered ? "VolumeVoxelizationClustered"
                                                                           : "VolumeVoxelizationBruteforce");

                var     frameParams = vBuffer.GetParameters(frameIndex);
                Vector4 resolution  = frameParams.resolution;
                float   vFoV        = camera.camera.fieldOfView * Mathf.Deg2Rad;

                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);

                Texture3D volumeAtlas           = DensityVolumeManager.manager.volumeAtlas.volumeAtlas;
                Vector2   volumeAtlasDimensions = new Vector2(0.0f, 0.0f);

                if (volumeAtlas != null)
                {
                    volumeAtlasDimensions.x = (float)volumeAtlas.width / volumeAtlas.depth; // 1 / number of textures
                    volumeAtlasDimensions.y = 1.0f / volumeAtlas.width;
                }

                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
                cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeMaskAtlas, volumeAtlas);

                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
                cmd.SetComputeVectorParam(m_VolumeVoxelizationCS, HDShaderIDs._VolumeMaskDimensions, volumeAtlasDimensions);

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
        public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
        {
            if (!hdCamera.frameSettings.enableVolumetrics)
            {
                return;
            }

            var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

            if (visualEnvironment.fogType.value != FogType.Volumetric)
            {
                return;
            }

            using (new ProfilingSample(cmd, "Volumetric Lighting"))
            {
                // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
                bool highQuality        = preset == VolumetricLightingPreset.High;
                bool enableClustered    = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
                bool enableReprojection = Application.isPlaying && hdCamera.camera.cameraType == CameraType.Game;

                int kernel;

                if (highQuality)
                {
                    if (enableReprojection)
                    {
                        kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReprojHQ"
                                                                                   : "VolumetricLightingBruteforceReprojHQ");
                    }
                    else
                    {
                        kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredHQ"
                                                                                   : "VolumetricLightingBruteforceHQ");
                    }
                }
                else
                {
                    if (enableReprojection)
                    {
                        kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReprojMQ"
                                                                                   : "VolumetricLightingBruteforceReprojMQ");
                    }
                    else
                    {
                        kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredMQ"
                                                                                   : "VolumetricLightingBruteforceMQ");
                    }
                }

                var     frameParams = hdCamera.vBufferParams[0];
                Vector4 resolution  = frameParams.resolution;
                float   vFoV        = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
                // Compose the matrix which allows us to compute the world space view direction.
                Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);

                Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

                // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
                // Each of them is the centroid of the interval of length 2/14.
                // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
                // That way, the running average position is close to 0.5.
                // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
                // |   |   |   | o |   |   |   |
                // |   | o |   | x |   |   |   |
                // |   | x |   | x |   | o |   |
                // |   | x | o | x |   | x |   |
                // |   | x | x | x | o | x |   |
                // | o | x | x | x | x | x |   |
                // | x | x | x | x | x | x | o |
                // | x | x | x | x | x | x | x |
                float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

                int sampleIndex = (int)frameIndex % 7;

                // TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
                // Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
                Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], frameIndex);

                // Get the interpolated anisotropy value.
                var fog = VolumeManager.instance.stack.GetComponent <VolumetricFog>();

                // TODO: set 'm_VolumetricLightingPreset'.
                // TODO: set the constant buffer data only once.
                cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
                cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
                cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.anisotropy));
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);           // Read
                cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, m_LightingBufferHandle); // Write
                if (enableReprojection)
                {
                    var historyRT  = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);
                    var feedbackRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);

                    // Detect if the history buffer has been recreated or resized.
                    Vector3Int currentResolutionOfHistoryBuffer = new Vector3Int();
                    currentResolutionOfHistoryBuffer.x = historyRT.rt.width;
                    currentResolutionOfHistoryBuffer.y = historyRT.rt.height;
                    currentResolutionOfHistoryBuffer.z = historyRT.rt.volumeDepth;

                    // We allow downsizing, as this does not cause a reallocation.
                    bool validHistory = (currentResolutionOfHistoryBuffer.x <= m_PreviousResolutionOfHistoryBuffer.x &&
                                         currentResolutionOfHistoryBuffer.y <= m_PreviousResolutionOfHistoryBuffer.y &&
                                         currentResolutionOfHistoryBuffer.z <= m_PreviousResolutionOfHistoryBuffer.z);

                    cmd.SetComputeIntParam(m_VolumetricLightingCS, HDShaderIDs._VBufferLightingHistoryIsValid, validHistory ? 1 : 0);
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, historyRT);         // Read
                    cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, feedbackRT);       // Write

                    m_PreviousResolutionOfHistoryBuffer = currentResolutionOfHistoryBuffer;
                }

                int w = (int)resolution.x;
                int h = (int)resolution.y;

                // The shader defines GROUP_SIZE_1D = 8.
                cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
            }
        }
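For reference, the "constant part" of the Cornette-Shanks phase function passed above is the anisotropy-dependent normalization factor, k(g) = 3 / (8 * pi) * (1 - g^2) / (2 + g^2). A minimal sketch of what CornetteShanksPhasePartConstant is expected to compute (an assumption; check against the HDRP version in use):

        static float CornetteShanksPhasePartConstantSketch(float anisotropy)
        {
            float g = anisotropy;
            // Normalization of the Cornette-Shanks phase function; the angular part is
            // evaluated per sample direction in the shader.
            return (3.0f / (8.0f * Mathf.PI)) * (1.0f - g * g) / (2.0f + g * g);
        }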