static internal void ReinitializeVolumetricBufferParams(HDCamera hdCamera)
        {
            // Note: no early-out on the fog state here; when fog has just been disabled,
            // the 'init' branch below still needs to run to release vBufferParams.
            bool fog  = Fog.IsVolumetricFogEnabled(hdCamera);
            bool init = hdCamera.vBufferParams != null;

            if (fog ^ init)
            {
                if (init)
                {
                    // Deinitialize.
                    hdCamera.vBufferParams = null;
                }
                else
                {
                    // Initialize.
                    // Start with the same parameters for both frames. Then update them one by one every frame.
                    var parameters = ComputeVolumetricBufferParameters(hdCamera);
                    hdCamera.vBufferParams    = new VBufferParameters[2];
                    hdCamera.vBufferParams[0] = parameters;
                    hdCamera.vBufferParams[1] = parameters;
                }
            }
        }
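
        // A minimal sketch (hypothetical names, illustration only) of the lazy allocate/release
        // pattern used above: 'fog ^ init' is true only when the enabled state and the allocation
        // state disagree, i.e. on enable/disable transitions.
        static void SyncLazyResourceSketch(ref object resource, bool featureEnabled)
        {
            bool init = resource != null;

            if (featureEnabled ^ init)
            {
                if (init)
                    resource = null;         // Feature turned off: release.
                else
                    resource = new object(); // Feature turned on: allocate.
            }
            // Otherwise the enabled and allocation states already match: nothing to do.
        }
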
        // This function relies on being called once per camera per frame.
        // The results are undefined otherwise.
        static internal void UpdateVolumetricBufferParams(HDCamera hdCamera)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            Debug.Assert(hdCamera.vBufferParams != null);
            Debug.Assert(hdCamera.vBufferParams.Length == 2);

            var currentParams = ComputeVolumetricBufferParameters(hdCamera);

            int frameIndex = (int)VolumetricFrameIndex(hdCamera);
            var currIdx    = (frameIndex + 0) & 1;
            var prevIdx    = (frameIndex + 1) & 1;

            hdCamera.vBufferParams[currIdx] = currentParams;

            // Handle the first frame: the previous-frame slot has not been written yet, so reuse the current frame's parameters.
            if (hdCamera.vBufferParams[prevIdx].viewportSize.x == 0.0f && hdCamera.vBufferParams[prevIdx].viewportSize.y == 0.0f)
            {
                hdCamera.vBufferParams[prevIdx] = currentParams;
            }

            // Update size used to create volumetric buffers.
            s_CurrentVolumetricBufferSize = new Vector3Int(Math.Max(s_CurrentVolumetricBufferSize.x, currentParams.viewportSize.x),
                                                           Math.Max(s_CurrentVolumetricBufferSize.y, currentParams.viewportSize.y),
                                                           Math.Max(s_CurrentVolumetricBufferSize.z, currentParams.viewportSize.z));
        }
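
The curr/prev indexing above depends only on the parity of the frame index, so the two parameter slots swap roles every frame without any copying. A small sketch (hypothetical helper, for illustration only) of the parity math in isolation:

        // For frameIndex = 0, 1, 2, 3, ... this yields (curr, prev) = (0, 1), (1, 0), (0, 1), (1, 0), ...
        // so the slot written this frame is read back as the "previous" slot on the next frame.
        static void LogVBufferSlotParity(int frameCount)
        {
            for (int frameIndex = 0; frameIndex < frameCount; frameIndex++)
            {
                int currIdx = (frameIndex + 0) & 1;
                int prevIdx = (frameIndex + 1) & 1;
                UnityEngine.Debug.Log($"frame {frameIndex}: curr = {currIdx}, prev = {prevIdx}");
            }
        }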
Example #3
        void UpdateShaderVariablesGlobalVolumetrics(ref ShaderVariablesGlobal cb, HDCamera hdCamera)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            // Get the interpolated anisotropy value.
            var fog        = hdCamera.volumeStack.GetComponent<Fog>();
            int frameIndex = m_FrameCount;
            int currIdx    = (frameIndex + 0) & 1;

            var currParams = hdCamera.vBufferParams[currIdx];

            // The lighting & density buffers are shared by all cameras.
            // The history & feedback buffers are specific to the camera.
            // These 2 types of buffers can have different sizes.
            // Additionally, history buffers can have different sizes, since they are not resized at the same time.
            var cvp = currParams.viewportSize;

            // Adjust slices for XR rendering: VBuffer is shared for all single-pass views
            uint sliceCount = (uint)(cvp.z / hdCamera.viewCount);

            cb._VBufferViewportSize           = new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y);
            cb._VBufferSliceCount             = sliceCount;
            cb._VBufferRcpSliceCount          = 1.0f / sliceCount;
            cb._VBufferLightingViewportScale  = currParams.ComputeViewportScale(m_CurrentVolumetricBufferSize);
            cb._VBufferLightingViewportLimit  = currParams.ComputeViewportLimit(m_CurrentVolumetricBufferSize);
            cb._VBufferDistanceEncodingParams = currParams.depthEncodingParams;
            cb._VBufferDistanceDecodingParams = currParams.depthDecodingParams;
            cb._VBufferLastSliceDist          = currParams.ComputeLastSliceDistance(sliceCount);
            cb._VBufferRcpInstancedViewCount  = 1.0f / hdCamera.viewCount;
        }
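
`_VBufferViewportSize` packs the viewport resolution in xy and its reciprocal in zw, which lets a consumer convert voxel coordinates to normalized UVs with a single multiply. A hedged C#-side mirror of that lookup (the shader code itself is not part of this listing):

        // Sketch: converts an integer voxel coordinate to a [0, 1] UV at the voxel center,
        // assuming viewportSize = (width, height, 1/width, 1/height) as packed above.
        static Vector2 VoxelCenterToUv(Vector2Int voxel, Vector4 viewportSize)
        {
            return new Vector2((voxel.x + 0.5f) * viewportSize.z,
                               (voxel.y + 0.5f) * viewportSize.w);
        }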
Example #4
        // Must be called AFTER UpdateVolumetricBufferParams.
        internal void ResizeVolumetricLightingBuffers(HDCamera hdCamera, int frameIndex)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            Debug.Assert(hdCamera.vBufferParams != null);

            // Render texture contents can become "lost" on certain events, like loading a new level,
            // system going to a screensaver mode, in and out of fullscreen and so on.
            // https://docs.unity3d.com/ScriptReference/RenderTexture.html
            if (m_DensityBuffer == null || m_LightingBuffer == null)
            {
                DestroyVolumetricLightingBuffers();
                CreateVolumetricLightingBuffers();
            }

            var currIdx = (frameIndex + 0) & 1;
            var prevIdx = (frameIndex + 1) & 1;

            var currentParams = hdCamera.vBufferParams[currIdx];

            ResizeVolumetricBuffer(ref m_DensityBuffer, "VBufferDensity", currentParams.viewportSize.x,
                                   currentParams.viewportSize.y,
                                   currentParams.viewportSize.z);
            ResizeVolumetricBuffer(ref m_LightingBuffer, "VBufferLighting", currentParams.viewportSize.x,
                                   currentParams.viewportSize.y,
                                   currentParams.viewportSize.z);

            // TODO RENDERGRAPH: For now these textures are not handled by the render graph.
            // When they are, we won't have m_DensityBuffer handy to get the current size in UpdateShaderVariablesGlobalVolumetrics,
            // so we store the size here; in time we'll fill this vector differently.
            m_CurrentVolumetricBufferSize = new Vector3Int(m_DensityBuffer.rt.width, m_DensityBuffer.rt.height, m_DensityBuffer.rt.volumeDepth);
        }
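
`ResizeVolumetricBuffer` itself is not shown in this listing; a plausible sketch is a grow-only reallocation, since shrinking the viewport can be handled by the viewport scale/limit constants rather than by reallocating the texture:

        // Sketch only: reallocate the 3D texture when the requested viewport exceeds the
        // current allocation; otherwise keep the existing (possibly larger) texture.
        static void ResizeVolumetricBufferSketch(ref RTHandle rt, string name, int viewportWidth, int viewportHeight, int viewportDepth)
        {
            int width  = Math.Max(rt.rt.width,       viewportWidth);
            int height = Math.Max(rt.rt.height,      viewportHeight);
            int depth  = Math.Max(rt.rt.volumeDepth, viewportDepth);

            if (width != rt.rt.width || height != rt.rt.height || depth != rt.rt.volumeDepth)
            {
                RTHandles.Release(rt);
                rt = RTHandles.Alloc(width, height, depth, colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
                                     dimension: TextureDimension.Tex3D, enableRandomWrite: true, name: name);
            }
        }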
Example #5
        // Must be called AFTER UpdateVolumetricBufferParams.
        internal void ResizeVolumetricLightingBuffers(HDCamera hdCamera, int frameIndex)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            Debug.Assert(hdCamera.vBufferParams != null);

            // Render texture contents can become "lost" on certain events, like loading a new level,
            // system going to a screensaver mode, in and out of fullscreen and so on.
            // https://docs.unity3d.com/ScriptReference/RenderTexture.html
            if (m_DensityBuffer == null || m_LightingBuffer == null)
            {
                DestroyVolumetricLightingBuffers();
                CreateVolumetricLightingBuffers();
            }

            var currIdx = (frameIndex + 0) & 1;
            var prevIdx = (frameIndex + 1) & 1;

            var currentParams = hdCamera.vBufferParams[currIdx];

            ResizeVolumetricBuffer(ref m_DensityBuffer, "VBufferDensity", currentParams.viewportSize.x,
                                   currentParams.viewportSize.y,
                                   currentParams.viewportSize.z);
            ResizeVolumetricBuffer(ref m_LightingBuffer, "VBufferLighting", currentParams.viewportSize.x,
                                   currentParams.viewportSize.y,
                                   currentParams.viewportSize.z);
        }
Example #6
        DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera hdCamera, CommandBuffer cmd, float time)
        {
            DensityVolumeList densityVolumes = new DensityVolumeList();

            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return densityVolumes;
            }

            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.PrepareVisibleDensityVolumeList)))
            {
                Vector3 camPosition = hdCamera.camera.transform.position;
                Vector3 camOffset   = Vector3.zero; // World-origin-relative

                if (ShaderConfig.s_CameraRelativeRendering != 0)
                {
                    camOffset = camPosition; // Camera-relative
                }

                m_VisibleVolumeBounds.Clear();
                m_VisibleVolumeData.Clear();

                // Collect all visible finite volume data, and upload it to the GPU.
                var volumes = DensityVolumeManager.manager.PrepareDensityVolumeData(cmd, hdCamera, time);

                for (int i = 0; i < Math.Min(volumes.Count, k_MaxVisibleVolumeCount); i++)
                {
                    DensityVolume volume = volumes[i];

                    // TODO: cache these?
                    var obb = new OrientedBBox(Matrix4x4.TRS(volume.transform.position, volume.transform.rotation, volume.parameters.size));

                    // Handle camera-relative rendering.
                    obb.center -= camOffset;

                    // Frustum cull on the CPU for now. TODO: do it on the GPU.
                    // TODO: account for custom near and far planes of the V-Buffer's frustum.
                    // It's typically much shorter (along the Z axis) than the camera's frustum.
                    if (GeometryUtils.Overlap(obb, hdCamera.frustum, 6, 8))
                    {
                        // TODO: cache these?
                        var data = volume.parameters.ConvertToEngineData();

                        m_VisibleVolumeBounds.Add(obb);
                        m_VisibleVolumeData.Add(data);
                    }
                }

                m_VisibleVolumeBoundsBuffer.SetData(m_VisibleVolumeBounds);
                m_VisibleVolumeDataBuffer.SetData(m_VisibleVolumeData);

                // Fill the struct with pointers in order to share the data with the light loop.
                densityVolumes.bounds  = m_VisibleVolumeBounds;
                densityVolumes.density = m_VisibleVolumeData;

                return densityVolumes;
            }
        }
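
The `camOffset` subtraction above is what implements camera-relative rendering: volume bounds are rebased around the camera so that large world coordinates do not lose floating-point precision. A trivial sketch (hypothetical helper) of the rebasing step on its own:

        // Sketch: with camera-relative rendering enabled, world-space positions are rebased
        // around the camera before being uploaded, keeping the values small and precise.
        static Vector3 ToCameraRelativeSketch(Vector3 worldPosition, Vector3 cameraPosition, bool cameraRelativeRendering)
        {
            return cameraRelativeRendering ? worldPosition - cameraPosition : worldPosition;
        }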
Example #7
        // This function relies on being called once per camera per frame.
        // The results are undefined otherwise.
        internal void UpdateVolumetricBufferParams(HDCamera hdCamera)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            var parameters = ComputeVBufferParameters(hdCamera);

            // Double-buffer. I assume the cost of copying is negligible (don't want to use the frame index).
            // Handle the first frame: slot 0 has never been written yet, so the previous-frame slot also receives the current parameters.
            if (hdCamera.vBufferParams[0].viewportSize.x == 0.0f && hdCamera.vBufferParams[0].viewportSize.y == 0.0f)
            {
                hdCamera.vBufferParams[1] = parameters;
            }
            else
            {
                hdCamera.vBufferParams[1] = hdCamera.vBufferParams[0];
            }
            hdCamera.vBufferParams[0] = parameters;
        }
Example #8
        // This function relies on being called once per camera per frame.
        // The results are undefined otherwise.
        static internal void UpdateVolumetricBufferParams(HDCamera hdCamera, int frameIndex)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            Debug.Assert(hdCamera.vBufferParams != null);
            Debug.Assert(hdCamera.vBufferParams.Length == 2);

            var currentParams = ComputeVolumetricBufferParameters(hdCamera);

            var currIdx = (frameIndex + 0) & 1;
            var prevIdx = (frameIndex + 1) & 1;

            hdCamera.vBufferParams[currIdx] = currentParams;

            // Handle the first frame: the previous-frame slot has not been written yet, so reuse the current frame's parameters.
            if (hdCamera.vBufferParams[prevIdx].viewportSize.x == 0.0f && hdCamera.vBufferParams[prevIdx].viewportSize.y == 0.0f)
            {
                hdCamera.vBufferParams[prevIdx] = currentParams;
            }
        }
Example #9
        static internal void CreateVolumetricHistoryBuffers(HDCamera hdCamera, int bufferCount)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            Debug.Assert(hdCamera.volumetricHistoryBuffers == null);

            hdCamera.volumetricHistoryBuffers = new RTHandle[bufferCount];

            // Allocation happens early in the frame. So we shouldn't rely on 'hdCamera.vBufferParams'.
            // Allocate the smallest possible 3D texture.
            // We will perform rescaling manually, in a custom manner, based on volume parameters.
            const int minSize = 4;

            for (int i = 0; i < bufferCount; i++)
            {
                hdCamera.volumetricHistoryBuffers[i] = RTHandles.Alloc(minSize, minSize, minSize, colorFormat: GraphicsFormat.R16G16B16A16_SFloat, // 8888_sRGB is not precise enough
                                                                       dimension: TextureDimension.Tex3D, enableRandomWrite: true, name: string.Format("VBufferHistory{0}", i));
            }

            hdCamera.volumetricHistoryIsValid = false;
        }
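
The matching release path is not part of this listing; a hedged sketch of what it would look like, releasing each RTHandle and clearing the array so the camera reads as uninitialized again:

        // Sketch only: mirrors CreateVolumetricHistoryBuffers by releasing the handles it allocated.
        static internal void DestroyVolumetricHistoryBuffersSketch(HDCamera hdCamera)
        {
            if (hdCamera.volumetricHistoryBuffers == null)
            {
                return;
            }

            for (int i = 0; i < hdCamera.volumetricHistoryBuffers.Length; i++)
            {
                RTHandles.Release(hdCamera.volumetricHistoryBuffers[i]);
            }

            hdCamera.volumetricHistoryBuffers = null;
            hdCamera.volumetricHistoryIsValid = false;
        }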
Example #10
        void PushVolumetricLightingGlobalParams(HDCamera hdCamera, CommandBuffer cmd, int frameIndex)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, HDUtils.clearTexture3D);
                return;
            }

            // Get the interpolated anisotropy value.
            var fog = hdCamera.volumeStack.GetComponent<Fog>();

            SetPreconvolvedAmbientLightProbe(hdCamera, cmd, fog.globalLightProbeDimmer.value, fog.anisotropy.value);

            var currFrameParams = hdCamera.vBufferParams[0];
            var prevFrameParams = hdCamera.vBufferParams[1];

            // The lighting & density buffers are shared by all cameras.
            // The history & feedback buffers are specific to the camera.
            // These 2 types of buffers can have different sizes.
            // Additionally, history buffers can have different sizes, since they are not resized at the same time
            // (every frame, we swap the buffers, and resize the feedback buffer but not the history buffer).
            // The viewport size is the same for all of these buffers.
            // All of these buffers may have sub-native-resolution viewports.
            // The 3rd dimension (number of slices) is the same for all of these buffers.
            Vector2Int sharedBufferSize = new Vector2Int(m_LightingBufferHandle.rt.width, m_LightingBufferHandle.rt.height);

            Debug.Assert(m_LightingBufferHandle.rt.width == m_DensityBufferHandle.rt.width);
            Debug.Assert(m_LightingBufferHandle.rt.height == m_DensityBufferHandle.rt.height);

            Vector2Int historyBufferSize = Vector2Int.zero;

            if (hdCamera.IsVolumetricReprojectionEnabled())
            {
                var historyRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);

                historyBufferSize = new Vector2Int(historyRT.rt.width, historyRT.rt.height);

                // Handle the first frame: the history RT has not been resized yet, so fall back to the shared buffer size.
                if (historyBufferSize.x == 0.0f && historyBufferSize.y == 0.0f)
                {
                    historyBufferSize = sharedBufferSize;
                }
            }

            var cvp = currFrameParams.viewportSize;
            var pvp = prevFrameParams.viewportSize;

            // Adjust slices for XR rendering: VBuffer is shared for all single-pass views
            int sliceCount = cvp.z / hdCamera.viewCount;

            cmd.SetGlobalVector(HDShaderIDs._VBufferViewportSize, new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y));
            cmd.SetGlobalInt(HDShaderIDs._VBufferSliceCount, sliceCount);
            cmd.SetGlobalFloat(HDShaderIDs._VBufferRcpSliceCount, 1.0f / sliceCount);
            cmd.SetGlobalVector(HDShaderIDs._VBufferSharedUvScaleAndLimit, currFrameParams.ComputeUvScaleAndLimit(sharedBufferSize));
            cmd.SetGlobalVector(HDShaderIDs._VBufferDistanceEncodingParams, currFrameParams.depthEncodingParams);
            cmd.SetGlobalVector(HDShaderIDs._VBufferDistanceDecodingParams, currFrameParams.depthDecodingParams);
            cmd.SetGlobalFloat(HDShaderIDs._VBufferLastSliceDist, currFrameParams.ComputeLastSliceDistance(sliceCount));
            cmd.SetGlobalFloat(HDShaderIDs._VBufferRcpInstancedViewCount, 1.0f / hdCamera.viewCount);

            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevViewportSize, new Vector4(pvp.x, pvp.y, 1.0f / pvp.x, 1.0f / pvp.y));
            cmd.SetGlobalVector(HDShaderIDs._VBufferHistoryPrevUvScaleAndLimit, prevFrameParams.ComputeUvScaleAndLimit(historyBufferSize));
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevDepthEncodingParams, prevFrameParams.depthEncodingParams);
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevDepthDecodingParams, prevFrameParams.depthDecodingParams);

            cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, m_LightingBufferHandle);
        }
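
The UV scale-and-limit vectors account for a viewport that covers only part of the physical texture: the scale maps viewport-relative UVs into the buffer, and the limit clamps sampling to half a texel inside the viewport edge so bilinear filtering never reads outside it. A hedged sketch of such a packing (the real `ComputeUvScaleAndLimit` is not reproduced in this listing):

        // Sketch only: one plausible (scale.xy, limit.xy) packing, assuming the viewport
        // starts at (0, 0) inside a potentially larger physical buffer.
        static Vector4 ComputeUvScaleAndLimitSketch(Vector2Int viewportSize, Vector2Int bufferSize)
        {
            Vector2 rcpBufferSize = new Vector2(1.0f / bufferSize.x, 1.0f / bufferSize.y);

            // Scale: the viewport resolution as a fraction of the buffer resolution.
            Vector2 uvScale = new Vector2(viewportSize.x * rcpBufferSize.x,
                                          viewportSize.y * rcpBufferSize.y);

            // Limit: half a texel inside the viewport edge, so bilinear taps stay in bounds.
            Vector2 uvLimit = new Vector2((viewportSize.x - 0.5f) * rcpBufferSize.x,
                                          (viewportSize.y - 0.5f) * rcpBufferSize.y);

            return new Vector4(uvScale.x, uvScale.y, uvLimit.x, uvLimit.y);
        }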