Example #1
        // This function relies on being called once per camera per frame.
        // The results are undefined otherwise.
        internal void UpdateVolumetricBufferParams(HDCamera hdCamera)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            var parameters = ComputeVBufferParameters(hdCamera);

            // Double-buffer. I assume the cost of copying is negligible (don't want to use the frame index).
            // Handle the case of the first frame: there is no previous frame yet, so reuse the current frame's value.
            if (hdCamera.vBufferParams[0].viewportSize.x == 0.0f && hdCamera.vBufferParams[0].viewportSize.y == 0.0f)
            {
                hdCamera.vBufferParams[1] = parameters;
            }
            else
            {
                hdCamera.vBufferParams[1] = hdCamera.vBufferParams[0];
            }
            hdCamera.vBufferParams[0] = parameters;
        }
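For reference, here is a minimal, self-contained sketch of the copy-based double buffer above, using a plain stand-in struct instead of HDRP's VBufferParameters (all names below are illustrative, not HDRP API): slot 0 holds the current frame's parameters, slot 1 the previous frame's, and on the very first frame the previous slot is seeded with the current values.

        // Sketch only: `ViewportParams` stands in for VBufferParameters.
        // Slot 0 = current frame, slot 1 = previous frame.
        struct ViewportParams
        {
            public float width, height;
        }

        class DoubleBufferSketch
        {
            ViewportParams[] vBufferParams = new ViewportParams[2];

            public void Update(ViewportParams current)
            {
                // First frame: slot 0 is still zero-initialized, so seed the previous slot with the current values.
                if (vBufferParams[0].width == 0.0f && vBufferParams[0].height == 0.0f)
                    vBufferParams[1] = current;
                else
                    vBufferParams[1] = vBufferParams[0]; // shift last frame's parameters into the previous slot

                vBufferParams[0] = current;              // store this frame's parameters in the current slot
            }
        }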
Example #2
        public void RenderOpaqueAtmosphericScattering(CommandBuffer cmd, HDCamera hdCamera,
                                                      RTHandle colorBuffer,
                                                      RTHandle volumetricLighting,
                                                      RTHandle intermediateBuffer,
                                                      RTHandle depthBuffer,
                                                      Matrix4x4 pixelCoordToViewDirWS, bool isMSAA)
        {
            using (new ProfilingSample(cmd, "Opaque Atmospheric Scattering"))
            {
                m_OpaqueAtmScatteringBlock.SetMatrix(HDShaderIDs._PixelCoordToViewDirWS, pixelCoordToViewDirWS);
                if (isMSAA)
                {
                    m_OpaqueAtmScatteringBlock.SetTexture(HDShaderIDs._ColorTextureMS, colorBuffer);
                }
                else
                {
                    m_OpaqueAtmScatteringBlock.SetTexture(HDShaderIDs._ColorTexture, colorBuffer);
                }
                // The texture can be null when volumetrics are disabled.
                if (volumetricLighting != null)
                {
                    m_OpaqueAtmScatteringBlock.SetTexture(HDShaderIDs._VBufferLighting, volumetricLighting);
                }

                if (Fog.IsPBRFogEnabled(hdCamera))
                {
                    // Color -> Intermediate.
                    HDUtils.DrawFullScreen(cmd, m_OpaqueAtmScatteringMaterial, intermediateBuffer, depthBuffer, m_OpaqueAtmScatteringBlock, isMSAA ? 3 : 2);
                    // Intermediate -> Color.
                    // Note: Blit does not support MSAA (and is probably slower).
                    cmd.CopyTexture(intermediateBuffer, colorBuffer);
                }
                else
                {
                    HDUtils.DrawFullScreen(cmd, m_OpaqueAtmScatteringMaterial, colorBuffer, depthBuffer, m_OpaqueAtmScatteringBlock, isMSAA ? 1 : 0);
                }
            }
        }
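The pass indices 0/1/2/3 passed to DrawFullScreen above encode the PBR-fog and MSAA combinations. A small sketch of one way to derive them, assuming the pass layout implied by the two call sites (this helper is hypothetical, not part of HDRP):

        static class AtmosphericScatteringPassIndex
        {
            // Pass layout assumed from the call sites above:
            // 0 = non-PBR fog, 1 = non-PBR fog + MSAA, 2 = PBR fog, 3 = PBR fog + MSAA.
            public static int Select(bool isPBRFog, bool isMSAA)
            {
                int passIndex = isPBRFog ? 2 : 0; // PBR-fog variants start at pass 2
                if (isMSAA)
                    passIndex += 1;               // the MSAA variant is always the next pass
                return passIndex;
            }
        }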
Example #3
        void PushVolumetricLightingGlobalParams(HDCamera hdCamera, CommandBuffer cmd, int frameIndex)
        {
            if (!Fog.IsVolumetricLightingEnabled(hdCamera))
            {
                cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, HDUtils.clearTexture3D);
                return;
            }

            // Get the interpolated anisotropy value.
            var fog = VolumeManager.instance.stack.GetComponent<Fog>();

            SetPreconvolvedAmbientLightProbe(cmd, fog.globalLightProbeDimmer.value, fog.anisotropy.value);

            var currFrameParams = hdCamera.vBufferParams[0];
            var prevFrameParams = hdCamera.vBufferParams[1];

            // All buffers are of the same size, and are resized at the same time, at the beginning of the frame.
            Vector2Int bufferSize = new Vector2Int(m_LightingBufferHandle.rt.width, m_LightingBufferHandle.rt.height);

            var cvp = currFrameParams.viewportSize;
            var pvp = prevFrameParams.viewportSize;

            cmd.SetGlobalVector(HDShaderIDs._VBufferResolution, new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y));
            cmd.SetGlobalInt(HDShaderIDs._VBufferSliceCount, cvp.z);
            cmd.SetGlobalFloat(HDShaderIDs._VBufferRcpSliceCount, 1.0f / cvp.z);
            cmd.SetGlobalVector(HDShaderIDs._VBufferUvScaleAndLimit, currFrameParams.ComputeUvScaleAndLimit(bufferSize));
            cmd.SetGlobalVector(HDShaderIDs._VBufferDistanceEncodingParams, currFrameParams.depthEncodingParams);
            cmd.SetGlobalVector(HDShaderIDs._VBufferDistanceDecodingParams, currFrameParams.depthDecodingParams);
            cmd.SetGlobalFloat(HDShaderIDs._VBufferLastSliceDist, currFrameParams.ComputeLastSliceDistance());
            cmd.SetGlobalFloat(HDShaderIDs._VBufferRcpInstancedViewCount, 1.0f / hdCamera.viewCount);

            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevResolution, new Vector4(pvp.x, pvp.y, 1.0f / pvp.x, 1.0f / pvp.y));
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevUvScaleAndLimit, prevFrameParams.ComputeUvScaleAndLimit(bufferSize));
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevDepthEncodingParams, prevFrameParams.depthEncodingParams);
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevDepthDecodingParams, prevFrameParams.depthDecodingParams);

            cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, m_LightingBufferHandle);
        }
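The _VBufferResolution vector above follows the common (width, height, 1/width, 1/height) packing, so shaders get both the size and its reciprocal without per-pixel divisions. A standalone sketch of just that packing, with a minimal vector stand-in (illustrative only):

        readonly struct Packed4
        {
            public readonly float X, Y, Z, W;
            public Packed4(float x, float y, float z, float w) { X = x; Y = y; Z = z; W = w; }
        }

        static class VBufferPackingSketch
        {
            // Mirrors the new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y) pattern above.
            public static Packed4 PackResolution(float width, float height)
                => new Packed4(width, height, 1.0f / width, 1.0f / height);
        }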
Example #4
        internal void ReinitializeVolumetricBufferParams(HDCamera hdCamera)
        {
            bool fog  = Fog.IsVolumetricFogEnabled(hdCamera);
            bool init = hdCamera.vBufferParams != null;

            if (fog ^ init)
            {
                if (init)
                {
                    // Deinitialize.
                    hdCamera.vBufferParams = null;
                }
                else
                {
                    // Initialize.
                    // Start with the same parameters for both frames. Then update them one by one every frame.
                    var parameters = ComputeVBufferParameters(hdCamera);
                    hdCamera.vBufferParams    = new VBufferParameters[2];
                    hdCamera.vBufferParams[0] = parameters;
                    hdCamera.vBufferParams[1] = parameters;
                }
            }
        }
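The fog ^ init test above only does work when the desired state and the allocation state disagree. A minimal sketch of the same toggle pattern with a placeholder resource (names are illustrative, not HDRP types):

        class LazyFeatureResourceSketch
        {
            int[] buffer; // stands in for hdCamera.vBufferParams

            public void Reinitialize(bool featureEnabled)
            {
                bool allocated = buffer != null;

                if (featureEnabled ^ allocated)     // states disagree, so a transition is needed
                {
                    if (allocated)
                        buffer = null;              // feature just turned off: release
                    else
                        buffer = new int[2];        // feature just turned on: allocate both slots
                }
            }
        }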
Example #5
        // This function relies on being called once per camera per frame.
        // The results are undefined otherwise.
        static internal void UpdateVolumetricBufferParams(HDCamera hdCamera, int frameIndex)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            Debug.Assert(hdCamera.vBufferParams != null);
            Debug.Assert(hdCamera.vBufferParams.Length == 2);

            var currentParams = ComputeVolumetricBufferParameters(hdCamera);

            var currIdx = (frameIndex + 0) & 1;
            var prevIdx = (frameIndex + 1) & 1;

            hdCamera.vBufferParams[currIdx] = currentParams;

            // Handle the case of the first frame: the previous-frame slot has never been written, so reuse the current parameters.
            if (hdCamera.vBufferParams[prevIdx].viewportSize.x == 0.0f && hdCamera.vBufferParams[prevIdx].viewportSize.y == 0.0f)
            {
                hdCamera.vBufferParams[prevIdx] = currentParams;
            }
        }
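Unlike Example #1, this version never copies parameters between slots; the two slots simply swap roles every frame based on frame parity. A compact standalone sketch of that indexing (stand-in types, not HDRP code):

        class ParityBufferSketch
        {
            struct ViewportSize { public float width, height; }

            ViewportSize[] vBufferParams = new ViewportSize[2];

            public void Update(int frameIndex, ViewportSize current)
            {
                int currIdx = (frameIndex + 0) & 1; // this frame's slot
                int prevIdx = (frameIndex + 1) & 1; // last frame's slot (always the other one)

                vBufferParams[currIdx] = current;

                // First frame: the previous slot has never been written, so seed it with the current parameters.
                if (vBufferParams[prevIdx].width == 0.0f && vBufferParams[prevIdx].height == 0.0f)
                    vBufferParams[prevIdx] = current;
            }
        }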
Example #6
        static internal void CreateVolumetricHistoryBuffers(HDCamera hdCamera, int bufferCount)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                return;
            }

            Debug.Assert(hdCamera.volumetricHistoryBuffers == null);

            hdCamera.volumetricHistoryBuffers = new RTHandle[bufferCount];

            // Allocation happens early in the frame. So we shouldn't rely on 'hdCamera.vBufferParams'.
            // Allocate the smallest possible 3D texture.
            // We will perform rescaling manually, in a custom manner, based on volume parameters.
            const int minSize = 4;

            for (int i = 0; i < bufferCount; i++)
            {
                hdCamera.volumetricHistoryBuffers[i] = RTHandles.Alloc(minSize, minSize, minSize, colorFormat: GraphicsFormat.R16G16B16A16_SFloat, // 8888_sRGB is not precise enough
                                                                       dimension: TextureDimension.Tex3D, enableRandomWrite: true, name: string.Format("VBufferHistory{0}", i));
            }

            hdCamera.volumetricHistoryIsValid = false;
        }
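Setting volumetricHistoryIsValid to false at the end matters: the first pass after (re)allocation must not reproject from the freshly allocated, uninitialized buffers. A minimal sketch of that validity handshake, with plain arrays standing in for the RTHandles (illustrative only):

        class VolumetricHistorySketch
        {
            float[][] historyBuffers;   // stands in for the RTHandle[] above
            bool historyIsValid;

            public void Allocate(int bufferCount, int minSize)
            {
                historyBuffers = new float[bufferCount][];
                for (int i = 0; i < bufferCount; i++)
                    historyBuffers[i] = new float[minSize * minSize * minSize]; // smallest possible 3D volume

                historyIsValid = false; // nothing meaningful has been rendered into these buffers yet
            }

            // Callers fall back to no reprojection until at least one frame has been written.
            public float[] TryGetHistory(int index)
                => historyIsValid ? historyBuffers[index] : null;

            public void MarkHistoryWritten() => historyIsValid = true;
        }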
Example #7
        private void CheckDirtiness(HDCamera hdCamera)
        {
            if (m_SubFrameManager.isRecording)
            {
                return;
            }

            // Grab the cached data for the current camera
            int        camID   = hdCamera.camera.GetInstanceID();
            CameraData camData = m_SubFrameManager.GetCameraData(camID);

            // Check camera resolution dirtiness
            if (hdCamera.actualWidth != camData.width || hdCamera.actualHeight != camData.height)
            {
                camData.width  = (uint)hdCamera.actualWidth;
                camData.height = (uint)hdCamera.actualHeight;
                camData.ResetIteration();
                m_SubFrameManager.SetCameraData(camID, camData);
                return;
            }

            // Check camera sky dirtiness
            bool enabled = (hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky);

            if (enabled != camData.skyEnabled)
            {
                camData.skyEnabled = enabled;
                camData.ResetIteration();
                m_SubFrameManager.SetCameraData(camID, camData);
                return;
            }

            // Check camera fog dirtiness
            enabled = Fog.IsFogEnabled(hdCamera);
            if (enabled != camData.fogEnabled)
            {
                camData.fogEnabled = enabled;
                camData.ResetIteration();
                m_SubFrameManager.SetCameraData(camID, camData);
                return;
            }

            // Check camera matrix dirtiness
            if (hdCamera.mainViewConstants.nonJitteredViewProjMatrix != (hdCamera.mainViewConstants.prevViewProjMatrix))
            {
                camData.ResetIteration();
                m_SubFrameManager.SetCameraData(camID, camData);
                return;
            }

            // Check materials dirtiness
            if (m_MaterialsDirty)
            {
                m_MaterialsDirty = false;
                ResetPathTracing();
                return;
            }

            // Check light or geometry transforms dirtiness
            if (m_TransformDirty)
            {
                m_TransformDirty = false;
                ResetPathTracing();
            }

            // Check lights dirtiness
            if (m_CacheLightCount != m_RayTracingLights.lightCount)
            {
                m_CacheLightCount = (uint)m_RayTracingLights.lightCount;
                ResetPathTracing();
                return;
            }

            // Check geometry dirtiness
            ulong accelSize = m_CurrentRAS.GetSize();

            if (accelSize != m_CacheAccelSize)
            {
                m_CacheAccelSize = accelSize;
                ResetPathTracing();
            }
        }
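Every check above follows the same compare, cache, reset shape. A tiny generic helper, shown only as a sketch of that recurring step (not an HDRP API), could express it as:

        static class DirtyCheckSketch
        {
            // Returns true, and updates the cached value, when the current value differs from the cache.
            public static bool Changed<T>(ref T cached, T current) where T : System.IEquatable<T>
            {
                if (cached.Equals(current))
                    return false;

                cached = current;
                return true;
            }
        }

        // Hypothetical usage, mirroring the resolution check above:
        //     if (DirtyCheckSketch.Changed(ref camData.width, (uint)hdCamera.actualWidth))
        //     {
        //         camData.ResetIteration();
        //         m_SubFrameManager.SetCameraData(camID, camData);
        //     }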
Example #8
        private void CheckDirtiness(HDCamera hdCamera)
        {
            if (m_SubFrameManager.isRecording)
            {
                return;
            }

            // Check camera clear mode dirtiness
            bool enabled = (hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky);

            if (enabled != m_CameraSkyEnabled)
            {
                m_CameraSkyEnabled = enabled;
                ResetPathTracing();
                return;
            }

            // Check camera resolution dirtiness
            if (hdCamera.actualWidth != m_CacheCameraWidth || hdCamera.actualHeight != m_CacheCameraHeight)
            {
                m_CacheCameraWidth  = (uint)hdCamera.actualWidth;
                m_CacheCameraHeight = (uint)hdCamera.actualHeight;
                ResetPathTracing();
                return;
            }

            // Check camera matrix dirtiness
            if (hdCamera.mainViewConstants.nonJitteredViewProjMatrix != (hdCamera.mainViewConstants.prevViewProjMatrix))
            {
                ResetPathTracing();
                return;
            }

            // Check fog dirtiness
            enabled = Fog.IsFogEnabled(hdCamera);
            if (enabled != m_FogEnabled)
            {
                m_FogEnabled = enabled;
                ResetPathTracing();
                return;
            }

            // Check materials dirtiness
            if (m_MaterialsDirty)
            {
                ResetPathTracing();
                m_MaterialsDirty = false;
                return;
            }

            // Check lights dirtiness
            if (m_CacheLightCount != m_RayTracingLights.lightCount)
            {
                m_CacheLightCount = (uint)m_RayTracingLights.lightCount;
                ResetPathTracing();
                return;
            }

            // Check geometry dirtiness
            ulong accelSize = m_CurrentRAS.GetSize();

            if (accelSize != m_CacheAccelSize)
            {
                m_CacheAccelSize = accelSize;
                ResetPathTracing();
            }
        }
Example #9
        void PushVolumetricLightingGlobalParams(HDCamera hdCamera, CommandBuffer cmd, int frameIndex)
        {
            if (!Fog.IsVolumetricFogEnabled(hdCamera))
            {
                cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, HDUtils.clearTexture3D);
                return;
            }

            // Get the interpolated anisotropy value.
            var fog = hdCamera.volumeStack.GetComponent<Fog>();

            SetPreconvolvedAmbientLightProbe(hdCamera, cmd, fog.globalLightProbeDimmer.value, fog.anisotropy.value);

            var currFrameParams = hdCamera.vBufferParams[0];
            var prevFrameParams = hdCamera.vBufferParams[1];

            // The lighting & density buffers are shared by all cameras.
            // The history & feedback buffers are specific to the camera.
            // These 2 types of buffers can have different sizes.
            // Additionally, history buffers can have different sizes, since they are not resized at the same time
            // (every frame, we swap the buffers, and resize the feedback buffer but not the history buffer).
            // The viewport size is the same for all of these buffers.
            // All of these buffers may have sub-native-resolution viewports.
            // The 3rd dimension (number of slices) is the same for all of these buffers.
            Vector2Int sharedBufferSize = new Vector2Int(m_LightingBufferHandle.rt.width, m_LightingBufferHandle.rt.height);

            Debug.Assert(m_LightingBufferHandle.rt.width == m_DensityBufferHandle.rt.width);
            Debug.Assert(m_LightingBufferHandle.rt.height == m_DensityBufferHandle.rt.height);

            Vector2Int historyBufferSize = Vector2Int.zero;

            if (hdCamera.IsVolumetricReprojectionEnabled())
            {
                var historyRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);

                historyBufferSize = new Vector2Int(historyRT.rt.width, historyRT.rt.height);

                // Handle the case of the first frame: the history buffer has no valid size yet, so fall back to the shared buffer size.
                if (historyBufferSize.x == 0.0f && historyBufferSize.y == 0.0f)
                {
                    historyBufferSize = sharedBufferSize;
                }
            }

            var cvp = currFrameParams.viewportSize;
            var pvp = prevFrameParams.viewportSize;

            // Adjust slices for XR rendering: VBuffer is shared for all single-pass views
            int sliceCount = cvp.z / hdCamera.viewCount;

            cmd.SetGlobalVector(HDShaderIDs._VBufferViewportSize, new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y));
            cmd.SetGlobalInt(HDShaderIDs._VBufferSliceCount, sliceCount);
            cmd.SetGlobalFloat(HDShaderIDs._VBufferRcpSliceCount, 1.0f / sliceCount);
            cmd.SetGlobalVector(HDShaderIDs._VBufferSharedUvScaleAndLimit, currFrameParams.ComputeUvScaleAndLimit(sharedBufferSize));
            cmd.SetGlobalVector(HDShaderIDs._VBufferDistanceEncodingParams, currFrameParams.depthEncodingParams);
            cmd.SetGlobalVector(HDShaderIDs._VBufferDistanceDecodingParams, currFrameParams.depthDecodingParams);
            cmd.SetGlobalFloat(HDShaderIDs._VBufferLastSliceDist, currFrameParams.ComputeLastSliceDistance(sliceCount));
            cmd.SetGlobalFloat(HDShaderIDs._VBufferRcpInstancedViewCount, 1.0f / hdCamera.viewCount);

            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevViewportSize, new Vector4(pvp.x, pvp.y, 1.0f / pvp.x, 1.0f / pvp.y));
            cmd.SetGlobalVector(HDShaderIDs._VBufferHistoryPrevUvScaleAndLimit, prevFrameParams.ComputeUvScaleAndLimit(historyBufferSize));
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevDepthEncodingParams, prevFrameParams.depthEncodingParams);
            cmd.SetGlobalVector(HDShaderIDs._VBufferPrevDepthDecodingParams, prevFrameParams.depthDecodingParams);

            cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, m_LightingBufferHandle);
        }
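ComputeUvScaleAndLimit itself is not shown here. A plausible standalone version, assuming the usual sub-viewport convention (the scale maps [0,1] UVs onto the rendered viewport, the limit clamps bilinear taps half a texel inside its edge), could look like the sketch below; this is an assumption, not the actual HDRP implementation.

        readonly struct UvScaleAndLimitSketch
        {
            public readonly float scaleX, scaleY, limitX, limitY;

            public UvScaleAndLimitSketch(int viewportW, int viewportH, int bufferW, int bufferH)
            {
                // Scale maps normalized viewport UVs onto the sub-rectangle actually rendered into.
                scaleX = (float)viewportW / bufferW;
                scaleY = (float)viewportH / bufferH;

                // Limit keeps bilinear taps half a texel inside the last valid row/column,
                // so samples never bleed into the unused part of the shared texture.
                limitX = (viewportW - 0.5f) / bufferW;
                limitY = (viewportH - 0.5f) / bufferH;
            }
        }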
Example #10
        private CameraData CheckDirtiness(HDCamera hdCamera, int camID, CameraData camData)
        {
            // Check camera resolution dirtiness
            if (hdCamera.actualWidth != camData.width || hdCamera.actualHeight != camData.height)
            {
                camData.width  = (uint)hdCamera.actualWidth;
                camData.height = (uint)hdCamera.actualHeight;
                return(ResetPathTracing(camID, camData));
            }

            // Check camera sky dirtiness
            bool enabled = (hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky);

            if (enabled != camData.skyEnabled)
            {
                camData.skyEnabled = enabled;
                return(ResetPathTracing(camID, camData));
            }

            // Check camera fog dirtiness
            enabled = Fog.IsFogEnabled(hdCamera);
            if (enabled != camData.fogEnabled)
            {
                camData.fogEnabled = enabled;
                return(ResetPathTracing(camID, camData));
            }

            // Check camera matrix dirtiness
            if (hdCamera.mainViewConstants.nonJitteredViewProjMatrix != (hdCamera.mainViewConstants.prevViewProjMatrix))
            {
                return(ResetPathTracing(camID, camData));
            }

            // Check materials dirtiness
            if (m_MaterialsDirty)
            {
                m_MaterialsDirty = false;
                ResetPathTracing();
                return(camData);
            }

            // Check light or geometry transforms dirtiness
            if (m_TransformDirty)
            {
                m_TransformDirty = false;
                ResetPathTracing();
                return(camData);
            }

            // Check lights dirtiness
            if (m_CacheLightCount != m_RayTracingLights.lightCount)
            {
                m_CacheLightCount = (uint)m_RayTracingLights.lightCount;
                ResetPathTracing();
                return(camData);
            }

            // Check geometry dirtiness
            ulong accelSize = m_CurrentRAS.GetSize();

            if (accelSize != m_CacheAccelSize)
            {
                m_CacheAccelSize = accelSize;
                ResetPathTracing();
            }

            // If the camera has changed, re-render the sky texture
            if (camID != m_CameraID)
            {
                m_RenderSky = true;
                m_CameraID  = camID;
            }

            return(camData);
        }
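Unlike Example #7, this variant returns the (possibly updated) camData instead of writing it back through SetCameraData in every branch; assuming CameraData is a value type, the caller is responsible for storing the returned copy. A small sketch of that flow with stand-in types:

        struct CameraStateSketch          // stand-in for CameraData (assumed to be a struct)
        {
            public uint width, height;
            public uint iteration;

            public void ResetIteration() => iteration = 0;
        }

        static class DirtyFlowSketch
        {
            public static CameraStateSketch UpdateResolution(CameraStateSketch state, uint width, uint height)
            {
                if (state.width != width || state.height != height)
                {
                    state.width  = width;
                    state.height = height;
                    state.ResetIteration();   // restart accumulation for this camera only
                }
                return state;                 // the caller stores the updated copy (mirrors SetCameraData)
            }
        }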
Example #11
        unsafe void SetPreconvolvedAmbientLightProbe(ref ShaderVariablesVolumetric cb, HDCamera hdCamera, Fog fog)
        {
            SphericalHarmonicsL2 probeSH = SphericalHarmonicMath.UndoCosineRescaling(m_SkyManager.GetAmbientProbe(hdCamera));

            probeSH = SphericalHarmonicMath.RescaleCoefficients(probeSH, fog.globalLightProbeDimmer.value);
            ZonalHarmonicsL2.GetCornetteShanksPhaseFunction(m_PhaseZH, fog.anisotropy.value);
            SphericalHarmonicsL2 finalSH = SphericalHarmonicMath.PremultiplyCoefficients(SphericalHarmonicMath.Convolve(probeSH, m_PhaseZH));

            SphericalHarmonicMath.PackCoefficients(m_PackedCoeffs, finalSH);
            for (int i = 0; i < 7; i++)
            {
                for (int j = 0; j < 4; ++j)
                {
                    cb._AmbientProbeCoeffs[i * 4 + j] = m_PackedCoeffs[i][j];
                }
            }
        }
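The final loops flatten 7 packed Vector4 coefficient sets into a 28-float array laid out for the constant buffer. A standalone version of just that packing step, with a minimal vector stand-in (illustrative, not the HDRP types):

        readonly struct Coeff4
        {
            public readonly float x, y, z, w;
            public Coeff4(float x, float y, float z, float w) { this.x = x; this.y = y; this.z = z; this.w = w; }
            public float this[int i] => i switch { 0 => x, 1 => y, 2 => z, _ => w };
        }

        static class AmbientProbePackingSketch
        {
            // Mirrors the i * 4 + j layout written into cb._AmbientProbeCoeffs above.
            public static float[] Flatten(Coeff4[] packedCoeffs) // expects 7 entries, like m_PackedCoeffs
            {
                var flat = new float[packedCoeffs.Length * 4];
                for (int i = 0; i < packedCoeffs.Length; i++)
                    for (int j = 0; j < 4; j++)
                        flat[i * 4 + j] = packedCoeffs[i][j];
                return flat;
            }
        }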