Example #1
        void UpdateAntialiasing()
        {
            // Handle post-process AA
            //  - If post-processing is disabled altogether, no AA
            //  - In scene view, only enable TAA if animated materials are enabled
            //  - Else just use the currently set AA mode on the camera
            {
                if (!m_frameSettings.IsEnabled(FrameSettingsField.Postprocess) || !CoreUtils.ArePostProcessesEnabled(camera))
                {
                    antialiasing = AntialiasingMode.None;
                }
#if UNITY_EDITOR
                else if (camera.cameraType == CameraType.SceneView)
                {
                    var mode = HDRenderPipelinePreferences.sceneViewAntialiasing;

                    if (mode == AntialiasingMode.TemporalAntialiasing && !CoreUtils.AreAnimatedMaterialsEnabled(camera))
                    {
                        antialiasing = AntialiasingMode.None;
                    }
                    else
                    {
                        antialiasing = mode;
                    }
                }
#endif
                else if (m_AdditionalCameraData != null)
                {
                    antialiasing = m_AdditionalCameraData.antialiasing;
                    if (antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing)
                    {
                        SMAAQuality = m_AdditionalCameraData.SMAAQuality;
                    }
                }
                else
                {
                    antialiasing = AntialiasingMode.None;
                }
            }

            if (antialiasing != AntialiasingMode.TemporalAntialiasing)
            {
                taaFrameIndex = 0;
                taaJitter     = Vector4.zero;
            }

            // TODO: is this used?
            {
                float t = taaFrameIndex * (0.5f * Mathf.PI);
                taaFrameRotation = new Vector2(Mathf.Sin(t), Mathf.Cos(t));
            }
        }
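
A note on the rotation above: with t = taaFrameIndex * (PI / 2), the (sin t, cos t) pair cycles through four unit vectors as the frame index advances. A minimal standalone sketch (illustration only, not HDCamera code) that reproduces the sequence:

using UnityEngine;

// Illustration only: reproduces the taaFrameRotation computation from UpdateAntialiasing().
// Up to floating-point rounding, the rotation repeats every 4 frames:
//   taaFrameIndex 0 -> (0, 1), 1 -> (1, 0), 2 -> (0, -1), 3 -> (-1, 0), 4 -> (0, 1), ...
public static class TaaFrameRotationExample
{
    public static Vector2 GetFrameRotation(int taaFrameIndex)
    {
        float t = taaFrameIndex * (0.5f * Mathf.PI);
        return new Vector2(Mathf.Sin(t), Mathf.Cos(t));
    }
}
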
Example #2
        // Set up UnityPerView CBuffer.
        public void SetupGlobalParams(CommandBuffer cmd, float time, float lastTime, uint frameCount)
        {
            bool taaEnabled = m_frameSettings.IsEnabled(FrameSettingsField.Postprocess) &&
                              antialiasing == AntialiasingMode.TemporalAntialiasing &&
                              camera.cameraType == CameraType.Game;

            cmd.SetGlobalMatrix(HDShaderIDs._ViewMatrix, viewMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvViewMatrix, viewMatrix.inverse);
            cmd.SetGlobalMatrix(HDShaderIDs._ProjMatrix, projMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvProjMatrix, projMatrix.inverse);
            cmd.SetGlobalMatrix(HDShaderIDs._ViewProjMatrix, viewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvViewProjMatrix, viewProjMatrix.inverse);
            cmd.SetGlobalMatrix(HDShaderIDs._NonJitteredViewProjMatrix, nonJitteredViewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._PrevViewProjMatrix, prevViewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._CameraViewProjMatrix, viewProjMatrix);
            cmd.SetGlobalVector(HDShaderIDs._WorldSpaceCameraPos, worldSpaceCameraPos);
            cmd.SetGlobalVector(HDShaderIDs._PrevCamPosRWS, prevWorldSpaceCameraPos);
            cmd.SetGlobalVector(HDShaderIDs._ScreenSize, screenSize);
            cmd.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, doubleBufferedViewportScale);
            cmd.SetGlobalVector(HDShaderIDs._ScreenToTargetScaleHistory, doubleBufferedViewportScaleHistory);
            cmd.SetGlobalVector(HDShaderIDs._ZBufferParams, zBufferParams);
            cmd.SetGlobalVector(HDShaderIDs._ProjectionParams, projectionParams);
            cmd.SetGlobalVector(HDShaderIDs.unity_OrthoParams, unity_OrthoParams);
            cmd.SetGlobalVector(HDShaderIDs._ScreenParams, screenParams);
            cmd.SetGlobalVector(HDShaderIDs._TaaFrameInfo, new Vector4(taaFrameRotation.x, taaFrameRotation.y, taaFrameIndex, taaEnabled ? 1 : 0));
            cmd.SetGlobalVector(HDShaderIDs._TaaJitterStrength, taaJitter);
            cmd.SetGlobalVectorArray(HDShaderIDs._FrustumPlanes, frustumPlaneEquations);

            // Time is also a part of the UnityPerView CBuffer.
            // Different views can have different values of the "Animated Materials" setting.
            bool animateMaterials = CoreUtils.AreAnimatedMaterialsEnabled(camera);

            float ct  = animateMaterials ? time     : 0;
            float pt  = animateMaterials ? lastTime : 0;
            float dt  = Time.deltaTime;
            float sdt = Time.smoothDeltaTime;

            cmd.SetGlobalVector(HDShaderIDs._Time, new Vector4(ct * 0.05f, ct, ct * 2.0f, ct * 3.0f));
            cmd.SetGlobalVector(HDShaderIDs._LastTime, new Vector4(pt * 0.05f, pt, pt * 2.0f, pt * 3.0f));
            cmd.SetGlobalVector(HDShaderIDs.unity_DeltaTime, new Vector4(dt, 1.0f / dt, sdt, 1.0f / sdt));
            cmd.SetGlobalVector(HDShaderIDs._SinTime, new Vector4(Mathf.Sin(ct * 0.125f), Mathf.Sin(ct * 0.25f), Mathf.Sin(ct * 0.5f), Mathf.Sin(ct)));
            cmd.SetGlobalVector(HDShaderIDs._CosTime, new Vector4(Mathf.Cos(ct * 0.125f), Mathf.Cos(ct * 0.25f), Mathf.Cos(ct * 0.5f), Mathf.Cos(ct)));
            cmd.SetGlobalInt(HDShaderIDs._FrameCount, (int)frameCount);


            // TODO VR: The current solution for compute shaders grabs matrices from the
            // stereo matrices even when not rendering stereo, in order to reduce shader variants.
            // After native fix for compute shader keywords is completed, qualify this with stereoEnabled.
            SetupGlobalStereoParams(cmd);
        }
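
Because ct and pt are zeroed when "Animated Materials" is disabled for the view, every time-derived constant above collapses to its t = 0 value: _Time, _LastTime and _SinTime become zero vectors while _CosTime becomes (1, 1, 1, 1). A minimal sketch of the packing convention, as an illustration only:

using UnityEngine;

// Illustration only: the per-view time packing used by SetupGlobalParams above.
// When animateMaterials is false, ct == 0, so timeVec and sinTime are (0,0,0,0)
// and cosTime is (1,1,1,1).
public static class PerViewTimeExample
{
    public static void Pack(float time, bool animateMaterials,
                            out Vector4 timeVec, out Vector4 sinTime, out Vector4 cosTime)
    {
        float ct = animateMaterials ? time : 0f;

        timeVec = new Vector4(ct * 0.05f, ct, ct * 2.0f, ct * 3.0f);          // t/20, t, 2t, 3t
        sinTime = new Vector4(Mathf.Sin(ct * 0.125f), Mathf.Sin(ct * 0.25f),
                              Mathf.Sin(ct * 0.5f),   Mathf.Sin(ct));         // sin(t/8), sin(t/4), sin(t/2), sin(t)
        cosTime = new Vector4(Mathf.Cos(ct * 0.125f), Mathf.Cos(ct * 0.25f),
                              Mathf.Cos(ct * 0.5f),   Mathf.Cos(ct));
    }
}
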
Example #3
        // Set up UnityPerView CBuffer.
        public void SetupGlobalParams(CommandBuffer cmd, float time, float lastTime, uint frameCount)
        {
            bool taaEnabled = m_frameSettings.IsEnabled(FrameSettingsField.Postprocess) &&
                              antialiasing == AntialiasingMode.TemporalAntialiasing &&
                              camera.cameraType == CameraType.Game;

            cmd.SetGlobalMatrix(HDShaderIDs._ViewMatrix, mainViewConstants.viewMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvViewMatrix, mainViewConstants.invViewMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._ProjMatrix, mainViewConstants.projMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvProjMatrix, mainViewConstants.invProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._ViewProjMatrix, mainViewConstants.viewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvViewProjMatrix, mainViewConstants.invViewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._NonJitteredViewProjMatrix, mainViewConstants.nonJitteredViewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._PrevViewProjMatrix, mainViewConstants.prevViewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._CameraViewProjMatrix, mainViewConstants.viewProjMatrix);
            cmd.SetGlobalVector(HDShaderIDs._WorldSpaceCameraPos, mainViewConstants.worldSpaceCameraPos);
            cmd.SetGlobalVector(HDShaderIDs._PrevCamPosRWS, mainViewConstants.prevWorldSpaceCameraPos);
            cmd.SetGlobalVector(HDShaderIDs._ScreenSize, screenSize);
            cmd.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, doubleBufferedViewportScale);
            cmd.SetGlobalVector(HDShaderIDs._ScreenToTargetScaleHistory, doubleBufferedViewportScaleHistory);
            cmd.SetGlobalVector(HDShaderIDs._ZBufferParams, zBufferParams);
            cmd.SetGlobalVector(HDShaderIDs._ProjectionParams, projectionParams);
            cmd.SetGlobalVector(HDShaderIDs.unity_OrthoParams, unity_OrthoParams);
            cmd.SetGlobalVector(HDShaderIDs._ScreenParams, screenParams);
            cmd.SetGlobalVector(HDShaderIDs._TaaFrameInfo, new Vector4(taaFrameRotation.x, taaFrameRotation.y, taaFrameIndex, taaEnabled ? 1 : 0));
            cmd.SetGlobalVector(HDShaderIDs._TaaJitterStrength, taaJitter);
            cmd.SetGlobalVectorArray(HDShaderIDs._FrustumPlanes, frustumPlaneEquations);

            // Time is also a part of the UnityPerView CBuffer.
            // Different views can have different values of the "Animated Materials" setting.
            bool animateMaterials = CoreUtils.AreAnimatedMaterialsEnabled(camera);

            float ct  = animateMaterials ? time     : 0;
            float pt  = animateMaterials ? lastTime : 0;
            float dt  = Time.deltaTime;
            float sdt = Time.smoothDeltaTime;

            cmd.SetGlobalVector(HDShaderIDs._Time, new Vector4(ct * 0.05f, ct, ct * 2.0f, ct * 3.0f));
            cmd.SetGlobalVector(HDShaderIDs._LastTime, new Vector4(pt * 0.05f, pt, pt * 2.0f, pt * 3.0f));
            cmd.SetGlobalVector(HDShaderIDs.unity_DeltaTime, new Vector4(dt, 1.0f / dt, sdt, 1.0f / sdt));
            cmd.SetGlobalVector(HDShaderIDs._SinTime, new Vector4(Mathf.Sin(ct * 0.125f), Mathf.Sin(ct * 0.25f), Mathf.Sin(ct * 0.5f), Mathf.Sin(ct)));
            cmd.SetGlobalVector(HDShaderIDs._CosTime, new Vector4(Mathf.Cos(ct * 0.125f), Mathf.Cos(ct * 0.25f), Mathf.Cos(ct * 0.5f), Mathf.Cos(ct)));
            cmd.SetGlobalInt(HDShaderIDs._FrameCount, (int)frameCount);

            // TODO: qualify this code with xrInstancingEnabled when compute shaders can use keywords
            cmd.SetGlobalBuffer(HDShaderIDs._XRViewConstants, xrViewConstantsGpu);

            // XRTODO: double-wide cleanup
            cmd.SetGlobalVector(HDShaderIDs._TextureWidthScaling, textureWidthScaling);
        }
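
Compared to Example #2, this variant reads its matrices from a mainViewConstants struct and uploads per-view XR data through _XRViewConstants. A hedged sketch of what such a view-constants struct might contain, inferred only from the fields accessed above; the actual HDRP type may be named and laid out differently:

using UnityEngine;

// Hypothetical sketch, inferred from the fields read in SetupGlobalParams above.
// The real HDRP view-constants struct may have a different name, layout, or extra fields.
struct ViewConstantsSketch
{
    public Matrix4x4 viewMatrix;
    public Matrix4x4 invViewMatrix;
    public Matrix4x4 projMatrix;
    public Matrix4x4 invProjMatrix;
    public Matrix4x4 viewProjMatrix;
    public Matrix4x4 invViewProjMatrix;
    public Matrix4x4 nonJitteredViewProjMatrix;
    public Matrix4x4 prevViewProjMatrix;
    public Vector4   worldSpaceCameraPos;
    public Vector4   prevWorldSpaceCameraPos;
}
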
Example #4
        public void SetupGlobalParams(CommandBuffer cmd, float time, float lastTime)
        {
            cmd.SetGlobalMatrix(ClusterShaderIDs._ViewMatrix, viewMatrix);
            cmd.SetGlobalMatrix(ClusterShaderIDs._InvViewMatrix, viewMatrix.inverse);
            cmd.SetGlobalMatrix(ClusterShaderIDs._ProjMatrix, projMatrix);
            cmd.SetGlobalMatrix(ClusterShaderIDs._InvProjMatrix, projMatrix.inverse);
            cmd.SetGlobalMatrix(ClusterShaderIDs._ViewProjMatrix, viewProjMatrix);
            cmd.SetGlobalMatrix(ClusterShaderIDs._InvViewProjMatrix, viewProjMatrix.inverse);
            cmd.SetGlobalMatrix(ClusterShaderIDs._NonJitteredViewProjMatrix, nonJitteredViewProjMatrix);
            cmd.SetGlobalMatrix(ClusterShaderIDs._PrevViewProjMatrix, prevViewProjMatrix);
            cmd.SetGlobalVector(ClusterShaderIDs._WorldSpaceCameraPos, worldSpaceCameraPos);
            cmd.SetGlobalFloat(ClusterShaderIDs._DetViewMatrix, detViewMatrix);
            cmd.SetGlobalVector(ClusterShaderIDs._ScreenSize, screenSize);
            cmd.SetGlobalVector(ClusterShaderIDs._ScreenToTargetScale, doubleBufferedViewportScale);
            cmd.SetGlobalVector(ClusterShaderIDs._ZBufferParams, zBufferParams);
            cmd.SetGlobalVector(ClusterShaderIDs._ProjectionParams, projectionParams);
            cmd.SetGlobalVector(ClusterShaderIDs.unity_OrthoParams, unity_OrthoParams);
            cmd.SetGlobalVector(ClusterShaderIDs._ScreenParams, screenParam);
            cmd.SetGlobalVector(ClusterShaderIDs._TaaFrameRotation, taaFrameRotation);
            //cmd.SetGlobalVectorArray(ClusterShaderIDs._FrustumPlanes, frustumPlaneEquations);

            // Time is also a part of the UnityPerView CBuffer.
            // Different views can have different values of the "Animated Materials" setting.
            bool animateMaterials = CoreUtils.AreAnimatedMaterialsEnabled(camera);

            float ct  = animateMaterials ? time : 0;
            float pt  = animateMaterials ? lastTime : 0;
            float dt  = Time.deltaTime;
            float sdt = Time.smoothDeltaTime;

            Vector4 timeVector = new Vector4(ct * 0.05f, ct, ct * 2.0f, ct * 3.0f);

            cmd.SetGlobalVector(ClusterShaderIDs._Time, timeVector);
            cmd.SetGlobalVector(ClusterShaderIDs._LastTime, new Vector4(pt * 0.05f, pt, pt * 2.0f, pt * 3.0f));
            cmd.SetGlobalVector(ClusterShaderIDs.unity_DeltaTime, new Vector4(dt, 1.0f / dt, sdt, 1.0f / sdt));
            cmd.SetGlobalVector(ClusterShaderIDs._SinTime, new Vector4(Mathf.Sin(ct * 0.125f), Mathf.Sin(ct * 0.25f), Mathf.Sin(ct * 0.5f), Mathf.Sin(ct)));
            cmd.SetGlobalVector(ClusterShaderIDs._CosTime, new Vector4(Mathf.Cos(ct * 0.125f), Mathf.Cos(ct * 0.25f), Mathf.Cos(ct * 0.5f), Mathf.Cos(ct)));

            cmd.SetGlobalMatrix("Cluster_Matrix_LinearZ", MVPMatrixLinearZ);
            cmd.SetGlobalMatrix("matrix_MVPInv", MVPMatrix.inverse);
            cmd.SetGlobalVector("ClusterProjParams", new Vector4(NearClipPlane, FarClipPlane, 0, 0));
            cmd.SetGlobalVector("ClusterScreenParams", new Vector4(ClusterFrustumWidth, ClusterFrustumHeight, 0, 0));

            cmd.SetGlobalVectorArray("CameraBottomLeft", cameraBottomLeftCorner);
            cmd.SetGlobalVector("CameraUpDirLength", cameraUpDir);
            cmd.SetGlobalVector("CameraRightDirLength", cameraRightDir);
            cmd.SetGlobalVector("ClusterBottomLeft", clusterBottomLeftCorner);
            cmd.SetGlobalVector("ClusterUpDirLength", clusterUpDir);
            cmd.SetGlobalVector("ClusterRightDirLength", clusterRightDir);
        }
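
The CameraBottomLeft / CameraUpDirLength / CameraRightDirLength uploads suggest a corner-plus-scaled-axes parameterization of the frustum, which a shader can use to reconstruct a position or ray as corner + u * right + v * up with u, v in [0, 1]. A hedged sketch of how such vectors could be derived on the CPU, assuming that interpretation (the actual cluster code may compute them differently):

using UnityEngine;

// Hypothetical: derive a bottom-left corner and scaled up/right axes for the camera far plane.
// This is only one plausible interpretation of the vectors uploaded above.
public static class FrustumCornerSketch
{
    public static void ComputeFarPlaneBasis(Camera cam,
                                            out Vector3 bottomLeft, out Vector3 upDir, out Vector3 rightDir)
    {
        // Corners come back in camera-local space, ordered bottom-left, top-left, top-right, bottom-right.
        var corners = new Vector3[4];
        cam.CalculateFrustumCorners(new Rect(0, 0, 1, 1), cam.farClipPlane,
                                    Camera.MonoOrStereoscopicEye.Mono, corners);

        // Move the corners to world space (assumes an unscaled camera transform).
        for (int i = 0; i < 4; i++)
            corners[i] = cam.transform.TransformPoint(corners[i]);

        bottomLeft = corners[0];
        upDir      = corners[1] - corners[0]; // length equals the far-plane height
        rightDir   = corners[3] - corners[0]; // length equals the far-plane width
    }
}
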
Example #5
        public DensityVolume[] PrepareDensityVolumeData(CommandBuffer cmd, Camera currentCam, float time)
        {
            // Update volumes
            bool animate = CoreUtils.AreAnimatedMaterialsEnabled(currentCam);

            foreach (DensityVolume volume in volumes)
            {
                volume.PrepareParameters(animate, time);
            }

            if (atlasNeedsRefresh)
            {
                atlasNeedsRefresh = false;
                VolumeAtlasRefresh();
            }

            volumeAtlas.GenerateVolumeAtlas(cmd);

            return volumes.ToArray();
        }
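
A hedged sketch of what a volume's PrepareParameters(animate, time) might do with the animate flag: time-driven effects such as scrolling of a density mask texture are evaluated at t = 0 when "Animated Materials" is disabled for the camera. The fields below are illustrative, not the actual DensityVolume API:

using UnityEngine;

// Hypothetical sketch only; field names do not match the real DensityVolume component.
public class DensityVolumeSketch
{
    public Vector3 textureScrollSpeed = new Vector3(0.1f, 0f, 0f);
    public Vector3 textureOffset;

    public void PrepareParameters(bool animate, float time)
    {
        float t = animate ? time : 0f;
        textureOffset = textureScrollSpeed * t; // frozen at the origin when animation is off
    }
}
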
Example #6
        // Set up UnityPerView CBuffer.
        public void SetupGlobalParams(CommandBuffer cmd, float time, float lastTime, uint frameCount)
        {
            cmd.SetGlobalMatrix(HDShaderIDs._ViewMatrix, viewMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvViewMatrix, viewMatrix.inverse);
            cmd.SetGlobalMatrix(HDShaderIDs._ProjMatrix, projMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvProjMatrix, projMatrix.inverse);
            cmd.SetGlobalMatrix(HDShaderIDs._ViewProjMatrix, viewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._InvViewProjMatrix, viewProjMatrix.inverse);
            cmd.SetGlobalMatrix(HDShaderIDs._NonJitteredViewProjMatrix, nonJitteredViewProjMatrix);
            cmd.SetGlobalMatrix(HDShaderIDs._PrevViewProjMatrix, prevViewProjMatrix);
            cmd.SetGlobalVector(HDShaderIDs._WorldSpaceCameraPos, worldSpaceCameraPos);
            cmd.SetGlobalFloat(HDShaderIDs._DetViewMatrix, detViewMatrix);
            cmd.SetGlobalVector(HDShaderIDs._ScreenSize, screenSize);
            cmd.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, doubleBufferedViewportScale);
            cmd.SetGlobalVector(HDShaderIDs._ZBufferParams, zBufferParams);
            cmd.SetGlobalVector(HDShaderIDs._ProjectionParams, projectionParams);
            cmd.SetGlobalVector(HDShaderIDs.unity_OrthoParams, unity_OrthoParams);
            cmd.SetGlobalVector(HDShaderIDs._ScreenParams, screenParams);
            cmd.SetGlobalVector(HDShaderIDs._TaaFrameRotation, taaFrameRotation);
            cmd.SetGlobalVectorArray(HDShaderIDs._FrustumPlanes, frustumPlaneEquations);

            // Time is also a part of the UnityPerView CBuffer.
            // Different views can have different values of the "Animated Materials" setting.
            bool animateMaterials = CoreUtils.AreAnimatedMaterialsEnabled(camera);

            float ct  = animateMaterials ? time     : 0;
            float pt  = animateMaterials ? lastTime : 0;
            float dt  = Time.deltaTime;
            float sdt = Time.smoothDeltaTime;

            cmd.SetGlobalVector(HDShaderIDs._Time, new Vector4(ct * 0.05f, ct, ct * 2.0f, ct * 3.0f));
            cmd.SetGlobalVector(HDShaderIDs._LastTime, new Vector4(pt * 0.05f, pt, pt * 2.0f, pt * 3.0f));
            cmd.SetGlobalVector(HDShaderIDs.unity_DeltaTime, new Vector4(dt, 1.0f / dt, sdt, 1.0f / sdt));
            cmd.SetGlobalVector(HDShaderIDs._SinTime, new Vector4(Mathf.Sin(ct * 0.125f), Mathf.Sin(ct * 0.25f), Mathf.Sin(ct * 0.5f), Mathf.Sin(ct)));
            cmd.SetGlobalVector(HDShaderIDs._CosTime, new Vector4(Mathf.Cos(ct * 0.125f), Mathf.Cos(ct * 0.25f), Mathf.Cos(ct * 0.5f), Mathf.Cos(ct)));
            cmd.SetGlobalInt(HDShaderIDs._FrameCount, (int)frameCount);
        }
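
One caveat about the unity_DeltaTime packing above: it stores raw reciprocals, so a delta time of exactly zero (should one ever occur) turns into infinity. A minimal sketch of a guarded variant, as an illustration only; the code above intentionally mirrors Unity's built-in packing and does not clamp:

using UnityEngine;

// Illustration only: a clamped variant of the (dt, 1/dt, smoothDt, 1/smoothDt) packing.
public static class DeltaTimePackingExample
{
    public static Vector4 Pack(float dt, float smoothDt)
    {
        const float eps = 1e-6f;
        return new Vector4(dt, 1.0f / Mathf.Max(dt, eps),
                           smoothDt, 1.0f / Mathf.Max(smoothDt, eps));
    }
}
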
Example #7
        PrepassOutput RenderPrepass(RenderGraph renderGraph, RenderGraphMutableResource sssBuffer, CullingResults cullingResults, HDCamera hdCamera)
        {
            m_IsDepthBufferCopyValid = false;

            var result = new PrepassOutput();

            result.gbuffer = m_GBufferOutput;
            result.dbuffer = m_DBufferOutput;

            bool msaa = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA);
            bool clearMotionVectors = hdCamera.camera.cameraType == CameraType.SceneView && !CoreUtils.AreAnimatedMaterialsEnabled(hdCamera.camera);


            // TODO: See how to clean this. Some buffers are created outside, some inside functions...
            result.motionVectorsBuffer = CreateMotionVectorBuffer(renderGraph, msaa, clearMotionVectors);
            result.depthBuffer         = CreateDepthBuffer(renderGraph, msaa);

            RenderXROcclusionMeshes(renderGraph, hdCamera, result.depthBuffer);

            using (new XRSinglePassScope(renderGraph, hdCamera))
            {
                // TODO RENDERGRAPH
                //// Bind the custom color/depth before the first custom pass
                //if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.CustomPass))
                //{
                //    if (m_CustomPassColorBuffer.IsValueCreated)
                //        cmd.SetGlobalTexture(HDShaderIDs._CustomColorTexture, m_CustomPassColorBuffer.Value);
                //    if (m_CustomPassDepthBuffer.IsValueCreated)
                //        cmd.SetGlobalTexture(HDShaderIDs._CustomDepthTexture, m_CustomPassDepthBuffer.Value);
                //}
                //RenderCustomPass(renderContext, cmd, hdCamera, customPassCullingResults, CustomPassInjectionPoint.BeforeRendering);

                bool renderMotionVectorAfterGBuffer = RenderDepthPrepass(renderGraph, cullingResults, hdCamera, ref result);

                if (!renderMotionVectorAfterGBuffer)
                {
                    // If object motion vectors are enabled, this renders the objects with motion vectors into the target buffers (in addition to the depth)
                    // Note: an object with motion vectors must not be rendered in the prepass, otherwise we can get motion vector writes that should have been rejected
                    RenderObjectsMotionVectors(renderGraph, cullingResults, hdCamera, result);
                }

                // TODO RENDERGRAPH
                //PreRenderSky(hdCamera, cmd);

                // At this point in forward, all objects have been rendered to the prepass (depth/normal/motion vectors), so we can resolve them
                ResolvePrepassBuffers(renderGraph, hdCamera, ref result);

                RenderDecals(renderGraph, hdCamera, ref result, cullingResults);

                RenderGBuffer(renderGraph, sssBuffer, ref result, cullingResults, hdCamera);

                // TODO RENDERGRAPH
                //// After Depth and Normals/roughness including decals
                //RenderCustomPass(renderContext, cmd, hdCamera, customPassCullingResults, CustomPassInjectionPoint.AfterOpaqueDepthAndNormal);

                // In both forward and deferred, everything opaque should have been rendered at this point so we can safely copy the depth buffer for later processing.
                GenerateDepthPyramid(renderGraph, hdCamera, ref result);

                // TODO RENDERGRAPH
                //// Send all the geometry graphics buffer to client systems if required (must be done after the pyramid and before the transparent depth pre-pass)
                //SendGeometryGraphicsBuffers(cmd, hdCamera);

                if (renderMotionVectorAfterGBuffer)
                {
                    // See the call to RenderObjectsMotionVectors() above and its comment
                    RenderObjectsMotionVectors(renderGraph, cullingResults, hdCamera, result);
                }

                RenderCameraMotionVectors(renderGraph, hdCamera, result.depthPyramidTexture, result.resolvedMotionVectorsBuffer);

                // TODO RENDERGRAPH
                //RenderTransparencyOverdraw(cullingResults, hdCamera, renderContext, cmd);

                ResolveStencilBufferIfNeeded(renderGraph, hdCamera, ref result);
            }

            return result;
        }
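
A hedged sketch of how a using-scope like XRSinglePassScope could be built by pairing a start call with a stop call; Example #9 below invokes StartSinglePass / StopSinglePass explicitly around the same region. The real XRSinglePassScope implementation in HDRP may differ:

using System;

// Hypothetical sketch: a using-scope that brackets single-pass XR rendering by running
// a start action on construction and a stop action on Dispose.
struct SinglePassScopeSketch : IDisposable
{
    readonly Action m_OnStop;

    public SinglePassScopeSketch(Action onStart, Action onStop)
    {
        m_OnStop = onStop;
        onStart?.Invoke();   // e.g. StartSinglePass(renderGraph, hdCamera)
    }

    public void Dispose()
    {
        m_OnStop?.Invoke();  // e.g. StopSinglePass(renderGraph, hdCamera)
    }
}

Used as using (new SinglePassScopeSketch(() => StartSinglePass(renderGraph, hdCamera), () => StopSinglePass(renderGraph, hdCamera))) { ... }, the stop call then runs even if a pass inside the scope throws.
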
Example #8
        // Pass all the systems that may want to update per-camera data here.
        // That way you will never update an HDCamera and forget to update the dependent system.
        public void Update(FrameSettings currentFrameSettings, VolumetricLightingSystem vlSys, MSAASamples msaaSamples)
        {
            // Store a shortcut to HDAdditionalCameraData (done here and not in the constructor, as
            // we don't create an HDCamera every frame and the user can change the HDAdditionalCameraData later, e.g. when they create a new scene).
            m_AdditionalCameraData = camera.GetComponent <HDAdditionalCameraData>();

            m_frameSettings = currentFrameSettings;

            // Handle post-process AA
                //  - If post-processing is disabled altogether, no AA
            //  - In scene view, only enable TAA if animated materials are enabled
            //  - Else just use the currently set AA mode on the camera
            {
                if (!m_frameSettings.IsEnabled(FrameSettingsField.Postprocess) || !CoreUtils.ArePostProcessesEnabled(camera))
                {
                    antialiasing = AntialiasingMode.None;
                }
#if UNITY_EDITOR
                else if (camera.cameraType == CameraType.SceneView)
                {
                    var mode = HDRenderPipelinePreferences.sceneViewAntialiasing;

                    if (mode == AntialiasingMode.TemporalAntialiasing && !CoreUtils.AreAnimatedMaterialsEnabled(camera))
                    {
                        antialiasing = AntialiasingMode.None;
                    }
                    else
                    {
                        antialiasing = mode;
                    }
                }
#endif
                else if (m_AdditionalCameraData != null)
                {
                    antialiasing = m_AdditionalCameraData.antialiasing;
                }
                else
                {
                    antialiasing = AntialiasingMode.None;
                }
            }

            // Handle memory allocation.
            {
                bool isColorPyramidHistoryRequired = m_frameSettings.IsEnabled(FrameSettingsField.SSR); // TODO: TAA as well
                bool isVolumetricHistoryRequired   = m_frameSettings.IsEnabled(FrameSettingsField.Volumetrics) && m_frameSettings.IsEnabled(FrameSettingsField.ReprojectionForVolumetrics);

                int numColorPyramidBuffersRequired = isColorPyramidHistoryRequired ? 2 : 1; // TODO: 1 -> 0
                int numVolumetricBuffersRequired   = isVolumetricHistoryRequired   ? 2 : 0; // History + feedback

                if ((numColorPyramidBuffersAllocated != numColorPyramidBuffersRequired) ||
                    (numVolumetricBuffersAllocated != numVolumetricBuffersRequired))
                {
                    // Reinit the system.
                    colorPyramidHistoryIsValid = false;
                    vlSys.DeinitializePerCameraData(this);

                    // The history system only supports the "nuke all" option.
                    m_HistoryRTSystem.Dispose();
                    m_HistoryRTSystem = new BufferedRTHandleSystem();

                    if (numColorPyramidBuffersRequired != 0)
                    {
                        AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain, HistoryBufferAllocatorFunction, numColorPyramidBuffersRequired);
                        colorPyramidHistoryIsValid = false;
                    }

                    vlSys.InitializePerCameraData(this, numVolumetricBuffersRequired);

                    // Mark as init.
                    numColorPyramidBuffersAllocated = numColorPyramidBuffersRequired;
                    numVolumetricBuffersAllocated   = numVolumetricBuffersRequired;
                }
            }

            // If TAA is enabled, projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = antialiasing == AntialiasingMode.TemporalAntialiasing;

            if (!taaEnabled)
            {
                taaFrameIndex = 0;
                taaJitter     = Vector4.zero;
            }

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? GetJitteredProjectionMatrix(nonJitteredCameraProj)
                : nonJitteredCameraProj;

            // The projection matrix used in shaders is massaged a bit to work across all platforms
            // (different Z value ranges, etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            // Update viewport sizes.
            m_ViewportSizePrevFrame = new Vector2Int(m_ActualWidth, m_ActualHeight);
            m_ActualWidth           = Math.Max(camera.pixelWidth, 1);
            m_ActualHeight          = Math.Max(camera.pixelHeight, 1);

            Vector2Int nonScaledSize = new Vector2Int(m_ActualWidth, m_ActualHeight);
            if (isMainGameView)
            {
                Vector2Int scaledSize = HDDynamicResolutionHandler.instance.GetRTHandleScale(new Vector2Int(camera.pixelWidth, camera.pixelHeight));
                nonScaledSize  = HDDynamicResolutionHandler.instance.cachedOriginalSize;
                m_ActualWidth  = scaledSize.x;
                m_ActualHeight = scaledSize.y;
            }

            var screenWidth  = m_ActualWidth;
            var screenHeight = m_ActualHeight;
            textureWidthScaling = new Vector4(1.0f, 1.0f, 0.0f, 0.0f);

            numEyes = camera.stereoEnabled ? (uint)2 : (uint)1; // TODO VR: Generalize this when support for >2 eyes comes out with XR SDK

            if (camera.stereoEnabled)
            {
                if (XRGraphics.stereoRenderingMode == XRGraphics.StereoRenderingMode.SinglePass)
                {
                    textureWidthScaling = new Vector4(2.0f, 0.5f, 0.0f, 0.0f);
                }

                for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    // For VR, TAA proj matrices don't need to be jittered
                    var currProjStereo    = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
                    var gpuCurrProjStereo = GL.GetGPUProjectionMatrix(currProjStereo, true);
                    var gpuCurrViewStereo = camera.GetStereoViewMatrix((Camera.StereoscopicEye)eyeIndex);

                    if (ShaderConfig.s_CameraRelativeRendering != 0)
                    {
                        // Zero out the translation component.
                        gpuCurrViewStereo.SetColumn(3, new Vector4(0, 0, 0, 1));
                    }
                    var gpuCurrVPStereo = gpuCurrProjStereo * gpuCurrViewStereo;

                    // A camera can be rendered multiple times per frame, so only update the previous view-projection matrix & position when needed
                    if (m_LastFrameActive != Time.frameCount)
                    {
                        if (isFirstFrame)
                        {
                            prevWorldSpaceCameraPosStereo[eyeIndex] = gpuCurrViewStereo.inverse.GetColumn(3);
                            prevViewProjMatrixStereo[eyeIndex]      = gpuCurrVPStereo;
                        }
                        else
                        {
                            prevWorldSpaceCameraPosStereo[eyeIndex] = worldSpaceCameraPosStereo[eyeIndex];
                            prevViewProjMatrixStereo[eyeIndex]      = GetViewProjMatrixStereo(eyeIndex); // Grabbing this before ConfigureStereoMatrices updates view/proj
                        }

                        isFirstFrame = false;
                    }
                }

                // XRTODO: fix this
                isFirstFrame = true; // So that mono vars can still update when stereo active

                // XRTODO: remove once SPI is working
                if (XRGraphics.stereoRenderingMode == XRGraphics.StereoRenderingMode.SinglePass)
                {
                    Debug.Assert(HDDynamicResolutionHandler.instance.SoftwareDynamicResIsEnabled() == false);

                    var xrDesc = XRGraphics.eyeTextureDesc;
                    nonScaledSize.x = screenWidth = m_ActualWidth = xrDesc.width;
                    nonScaledSize.y = screenHeight = m_ActualHeight = xrDesc.height;
                }
            }

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera can be rendered multiple times per frame, so only update the previous view-projection matrix & position when needed
            // Note: if your first rendered view during the frame is not the Game view, everything breaks.
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevWorldSpaceCameraPos = camera.transform.position;
                    prevViewProjMatrix      = gpuVP;
                }
                else
                {
                    prevWorldSpaceCameraPos         = worldSpaceCameraPos;
                    prevViewProjMatrix              = nonJitteredViewProjMatrix;
                    prevViewProjMatrixNoCameraTrans = prevViewProjMatrix;
                }

                isFirstFrame = false;
            }

            // In stereo, this corresponds to the center eye position
            worldSpaceCameraPos = camera.transform.position;

            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;

            ConfigureStereoMatrices();

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                prevWorldSpaceCameraPos = worldSpaceCameraPos - prevWorldSpaceCameraPos;
                // This fixes an issue with cameraDisplacement stacking in prevViewProjMatrix when the same camera renders multiple times per logical frame,
                // causing glitchy motion blur when the editor is paused.
                if (m_LastFrameActive != Time.frameCount)
                {
                    Matrix4x4 cameraDisplacement = Matrix4x4.Translate(prevWorldSpaceCameraPos);
                    prevViewProjMatrix *= cameraDisplacement; // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
                }
            }
            else
            {
                Matrix4x4 noTransViewMatrix = camera.worldToCameraMatrix;
                noTransViewMatrix.SetColumn(3, new Vector4(0, 0, 0, 1));
                prevViewProjMatrixNoCameraTrans = nonJitteredProjMatrix * noTransViewMatrix;
            }

            float n = camera.nearClipPlane;
            float f = camera.farClipPlane;

            // Analyze the projection matrix.
            // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
            float scale     = projMatrix[2, 3] / (f * n) * (f - n);
            bool  depth_0_1 = Mathf.Abs(scale) < 1.5f;
            bool  reverseZ  = scale > 0;
            bool  flipProj  = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

            // http://www.humus.name/temp/Linearize%20depth.txt
            if (reverseZ)
            {
                zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
            }
            else
            {
                zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
            }

            projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

            float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
            float orthoWidth  = orthoHeight * camera.aspect;
            unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

            Frustum.Create(frustum, viewProjMatrix, depth_0_1, reverseZ);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }

            m_LastFrameActive = Time.frameCount;

            // TODO: cache this, or make the history system spill the beans...
            Vector2Int prevColorPyramidBufferSize = Vector2Int.zero;

            if (numColorPyramidBuffersAllocated > 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;

                prevColorPyramidBufferSize.x = rt.width;
                prevColorPyramidBufferSize.y = rt.height;
            }

            // TODO: cache this, or make the history system spill the beans...
            Vector3Int prevVolumetricBufferSize = Vector3Int.zero;

            if (numVolumetricBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;

                prevVolumetricBufferSize.x = rt.width;
                prevVolumetricBufferSize.y = rt.height;
                prevVolumetricBufferSize.z = rt.volumeDepth;
            }

            m_msaaSamples = msaaSamples;
            // Here we use the non-scaled resolution for the RTHandleSystem reference size because we assume that at some point we will need the full resolution anyway.
            // This is also useful because some RTs after the final up-res will need the full size.
            RTHandles.SetReferenceSize(nonScaledSize.x, nonScaledSize.y, m_msaaSamples);
            m_HistoryRTSystem.SetReferenceSize(nonScaledSize.x, nonScaledSize.y, m_msaaSamples);
            m_HistoryRTSystem.Swap();

            Vector3Int currColorPyramidBufferSize = Vector3Int.zero;

            if (numColorPyramidBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;

                currColorPyramidBufferSize.x = rt.width;
                currColorPyramidBufferSize.y = rt.height;

                if ((currColorPyramidBufferSize.x != prevColorPyramidBufferSize.x) ||
                    (currColorPyramidBufferSize.y != prevColorPyramidBufferSize.y))
                {
                    // A reallocation has happened, so the new texture likely contains garbage.
                    colorPyramidHistoryIsValid = false;
                }
            }

            Vector3Int currVolumetricBufferSize = Vector3Int.zero;

            if (numVolumetricBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;

                currVolumetricBufferSize.x = rt.width;
                currVolumetricBufferSize.y = rt.height;
                currVolumetricBufferSize.z = rt.volumeDepth;

                if ((currVolumetricBufferSize.x != prevVolumetricBufferSize.x) ||
                    (currVolumetricBufferSize.y != prevVolumetricBufferSize.y) ||
                    (currVolumetricBufferSize.z != prevVolumetricBufferSize.z))
                {
                    // A reallocation has happened, so the new texture likely contains garbage.
                    volumetricHistoryIsValid = false;
                }
            }

            int maxWidth  = RTHandles.maxWidth;
            int maxHeight = RTHandles.maxHeight;

            Vector2 rcpTextureSize = Vector2.one / new Vector2(maxWidth, maxHeight);

            m_ViewportScalePreviousFrame = m_ViewportSizePrevFrame * rcpTextureSize;
            m_ViewportScaleCurrentFrame  = new Vector2Int(m_ActualWidth, m_ActualHeight) * rcpTextureSize;

            screenSize   = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
            screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

            finalViewport = new Rect(camera.pixelRect.x, camera.pixelRect.y, nonScaledSize.x, nonScaledSize.y);

            if (vlSys != null)
            {
                vlSys.UpdatePerCameraData(this);
            }

            UpdateVolumeParameters();
        }
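
The zBufferParams computed above follow the convention from the linked humus.name note: assuming the standard Unity decode LinearEyeDepth(d) = 1 / (zBufferParams.z * d + zBufferParams.w), the reversed-Z parameters map device depth 1 to the near plane and 0 to the far plane. A small standalone sketch that checks the round trip (illustration only; the shader-side helpers live in HDRP's HLSL includes):

using UnityEngine;

// Illustration only: CPU-side check of the reversed-Z zBufferParams computed in Update().
// For n = 0.3 and f = 1000: LinearEyeDepth(1) == 0.3 (near plane), LinearEyeDepth(0) == 1000 (far plane).
public static class ZBufferParamsExample
{
    public static Vector4 ReversedZParams(float n, float f)
    {
        return new Vector4(-1f + f / n, 1f, -1f / f + 1f / n, 1f / f);
    }

    public static float LinearEyeDepth(float deviceDepth, Vector4 zBufferParams)
    {
        return 1.0f / (zBufferParams.z * deviceDepth + zBufferParams.w);
    }
}
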
Example #9
        PrepassOutput RenderPrepass(RenderGraph renderGraph, RenderGraphMutableResource sssBuffer, CullingResults cullingResults, HDCamera hdCamera)
        {
            m_IsDepthBufferCopyValid = false;

            var result = new PrepassOutput();

            result.gbuffer = m_GBufferOutput;
            result.dbuffer = m_DBufferOutput;

            bool msaa = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA);
            bool clearMotionVectors = hdCamera.camera.cameraType == CameraType.SceneView && !CoreUtils.AreAnimatedMaterialsEnabled(hdCamera.camera);


            // TODO: See how to clean this. Some buffers are created outside, some inside functions...
            result.motionVectorsBuffer = CreateMotionVectorBuffer(renderGraph, msaa, clearMotionVectors);
            result.depthBuffer         = CreateDepthBuffer(renderGraph, msaa);

            RenderOcclusionMeshes(renderGraph, hdCamera, result.depthBuffer);
            StartSinglePass(renderGraph, hdCamera);

            bool renderMotionVectorAfterGBuffer = RenderDepthPrepass(renderGraph, cullingResults, hdCamera, ref result);

            if (!renderMotionVectorAfterGBuffer)
            {
                // If object motion vectors are enabled, this renders the objects with motion vectors into the target buffers (in addition to the depth)
                // Note: an object with motion vectors must not be rendered in the prepass, otherwise we can get motion vector writes that should have been rejected
                RenderObjectsMotionVectors(renderGraph, cullingResults, hdCamera, result);
            }

            // At this point in forward, all objects have been rendered to the prepass (depth/normal/motion vectors), so we can resolve them
            ResolvePrepassBuffers(renderGraph, hdCamera, ref result);

            RenderDecals(renderGraph, hdCamera, ref result, cullingResults);

            RenderGBuffer(renderGraph, sssBuffer, ref result, cullingResults, hdCamera);

            // In both forward and deferred, everything opaque should have been rendered at this point so we can safely copy the depth buffer for later processing.
            GenerateDepthPyramid(renderGraph, hdCamera, ref result);

            if (renderMotionVectorAfterGBuffer)
            {
                // See the call to RenderObjectsMotionVectors() above and its comment
                RenderObjectsMotionVectors(renderGraph, cullingResults, hdCamera, result);
            }

            RenderCameraMotionVectors(renderGraph, hdCamera, result.depthPyramidTexture, result.resolvedMotionVectorsBuffer);

            result.stencilBufferCopy = CopyStencilBufferIfNeeded(m_RenderGraph, hdCamera, result.depthBuffer, m_CopyStencil, m_CopyStencilForSSR);

            StopSinglePass(renderGraph, hdCamera);

            return result;
        }