Example #1
        public void InitSharedBuffers(GBufferManager gbufferManager, RenderPipelineSettings settings, RenderPipelineResources resources)
        {
            // Set the flags
            m_MSAASupported      = settings.supportMSAA;
            m_ReuseGBufferMemory = !settings.supportOnlyForward;

            // Create the depth/stencil buffer
            m_CameraDepthStencilBuffer = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth32, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, name: "CameraDepthStencil");

            // Create the mip chain buffer
            m_CameraDepthBufferMipChainInfo = new HDUtils.PackedMipChainInfo();
            m_CameraDepthBufferMipChainInfo.Allocate();
            m_CameraDepthBufferMipChain = RTHandles.Alloc(ComputeDepthBufferMipChainSize, colorFormat: RenderTextureFormat.RFloat, filterMode: FilterMode.Point, sRGB: false, enableRandomWrite: true, name: "CameraDepthBufferMipChain");

            // Technically we won't need this buffer in some cases, but that is nothing we can determine at init time.
            m_CameraStencilBufferCopy = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.None, colorFormat: RenderTextureFormat.R8, sRGB: false, filterMode: FilterMode.Point, name: "CameraStencilCopy"); // DXGI_FORMAT_R8_UINT is not supported by Unity

            // Allocate the additional textures only if MSAA is supported
            if (m_MSAASupported)
            {
                // Let's create the MSAA textures
                m_CameraDepthStencilMSAABuffer = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencilMSAA");
                m_CameraDepthValuesBuffer      = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBFloat, sRGB: false, name: "DepthValuesBuffer");

                // Create the required resolve materials
                m_DepthResolveMaterial = CoreUtils.CreateEngineMaterial(resources.depthValues);
                m_ColorResolveMaterial = CoreUtils.CreateEngineMaterial(resources.colorResolve);
            }

            // If we are in the forward only mode
            if (!m_ReuseGBufferMemory)
            {
                // In case of full forward we must allocate the render target for normal buffer (or reuse one already existing)
                // TODO: Provide a way to reuse a render target
                m_NormalRT = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableRandomWrite: true, name: "NormalBuffer");

                // Is MSAA supported?
                if (m_MSAASupported)
                {
                    // Allocate the two render textures we need in the MSAA case
                    m_NormalMSAART       = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableMSAA: true, bindTextureMS: true, name: "NormalBufferMSAA");
                    m_DepthAsColorMSAART = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RFloat, sRGB: false, bindTextureMS: true, enableMSAA: true, name: "DepthAsColorMSAA");
                }
            }
            else
            {
                // When not forward only we are using the normal buffer of the gbuffer
                // In case of deferred, we must be in sync with NormalBuffer.hlsl and lit.hlsl files and set up the correct buffers
                m_NormalRT = gbufferManager.GetNormalBuffer(0); // Normal + Roughness
            }
        }
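        // NOTE: ComputeDepthBufferMipChainSize is referenced above but not shown in this
        // example. A plausible sketch, assuming it delegates the size computation to the
        // PackedMipChainInfo allocated above (ComputeSize and textureSize are assumed names):
        Vector2Int ComputeDepthBufferMipChainSize(Vector2Int screenSize)
        {
            m_CameraDepthBufferMipChainInfo.ComputeSize(screenSize);
            return m_CameraDepthBufferMipChainInfo.textureSize;
        }
Example #2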
        public AmbientOcclusionSystem(HDRenderPipelineAsset hdAsset)
        {
            m_Settings  = hdAsset.currentPlatformRenderPipelineSettings;
            m_Resources = hdAsset.renderPipelineResources;
#if ENABLE_RAYTRACING
            m_RTResources = hdAsset.renderPipelineRayTracingResources;
#endif

            if (!hdAsset.currentPlatformRenderPipelineSettings.supportSSAO)
            {
                return;
            }

            AllocRT(0.5f);
        }
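        // NOTE: AllocRT is not shown in this example. A minimal sketch, assuming it
        // allocates the ambient occlusion destination target at the given fraction of
        // full resolution (the field name and format are assumptions, not the actual HDRP code):
        void AllocRT(float scaleFactor)
        {
            m_AmbientOcclusionTex = RTHandles.Alloc(Vector2.one * scaleFactor, filterMode: FilterMode.Bilinear, colorFormat: GraphicsFormat.R8_UNorm, enableRandomWrite: true, name: "Ambient Occlusion");
        }
Example #3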
        public void InitNormalBuffers(GBufferManager gbufferManager, RenderPipelineSettings settings)
        {
            if (settings.supportOnlyForward)
            {
                // In case of full forward we must allocate the render target for normal buffer (or reuse one already existing)
                // TODO: Provide a way to reuse a render target
                m_ColorMRTs[0]      = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, name: "NormalBuffer");
                m_ExternalBuffer[0] = false;
            }
            else
            {
                // In case of deferred, we must be in sync with NormalBuffer.hlsl and lit.hlsl files and set up the correct buffers
                m_ColorMRTs[0]      = gbufferManager.GetBuffer(1); // Normal + Roughness is GBuffer(1)
                m_ExternalBuffer[0] = true;
            }
        }
Example #4
        public void InitSSSBuffers(GBufferManager gbufferManager, RenderPipelineSettings settings)
        {
            // Reset the msaa flag
            m_MSAASupport = settings.supportMSAA;

            if (settings.supportedLitShaderMode == RenderPipelineSettings.SupportedLitShaderMode.ForwardOnly) //forward only
            {
                // In case of full forward we must allocate the render target for forward SSS (or reuse one already existing)
                // TODO: Provide a way to reuse a render target
                m_ColorMRTs[0]           = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R8G8B8A8_SRGB), xrInstancing: true, useDynamicScale: true, name: "SSSBuffer");
                m_ReuseGBufferMemory [0] = false;
            }

            // We need to allocate the MSAA texture in forward or both modes, in case one of the cameras has forward-only mode enabled
            if (m_MSAASupport)
            {
                m_ColorMSAAMRTs[0] = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R8G8B8A8_SRGB), enableMSAA: true, bindTextureMS: true, xrInstancing: true, useDynamicScale: true, name: "SSSBufferMSAA");
            }

            if ((settings.supportedLitShaderMode & RenderPipelineSettings.SupportedLitShaderMode.DeferredOnly) != 0) //deferred or both
            {
                // In case of deferred, we must be in sync with SubsurfaceScattering.hlsl and lit.hlsl files and set up the correct buffers
                m_ColorMRTs[0]           = gbufferManager.GetSubsurfaceScatteringBuffer(0); // Note: This buffer must be sRGB (which is the case with Lit.shader)
                m_ReuseGBufferMemory [0] = true;
            }

            if (NeedTemporarySubsurfaceBuffer() || settings.supportMSAA)
            {
                // Caution: must be same format as m_CameraSssDiffuseLightingBuffer
                m_CameraFilteringBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.B10G11R11_UFloatPack32), enableRandomWrite: true, xrInstancing: true, useDynamicScale: true, name: "SSSCameraFiltering"); // Enable UAV
            }

            // We use 8x8 tiles in order to match the native GCN HTile as closely as possible.
            m_HTile = RTHandles.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R8_UNorm), enableRandomWrite: true, xrInstancing: true, useDynamicScale: true, name: "SSSHtile"); // Enable UAV

            // Fill the arrays with the max number of diffusion profiles so we don't get
            // the error: exceeds previous array size (5 vs 3). Cap to previous size.
            thicknessRemaps = new Vector4[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT];
            shapeParams     = new Vector4[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT];
            transmissionTintsAndFresnel0         = new Vector4[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT];
            disabledTransmissionTintsAndFresnel0 = new Vector4[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT];
            worldScales            = new Vector4[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT];
            filterKernels          = new Vector4[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT * DiffusionProfileConstants.SSS_N_SAMPLES_NEAR_FIELD];
            diffusionProfileHashes = new float[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT];
            diffusionProfileUpdate = new int[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT];
            setDiffusionProfiles   = new DiffusionProfileSettings[DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT];
        }
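        // NOTE: The scale functor passed to RTHandles.Alloc for m_HTile above receives the
        // current screen size and returns the render target size. A standalone illustration
        // of the ceiling division it performs (the helper name is hypothetical):
        static Vector2Int HTileSize(Vector2Int screenSize)
        {
            // (x + 7) / 8 rounds up to whole 8x8 tiles, e.g. 1920x1080 -> 240x135 tiles.
            return new Vector2Int((screenSize.x + 7) / 8, (screenSize.y + 7) / 8);
        }
Example #5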
        public void Init(RenderPipelineResources rpResources, HDRenderPipelineRayTracingResources rpRTResources, RenderPipelineSettings pipelineSettings, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager)
        {
            // Keep track of the pipeline settings and resources
            m_PipelineSettings            = pipelineSettings;
            m_PipelineResources           = rpResources;
            m_PipelineRayTracingResources = rpRTResources;

            // Keep track of the ray tracing manager
            m_RaytracingManager = raytracingManager;

            // Keep track of the shared rt manager
            m_SharedRTManager = sharedRTManager;

            // Intermediate buffer that holds the pre-denoised texture
            m_IntermediateBuffer = RTHandles.Alloc(Vector2.one, TextureXR.slices, colorFormat: GraphicsFormat.R16G16B16A16_SFloat, dimension: TextureXR.dimension, enableRandomWrite: true, useDynamicScale: true, useMipMap: false, autoGenerateMips: false, name: "IntermediateAOBuffer");

            // Buffer that holds the uncompressed normal buffer
            m_ViewSpaceNormalBuffer = RTHandles.Alloc(Vector2.one, TextureXR.slices, colorFormat: GraphicsFormat.R16G16B16A16_SFloat, dimension: TextureXR.dimension, enableRandomWrite: true, useDynamicScale: true, useMipMap: false, autoGenerateMips: false, name: "ViewSpaceNormalBuffer");
        }
Example #6
        public void Init(RenderPipelineResources pipelineResources, RenderPipelineSettings pipelineSettings, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager)
        {
            // Keep track of the pipeline settings and resources
            m_PipelineSettings  = pipelineSettings;
            m_PipelineResources = pipelineResources;

            // Keep track of the ray tracing manager
            m_RaytracingManager = raytracingManager;

            // Keep track of the shared rt manager
            m_SharedRTManager = sharedRTManager;

            // Intermediate buffer that holds the pre-denoised texture
            m_IntermediateBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat), enableRandomWrite: true, xrInstancing: true, useDynamicScale: true, useMipMap: false, autoGenerateMips: false, name: "IntermediateAOBuffer");

            // Buffer that holds the average distance of the rays
            m_HitDistanceBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R32_SFloat), enableRandomWrite: true, xrInstancing: true, useDynamicScale: true, useMipMap: false, autoGenerateMips: false, name: "HitDistanceBuffer");

            // Buffer that holds the uncompressed normal buffer
            m_ViewSpaceNormalBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat), enableRandomWrite: true, xrInstancing: true, useDynamicScale: true, useMipMap: false, autoGenerateMips: false, name: "ViewSpaceNormalBuffer");
        }
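Example #7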
        // aggregateFrameSettings already contains the aggregation of RenderPipelineSettings and FrameSettings (regular and/or debug)
        public static LightLoopSettings InitializeLightLoopSettings(Camera camera, FrameSettings aggregateFrameSettings,
                                                                    RenderPipelineSettings renderPipelineSettings, FrameSettings frameSettings)
        {
            LightLoopSettings aggregate = new LightLoopSettings();

            aggregate.enableTileAndCluster          = frameSettings.lightLoopSettings.enableTileAndCluster;
            aggregate.enableComputeLightEvaluation  = frameSettings.lightLoopSettings.enableComputeLightEvaluation;
            aggregate.enableComputeLightVariants    = frameSettings.lightLoopSettings.enableComputeLightVariants;
            aggregate.enableComputeMaterialVariants = frameSettings.lightLoopSettings.enableComputeMaterialVariants;
            aggregate.enableFptlForForwardOpaque    = frameSettings.lightLoopSettings.enableFptlForForwardOpaque;
            aggregate.enableBigTilePrepass          = frameSettings.lightLoopSettings.enableBigTilePrepass;

            // Deferred opaque always uses Fptl. Forward opaque can use Fptl or cluster; transparent uses cluster.
            // When MSAA is enabled we disable Fptl as it becomes expensive compared to cluster.
            // In HD, MSAA is only supported for forward-only rendering, no MSAA in deferred mode (for code complexity reasons).
            aggregate.enableFptlForForwardOpaque = aggregate.enableFptlForForwardOpaque && !aggregateFrameSettings.enableMSAA;
            // If deferred, enable Fptl. If we are forward only and not using Fptl for forward opaque, disable Fptl.
            aggregate.isFptlEnabled = !aggregateFrameSettings.enableForwardRenderingOnly || aggregate.enableFptlForForwardOpaque;

            return(aggregate);
        }
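        // NOTE: A minimal standalone restatement of the Fptl decision above, useful as a
        // sanity check (plain C#; the method and parameter names are assumptions):
        static bool IsFptlEnabled(bool requestedFptlForForwardOpaque, bool msaaEnabled, bool forwardOnly)
        {
            // Fptl for forward opaque is disabled under MSAA, where cluster is cheaper.
            bool fptlForForwardOpaque = requestedFptlForForwardOpaque && !msaaEnabled;
            // Deferred opaque always uses Fptl; a forward-only renderer keeps Fptl only when
            // it is used for forward opaque.
            return !forwardOnly || fptlForForwardOpaque;
        }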
Example #8
        public void Init(RenderPipelineSettings settings, RenderPipelineResources rpResources, HDRenderPipelineRayTracingResources rayTracingResources, BlueNoise blueNoise, HDRenderPipeline renderPipeline, SharedRTManager sharedRTManager, DebugDisplaySettings currentDebugDisplaySettings)
        {
            // Keep track of the resources
            m_Resources   = rpResources;
            m_RTResources = rayTracingResources;

            // Keep track of the settings
            m_Settings = settings;

            // Keep track of the render pipeline
            m_RenderPipeline = renderPipeline;

            // Keep track of the shared RT manager
            m_SharedRTManager = sharedRTManager;

            // Keep track of the blue noise manager
            m_BlueNoise = blueNoise;

            // Create the list of environments
            m_Environments = new List <HDRaytracingEnvironment>();

            // Grab all the ray-tracing environments that were created earlier (in case the initialization order has not been respected, which happens when we open Unity for the first time)
            HDRaytracingEnvironment[] environmentArray = Object.FindObjectsOfType <HDRaytracingEnvironment>();
            for (int envIdx = 0; envIdx < environmentArray.Length; ++envIdx)
            {
                RegisterEnvironment(environmentArray[envIdx]);
            }

            // Init the simple denoiser
            m_SimpleDenoiser.Init(rayTracingResources, m_SharedRTManager);

            // Init the ray count manager
            m_RayCountManager.Init(rayTracingResources, currentDebugDisplaySettings);

#if UNITY_EDITOR
            // We need to invalidate the acceleration structures in case the hierarchy changed
            EditorApplication.hierarchyChanged += OnHierarchyChanged;
#endif
        }
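        // NOTE: OnHierarchyChanged is not shown in this example. A minimal sketch, assuming
        // it only flags the acceleration structures as dirty so they are rebuilt on the next
        // update (the field name is an assumption):
        void OnHierarchyChanged()
        {
            // The scene hierarchy changed, so any cached acceleration structure may be stale.
            m_DirtyAccelerationStructures = true;
        }
Example #9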
        // Init a FrameSettings from render pipeline settings, frame settings and debug settings (if any)
        // This will aggregate the various options
        public static void InitializeFrameSettings(Camera camera, RenderPipelineSettings renderPipelineSettings, FrameSettings srcFrameSettings, ref FrameSettings aggregate)
        {
            if (aggregate == null)
            {
                aggregate = new FrameSettings();
            }

            // When rendering a reflection probe we disable specular as it is view-dependent
            if (camera.cameraType == CameraType.Reflection)
            {
                aggregate.diffuseGlobalDimmer  = 1.0f;
                aggregate.specularGlobalDimmer = 0.0f;
            }
            else
            {
                aggregate.diffuseGlobalDimmer  = 1.0f;
                aggregate.specularGlobalDimmer = 1.0f;
            }

            aggregate.enableShadow                = srcFrameSettings.enableShadow;
            aggregate.enableContactShadows        = srcFrameSettings.enableContactShadows;
            aggregate.enableShadowMask            = srcFrameSettings.enableShadowMask && renderPipelineSettings.supportShadowMask;
            aggregate.enableSSR                   = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSSR && renderPipelineSettings.supportSSR;
            aggregate.enableSSAO                  = srcFrameSettings.enableSSAO && renderPipelineSettings.supportSSAO;
            aggregate.enableSubsurfaceScattering  = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSubsurfaceScattering && renderPipelineSettings.supportSubsurfaceScattering;
            aggregate.enableTransmission          = srcFrameSettings.enableTransmission;
            aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
            // We must take care of the scene view fog flags in the editor
            if (!CoreUtils.IsSceneViewFogEnabled(camera))
            {
                aggregate.enableAtmosphericScattering = false;
            }
            // Volumetrics are disabled if there is no atmospheric scattering
            aggregate.enableVolumetrics = srcFrameSettings.enableVolumetrics && renderPipelineSettings.supportVolumetrics && aggregate.enableAtmosphericScattering;

            // TODO: Add support of volumetric in planar reflection
            if (camera.cameraType == CameraType.Reflection)
            {
                aggregate.enableVolumetrics = false;
            }

            aggregate.enableLightLayers = srcFrameSettings.enableLightLayers && renderPipelineSettings.supportLightLayers;

            // We have to fall back to forward-only rendering when the scene view is using wireframe rendering mode,
            // as rendering everything in wireframe + deferred does not play well together
            aggregate.enableForwardRenderingOnly = srcFrameSettings.enableForwardRenderingOnly || GL.wireframe || renderPipelineSettings.supportOnlyForward;
            aggregate.enableDepthPrepassWithDeferredRendering = srcFrameSettings.enableDepthPrepassWithDeferredRendering;

            aggregate.enableTransparentPrepass  = srcFrameSettings.enableTransparentPrepass;
            aggregate.enableMotionVectors       = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportMotionVectors;
            aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
            aggregate.enableDecals              = srcFrameSettings.enableDecals && renderPipelineSettings.supportDecals;
            aggregate.enableRoughRefraction     = srcFrameSettings.enableRoughRefraction;
            aggregate.enableTransparentPostpass = srcFrameSettings.enableTransparentPostpass;
            aggregate.enableDistortion          = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableDistortion;

            // Planar and real-time cubemap probes don't need post-process and render in FP16
            aggregate.enablePostprocess = camera.cameraType != CameraType.Reflection && srcFrameSettings.enablePostprocess;

#if UNITY_SWITCH
            aggregate.enableStereo = false;
#else
            aggregate.enableStereo = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableStereo && XRSettings.isDeviceActive && (camera.stereoTargetEye == StereoTargetEyeMask.Both) && renderPipelineSettings.supportStereo;
#endif

            aggregate.enableAsyncCompute = srcFrameSettings.enableAsyncCompute && SystemInfo.supportsAsyncCompute;

            aggregate.enableOpaqueObjects      = srcFrameSettings.enableOpaqueObjects;
            aggregate.enableTransparentObjects = srcFrameSettings.enableTransparentObjects;

            aggregate.enableMSAA = srcFrameSettings.enableMSAA && renderPipelineSettings.supportMSAA;

            aggregate.ConfigureMSAADependentSettings();
            aggregate.ConfigureStereoDependentSettings();

            // Disable various options for the preview except if we are a Camera Editor preview
            if (HDUtils.IsRegularPreviewCamera(camera))
            {
                aggregate.enableShadow                = false;
                aggregate.enableContactShadows        = false;
                aggregate.enableShadowMask            = false;
                aggregate.enableSSR                   = false;
                aggregate.enableSSAO                  = false;
                aggregate.enableAtmosphericScattering = false;
                aggregate.enableVolumetrics           = false;
                aggregate.enableLightLayers           = false;
                aggregate.enableTransparentPrepass    = false;
                aggregate.enableMotionVectors         = false;
                aggregate.enableObjectMotionVectors   = false;
                aggregate.enableDecals                = false;
                aggregate.enableTransparentPostpass   = false;
                aggregate.enableDistortion            = false;
                aggregate.enablePostprocess           = false;
                aggregate.enableStereo                = false;
            }

            LightLoopSettings.InitializeLightLoopSettings(camera, aggregate, renderPipelineSettings, srcFrameSettings, ref aggregate.lightLoopSettings);
        }
Example #10
        public void InitSharedBuffers(GBufferManager gbufferManager, RenderPipelineSettings settings, RenderPipelineResources resources)
        {
            // Set the flags
            m_MSAASupported        = settings.supportMSAA;
            m_MSAASamples          = m_MSAASupported ? settings.msaaSampleCount : MSAASamples.None;
            m_MotionVectorsSupport = settings.supportMotionVectors;
            m_ReuseGBufferMemory   = settings.supportedLitShaderMode != RenderPipelineSettings.SupportedLitShaderMode.ForwardOnly;

            // Create the depth/stencil buffer
            m_CameraDepthStencilBuffer = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth32, filterMode: FilterMode.Point, xrInstancing: true, useDynamicScale: true, name: "CameraDepthStencil");

            // Create the mip chain buffer
            m_CameraDepthBufferMipChainInfo = new HDUtils.PackedMipChainInfo();
            m_CameraDepthBufferMipChainInfo.Allocate();
            m_CameraDepthBufferMipChain = RTHandles.Alloc(ComputeDepthBufferMipChainSize, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R32_SFloat), filterMode: FilterMode.Point, enableRandomWrite: true, xrInstancing: true, useDynamicScale: true, name: "CameraDepthBufferMipChain");

            if (settings.lowresTransparentSettings.enabled)
            {
                // Create the half res depth buffer used for low resolution transparency
                m_CameraHalfResDepthBuffer = RTHandles.Alloc(Vector2.one * 0.5f, depthBufferBits: DepthBits.Depth32, filterMode: FilterMode.Point, xrInstancing: true, useDynamicScale: true, name: "LowResDepthBuffer");
            }

            // Technically we won't need this buffer in some cases, but that is nothing we can determine at init time.
            m_CameraStencilBufferCopy = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.None, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R8_UNorm), filterMode: FilterMode.Point, enableRandomWrite: true, xrInstancing: true, useDynamicScale: true, name: "CameraStencilCopy"); // DXGI_FORMAT_R8_UINT is not supported by Unity

            if (m_MotionVectorsSupport)
            {
                m_MotionVectorsRT = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetMotionVectorFormat(), xrInstancing: true, useDynamicScale: true, name: "MotionVectors");
                if (m_MSAASupported)
                {
                    m_MotionVectorsMSAART = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetMotionVectorFormat(), enableMSAA: true, bindTextureMS: true, xrInstancing: true, useDynamicScale: true, name: "MotionVectorsMSAA");
                }
            }

            // Allocate the additional textures only if MSAA is supported
            if (m_MSAASupported)
            {
                // Let's create the MSAA textures
                m_CameraDepthStencilMSAABuffer = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, xrInstancing: true, useDynamicScale: true, name: "CameraDepthStencilMSAA");
                m_CameraDepthValuesBuffer      = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R32G32B32A32_SFloat), xrInstancing: true, useDynamicScale: true, name: "DepthValuesBuffer");
                m_DepthAsColorMSAART           = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R32_SFloat), bindTextureMS: true, enableMSAA: true, xrInstancing: true, useDynamicScale: true, name: "DepthAsColorMSAA");

                // We need to allocate this texture as long as MSAA is supported, because in Both mode one of the cameras can be forward-only via its frame settings
                m_NormalMSAART = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R8G8B8A8_UNorm), enableMSAA: true, bindTextureMS: true, xrInstancing: true, useDynamicScale: true, name: "NormalBufferMSAA");

                // Create the required resolve materials
                m_DepthResolveMaterial = CoreUtils.CreateEngineMaterial(resources.shaders.depthValuesPS);
                m_ColorResolveMaterial = CoreUtils.CreateEngineMaterial(resources.shaders.colorResolvePS);
            }

            // If we are in the forward only mode
            if (!m_ReuseGBufferMemory)
            {
                // In case of full forward we must allocate the render target for normal buffer (or reuse one already existing)
                // TODO: Provide a way to reuse a render target
                m_NormalRT = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R8G8B8A8_UNorm), enableRandomWrite: true, xrInstancing: true, useDynamicScale: true, name: "NormalBuffer");
            }
            else
            {
                // When not forward only we are using the normal buffer of the gbuffer
                // In case of deferred, we must be in sync with NormalBuffer.hlsl and lit.hlsl files and set up the correct buffers
                m_NormalRT = gbufferManager.GetNormalBuffer(0); // Normal + Roughness
            }
        }
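Example #11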
        // Init a FrameSettings from render pipeline settings, frame settings and debug settings (if any)
        // This will aggregate the various options
        public static void InitializeFrameSettings(Camera camera, RenderPipelineSettings renderPipelineSettings, FrameSettings srcFrameSettings, ref FrameSettings aggregate)
        {
            if (aggregate == null)
            {
                aggregate = new FrameSettings();
            }

            // When rendering a reflection probe we disable specular as it is view-dependent
            if (camera.cameraType == CameraType.Reflection)
            {
                aggregate.diffuseGlobalDimmer  = 1.0f;
                aggregate.specularGlobalDimmer = 0.0f;
            }
            else
            {
                aggregate.diffuseGlobalDimmer  = 1.0f;
                aggregate.specularGlobalDimmer = 1.0f;
            }

            aggregate.enableShadow               = srcFrameSettings.enableShadow;
            aggregate.enableContactShadows       = srcFrameSettings.enableContactShadows;
            aggregate.enableSSR                  = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSSR && renderPipelineSettings.supportSSR;
            aggregate.enableSSAO                 = srcFrameSettings.enableSSAO && renderPipelineSettings.supportSSAO;
            aggregate.enableSubsurfaceScattering = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSubsurfaceScattering && renderPipelineSettings.supportSubsurfaceScattering;
            aggregate.enableTransmission         = srcFrameSettings.enableTransmission;

            // We have to fall back to forward-only rendering when the scene view is using wireframe rendering mode,
            // as rendering everything in wireframe + deferred does not play well together
            aggregate.enableForwardRenderingOnly = srcFrameSettings.enableForwardRenderingOnly || GL.wireframe || renderPipelineSettings.supportForwardOnly;
            aggregate.enableDepthPrepassWithDeferredRendering = srcFrameSettings.enableDepthPrepassWithDeferredRendering;
            aggregate.enableAlphaTestOnlyInDeferredPrepass    = srcFrameSettings.enableAlphaTestOnlyInDeferredPrepass;

            aggregate.enableTransparentPrepass    = srcFrameSettings.enableTransparentPrepass;
            aggregate.enableMotionVectors         = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportMotionVectors;
            aggregate.enableObjectMotionVectors   = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
            aggregate.enableDBuffer               = srcFrameSettings.enableDBuffer && renderPipelineSettings.supportDBuffer;
            aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
            aggregate.enableRoughRefraction       = srcFrameSettings.enableRoughRefraction;
            aggregate.enableTransparentPostpass   = srcFrameSettings.enableTransparentPostpass;
            aggregate.enableDistortion            = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableDistortion;

            // Planar and real-time cubemap probes don't need post-process and render in FP16
            aggregate.enablePostprocess = camera.cameraType != CameraType.Reflection && srcFrameSettings.enablePostprocess;

            aggregate.enableStereo = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableStereo && XRSettings.isDeviceActive && (camera.stereoTargetEye == StereoTargetEyeMask.Both) && renderPipelineSettings.supportStereo;

            aggregate.enableAsyncCompute = srcFrameSettings.enableAsyncCompute && SystemInfo.supportsAsyncCompute;

            aggregate.enableOpaqueObjects      = srcFrameSettings.enableOpaqueObjects;
            aggregate.enableTransparentObjects = srcFrameSettings.enableTransparentObjects;

            aggregate.enableMSAA = srcFrameSettings.enableMSAA && renderPipelineSettings.supportMSAA;

            aggregate.enableShadowMask = srcFrameSettings.enableShadowMask && renderPipelineSettings.supportShadowMask;

            aggregate.ConfigureMSAADependentSettings();
            aggregate.ConfigureStereoDependentSettings();

            if (camera.cameraType == CameraType.Preview)
            {
                // Remove undesired features in preview
                aggregate.enableShadow                = false;
                aggregate.enableContactShadows        = false;
                aggregate.enableSSR                   = false;
                aggregate.enableSSAO                  = false;
                aggregate.enableTransparentPrepass    = false;
                aggregate.enableMotionVectors         = false;
                aggregate.enableObjectMotionVectors   = false;
                aggregate.enableDBuffer               = false;
                aggregate.enableAtmosphericScattering = false;
                aggregate.enableTransparentPostpass   = false;
                aggregate.enableDistortion            = false;
                aggregate.enablePostprocess           = false;
                aggregate.enableStereo                = false;
                aggregate.enableShadowMask            = false;
            }

            LightLoopSettings.InitializeLightLoopSettings(camera, aggregate, renderPipelineSettings, srcFrameSettings, ref aggregate.lightLoopSettings);
        }
Example #12
        // Init a FrameSettings from render pipeline settings, frame settings and debug settings (if any)
        // This will aggregate the various options
        public static void InitializeFrameSettings(Camera camera, RenderPipelineSettings renderPipelineSettings, FrameSettings srcFrameSettings, ref FrameSettings aggregate)
        {
            if (aggregate == null)
            {
                aggregate = new FrameSettings();
            }

            // When rendering a reflection probe we disable specular as it is view-dependent
            if (camera.cameraType == CameraType.Reflection)
            {
                aggregate.diffuseGlobalDimmer  = 1.0f;
                aggregate.specularGlobalDimmer = 0.0f;
            }
            else
            {
                aggregate.diffuseGlobalDimmer  = 1.0f;
                aggregate.specularGlobalDimmer = 1.0f;
            }

            aggregate.enableShadow                = srcFrameSettings.enableShadow;
            aggregate.enableContactShadows        = srcFrameSettings.enableContactShadows;
            aggregate.enableShadowMask            = srcFrameSettings.enableShadowMask && renderPipelineSettings.supportShadowMask;
            aggregate.enableSSR                   = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSSR && renderPipelineSettings.supportSSR; // No recursive reflections
            aggregate.enableSSAO                  = srcFrameSettings.enableSSAO && renderPipelineSettings.supportSSAO;
            aggregate.enableSubsurfaceScattering  = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSubsurfaceScattering && renderPipelineSettings.supportSubsurfaceScattering;
            aggregate.enableTransmission          = srcFrameSettings.enableTransmission;
            aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
            // We must take care of the scene view fog flags in the editor
            if (!CoreUtils.IsSceneViewFogEnabled(camera))
            {
                aggregate.enableAtmosphericScattering = false;
            }
            // Volumetrics are disabled if there is no atmospheric scattering
            aggregate.enableVolumetrics = srcFrameSettings.enableVolumetrics && renderPipelineSettings.supportVolumetrics && aggregate.enableAtmosphericScattering;
            aggregate.enableReprojectionForVolumetrics = srcFrameSettings.enableReprojectionForVolumetrics;

            // TODO: Add support of volumetric in planar reflection
            if (camera.cameraType == CameraType.Reflection)
            {
                aggregate.enableVolumetrics = false;
            }

            aggregate.enableLightLayers = srcFrameSettings.enableLightLayers && renderPipelineSettings.supportLightLayers;

            // We have to fall back to forward-only rendering when the scene view is using wireframe rendering mode,
            // as rendering everything in wireframe + deferred does not play well together
            if (GL.wireframe) // Force forward mode for wireframe
            {
                aggregate.shaderLitMode = LitShaderMode.Forward;
            }
            else
            {
                switch (renderPipelineSettings.supportedLitShaderMode)
                {
                case RenderPipelineSettings.SupportedLitShaderMode.ForwardOnly:
                    aggregate.shaderLitMode = LitShaderMode.Forward;
                    break;

                case RenderPipelineSettings.SupportedLitShaderMode.DeferredOnly:
                    aggregate.shaderLitMode = LitShaderMode.Deferred;
                    break;

                case RenderPipelineSettings.SupportedLitShaderMode.Both:
                    aggregate.shaderLitMode = srcFrameSettings.shaderLitMode;
                    break;
                }
            }

            aggregate.enableDepthPrepassWithDeferredRendering = srcFrameSettings.enableDepthPrepassWithDeferredRendering;

            aggregate.enableTransparentPrepass = srcFrameSettings.enableTransparentPrepass;
            aggregate.enableMotionVectors      = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportMotionVectors;
            // Object motion vectors are disabled if motion vectors are disabled
            aggregate.enableObjectMotionVectors = srcFrameSettings.enableObjectMotionVectors && aggregate.enableMotionVectors;
            aggregate.enableDecals              = srcFrameSettings.enableDecals && renderPipelineSettings.supportDecals;
            aggregate.enableRoughRefraction     = srcFrameSettings.enableRoughRefraction;
            aggregate.enableTransparentPostpass = srcFrameSettings.enableTransparentPostpass;
            aggregate.enableDistortion          = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableDistortion;

            // Planar and real-time cubemap probes don't need post-process and render in FP16
            aggregate.enablePostprocess = camera.cameraType != CameraType.Reflection && srcFrameSettings.enablePostprocess;

            aggregate.enableAsyncCompute = srcFrameSettings.enableAsyncCompute && SystemInfo.supportsAsyncCompute;

            aggregate.enableOpaqueObjects            = srcFrameSettings.enableOpaqueObjects;
            aggregate.enableTransparentObjects       = srcFrameSettings.enableTransparentObjects;
            aggregate.enableRealtimePlanarReflection = srcFrameSettings.enableRealtimePlanarReflection;

            // MSAA is only supported in forward
            aggregate.enableMSAA = srcFrameSettings.enableMSAA && renderPipelineSettings.supportMSAA && aggregate.shaderLitMode == LitShaderMode.Forward;

            aggregate.ConfigureMSAADependentSettings();
            aggregate.ConfigureStereoDependentSettings(camera);

            // Disable various options for the preview except if we are a Camera Editor preview
            if (HDUtils.IsRegularPreviewCamera(camera))
            {
                aggregate.enableShadow                     = false;
                aggregate.enableContactShadows             = false;
                aggregate.enableShadowMask                 = false;
                aggregate.enableSSR                        = false;
                aggregate.enableSSAO                       = false;
                aggregate.enableAtmosphericScattering      = false;
                aggregate.enableVolumetrics                = false;
                aggregate.enableReprojectionForVolumetrics = false;
                aggregate.enableLightLayers                = false;
                aggregate.enableTransparentPrepass         = false;
                aggregate.enableMotionVectors              = false;
                aggregate.enableObjectMotionVectors        = false;
                aggregate.enableDecals                     = false;
                aggregate.enableTransparentPostpass        = false;
                aggregate.enableDistortion                 = false;
                aggregate.enablePostprocess                = false;
            }

            LightLoopSettings.InitializeLightLoopSettings(camera, aggregate, renderPipelineSettings, srcFrameSettings, ref aggregate.lightLoopSettings);
        }
Example #13
        public AmbientOcclusionSystem(HDRenderPipelineAsset hdAsset)
        {
            m_Settings  = hdAsset.currentPlatformRenderPipelineSettings;
            m_Resources = hdAsset.renderPipelineResources;

            if (!hdAsset.currentPlatformRenderPipelineSettings.supportSSAO)
            {
                return;
            }

            bool supportMSAA = hdAsset.currentPlatformRenderPipelineSettings.supportMSAA;

            // Destination targets
            m_AmbientOcclusionTex = RTHandles.Alloc(Vector2.one,
                                                    filterMode: FilterMode.Bilinear,
                                                    colorFormat: GraphicsFormat.R8_UNorm,
                                                    enableRandomWrite: true,
                                                    xrInstancing: true,
                                                    useDynamicScale: true,
                                                    name: "Ambient Occlusion"
                                                    );

            if (supportMSAA)
            {
                m_MultiAmbientOcclusionTex = RTHandles.Alloc(Vector2.one,
                                                             filterMode: FilterMode.Bilinear,
                                                             colorFormat: GraphicsFormat.R8G8_UNorm,
                                                             enableRandomWrite: true,
                                                             xrInstancing: true,
                                                             useDynamicScale: true,
                                                             name: "Ambient Occlusion MSAA"
                                                             );

                m_ResolveMaterial      = CoreUtils.CreateEngineMaterial(m_Resources.shaders.aoResolvePS);
                m_ResolvePropertyBlock = new MaterialPropertyBlock();
            }

            // Prepare scale functors
            m_ScaleFunctors    = new ScaleFunc[(int)MipLevel.Count];
            m_ScaleFunctors[0] = size => size; // 0 is original size (mip0)

            for (int i = 1; i < m_ScaleFunctors.Length; i++)
            {
                int mult = i; // Copy the loop variable so each lambda captures its own value
                m_ScaleFunctors[i] = size =>
                {
                    int div = 1 << mult;
                    return(new Vector2Int(
                               (size.x + (div - 1)) / div,
                               (size.y + (div - 1)) / div
                               ));
                };
            }

            var fmtFP16 = supportMSAA ? GraphicsFormat.R16G16_SFloat  : GraphicsFormat.R16_SFloat;
            var fmtFP32 = supportMSAA ? GraphicsFormat.R32G32_SFloat : GraphicsFormat.R32_SFloat;
            var fmtFX8  = supportMSAA ? GraphicsFormat.R8G8_UNorm    : GraphicsFormat.R8_UNorm;

            // All of these are pre-allocated to 1x1 and will be automatically scaled properly by
            // the internal RTHandle system
            Alloc(out m_LinearDepthTex, MipLevel.Original, fmtFP16, true, "AOLinearDepth");

            Alloc(out m_LowDepth1Tex, MipLevel.L1, fmtFP32, true, "AOLowDepth1");
            Alloc(out m_LowDepth2Tex, MipLevel.L2, fmtFP32, true, "AOLowDepth2");
            Alloc(out m_LowDepth3Tex, MipLevel.L3, fmtFP32, true, "AOLowDepth3");
            Alloc(out m_LowDepth4Tex, MipLevel.L4, fmtFP32, true, "AOLowDepth4");

            AllocArray(out m_TiledDepth1Tex, MipLevel.L3, fmtFP16, true, "AOTiledDepth1");
            AllocArray(out m_TiledDepth2Tex, MipLevel.L4, fmtFP16, true, "AOTiledDepth2");
            AllocArray(out m_TiledDepth3Tex, MipLevel.L5, fmtFP16, true, "AOTiledDepth3");
            AllocArray(out m_TiledDepth4Tex, MipLevel.L6, fmtFP16, true, "AOTiledDepth4");

            Alloc(out m_Occlusion1Tex, MipLevel.L1, fmtFX8, true, "AOOcclusion1");
            Alloc(out m_Occlusion2Tex, MipLevel.L2, fmtFX8, true, "AOOcclusion2");
            Alloc(out m_Occlusion3Tex, MipLevel.L3, fmtFX8, true, "AOOcclusion3");
            Alloc(out m_Occlusion4Tex, MipLevel.L4, fmtFX8, true, "AOOcclusion4");

            Alloc(out m_Combined1Tex, MipLevel.L1, fmtFX8, true, "AOCombined1");
            Alloc(out m_Combined2Tex, MipLevel.L2, fmtFX8, true, "AOCombined2");
            Alloc(out m_Combined3Tex, MipLevel.L3, fmtFX8, true, "AOCombined3");
        }
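        // NOTE: The "int mult = i;" copy above is essential: C# lambdas capture variables,
        // not values, and a for loop shares one loop variable across all iterations. A
        // minimal standalone illustration of the bug the copy avoids (not HDRP code):
        static void ClosureCaptureDemo()
        {
            var funcs = new System.Func<int>[3];
            for (int i = 0; i < funcs.Length; i++)
            {
                int copy = i;          // Without this copy, every lambda would observe i == 3.
                funcs[i] = () => copy; // funcs[0]() == 0, funcs[1]() == 1, funcs[2]() == 2.
            }
        }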
Example #14
        // Note: this version is the one tested, as there is currently an issue getting HDRenderPipelineAsset in batch mode in the unit test framework.
        /// <summary>Same as FrameSettings.AggregateFrameSettings but keeps a history of the aggregation in a collection for the DebugMenu.
        /// Aggregation starts from the default, is overridden by the renderer, then sanitized depending on the supported features of the HDRenderPipelineAsset. Then the DebugMenu override occurs.</summary>
        /// <param name="aggregatedFrameSettings">The aggregated FrameSettings result.</param>
        /// <param name="camera">The camera rendering.</param>
        /// <param name="additionalData">Additional data of the camera rendering.</param>
        /// <param name="defaultFrameSettings">Base FrameSettings to copy prior to any override.</param>
        /// <param name="supportedFeatures">Currently supported features for the sanitization pass.</param>
        public static void AggregateFrameSettings(ref FrameSettings aggregatedFrameSettings, Camera camera, HDAdditionalCameraData additionalData, ref FrameSettings defaultFrameSettings, RenderPipelineSettings supportedFeatures)
        {
            FrameSettingsHistory history = new FrameSettingsHistory
            {
                camera      = camera,
                defaultType = additionalData ? additionalData.defaultFrameSettings : FrameSettingsRenderType.Camera
            };

            aggregatedFrameSettings = defaultFrameSettings;
            if (additionalData && additionalData.customRenderingSettings)
            {
                FrameSettings.Override(ref aggregatedFrameSettings, additionalData.renderingPathCustomFrameSettings, additionalData.renderingPathCustomFrameSettingsOverrideMask);
                history.customMask = additionalData.renderingPathCustomFrameSettingsOverrideMask;
            }
            history.overridden = aggregatedFrameSettings;
            FrameSettings.Sanitize(ref aggregatedFrameSettings, camera, supportedFeatures);

            bool noHistory        = !frameSettingsHistory.ContainsKey(camera);
            bool updatedComponent = !noHistory && frameSettingsHistory[camera].sanitazed != aggregatedFrameSettings;
            bool dirty            = noHistory || updatedComponent;

            history.sanitazed = aggregatedFrameSettings;
            if (dirty)
            {
                history.debug = history.sanitazed;
            }
            else
            {
                history.debug = frameSettingsHistory[camera].debug;

                // Ensure user is not trying to activate unsupported settings in DebugMenu
                FrameSettings.Sanitize(ref history.debug, camera, supportedFeatures);
            }

            aggregatedFrameSettings      = history.debug;
            frameSettingsHistory[camera] = history;
        }
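        // NOTE: frameSettingsHistory is referenced above but not declared in this example.
        // A plausible sketch, assuming a static per-camera history map (the declaration is assumed):
        static Dictionary<Camera, FrameSettingsHistory> frameSettingsHistory = new Dictionary<Camera, FrameSettingsHistory>();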