public void Init(HDRenderPipelineAsset asset, SkyManager skyManager, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineAsset = asset;
    m_PipelineResources = asset.renderPipelineResources;
    m_SkyManager = skyManager;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;

    // Output target that receives the ray traced indirect diffuse lighting.
    m_IndirectDiffuseTexture = RTHandles.Alloc(Vector2.one,
        filterMode: FilterMode.Point,
        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        name: "IndirectDiffuseBuffer");
}
public void Initialize(RenderPipelineResources resources, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager, LightLoop lightLoop)
{
    // Hold on to the external systems required by the cluster debug view.
    m_RenderPipelineResources = resources;
    m_RaytracingManager = raytracingManager;
    m_LightLoop = lightLoop;
    m_SharedRTManager = sharedRTManager;

    // Render target used to visualize the light cluster contents.
    m_DebugLightClusterTexture = RTHandles.Alloc(Vector2.one,
        filterMode: FilterMode.Point,
        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        name: "DebugLightClusterTexture");

    // Dummy one-entry cluster; it is resized later once the real size is known.
    m_LightCluster = new ComputeBuffer(1, sizeof(uint));
}
public void Init(HDRenderPipelineAsset asset, SkyManager skyManager, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager, GBufferManager gbufferManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineAsset = asset;
    m_PipelineResources = asset.renderPipelineResources;
    m_SkyManager = skyManager;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;
    m_GBufferManager = gbufferManager;

    // Target receiving the ray traced indirect diffuse lighting.
    m_IndirectDiffuseTexture = RTHandles.Alloc(Vector2.one, TextureXR.slices,
        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
        dimension: TextureXR.dimension,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        autoGenerateMips: false,
        name: "IndirectDiffuseBuffer");

    // Intermediate target used while denoising the indirect diffuse signal.
    m_DenoiseBuffer0 = RTHandles.Alloc(Vector2.one, TextureXR.slices,
        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
        dimension: TextureXR.dimension,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        autoGenerateMips: false,
        name: "IndirectDiffuseDenoiseBuffer");
}
public void Init(RenderPipelineResources rpResources, HDRenderPipelineRayTracingResources rpRTResources, RenderPipelineSettings pipelineSettings, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineSettings = pipelineSettings;
    m_PipelineResources = rpResources;
    m_PipelineRayTracingResources = rpRTResources;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;

    // Intermediate buffer that holds the pre-denoised AO result.
    m_IntermediateBuffer = RTHandles.Alloc(Vector2.one, TextureXR.slices,
        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
        dimension: TextureXR.dimension,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        autoGenerateMips: false,
        name: "IntermediateAOBuffer");

    // Buffer that holds the uncompressed (view-space) normals.
    m_ViewSpaceNormalBuffer = RTHandles.Alloc(Vector2.one, TextureXR.slices,
        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
        dimension: TextureXR.dimension,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        autoGenerateMips: false,
        name: "ViewSpaceNormalBuffer");
}
public void Init(HDRenderPipelineAsset asset, SkyManager skyManager, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager, GBufferManager gbufferManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineAsset = asset;
    m_PipelineResources = asset.renderPipelineResources;
    m_SkyManager = skyManager;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;
    m_GBufferManager = gbufferManager;

    // Both targets share identical allocation parameters; only the name differs.
    RTHandleSystem.RTHandle AllocHalfFloatTarget(string bufferName)
    {
        return RTHandles.Alloc(Vector2.one,
            filterMode: FilterMode.Point,
            colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat),
            enableRandomWrite: true,
            xrInstancing: true,
            useDynamicScale: true,
            useMipMap: false,
            autoGenerateMips: false,
            name: bufferName);
    }

    // Ray traced indirect diffuse output and its denoising scratch buffer.
    m_IndirectDiffuseTexture = AllocHalfFloatTarget("IndirectDiffuseBuffer");
    m_DenoiseBuffer0 = AllocHalfFloatTarget("IndirectDiffuseDenoiseBuffer");
}
public void Init(RenderPipelineResources pipelineResources, RenderPipelineSettings pipelineSettings, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineSettings = pipelineSettings;
    m_PipelineResources = pipelineResources;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;

    // All three targets share the same allocation parameters except format and name.
    RTHandleSystem.RTHandle AllocTarget(GraphicsFormat format, string bufferName)
    {
        return RTHandles.Alloc(Vector2.one,
            filterMode: FilterMode.Point,
            colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(format),
            enableRandomWrite: true,
            xrInstancing: true,
            useDynamicScale: true,
            useMipMap: false,
            autoGenerateMips: false,
            name: bufferName);
    }

    // Intermediate buffer that holds the pre-denoised AO result.
    m_IntermediateBuffer = AllocTarget(GraphicsFormat.R16G16B16A16_SFloat, "IntermediateAOBuffer");
    // Buffer that holds the average hit distance of the rays.
    m_HitDistanceBuffer = AllocTarget(GraphicsFormat.R32_SFloat, "HitDistanceBuffer");
    // Buffer that holds the uncompressed normals.
    m_ViewSpaceNormalBuffer = AllocTarget(GraphicsFormat.R16G16B16A16_SFloat, "ViewSpaceNormalBuffer");
}
public void Init(RenderPipelineSettings settings, RenderPipelineResources rpResources, HDRenderPipelineRayTracingResources rayTracingResources, BlueNoise blueNoise, HDRenderPipeline renderPipeline, SharedRTManager sharedRTManager, DebugDisplaySettings currentDebugDisplaySettings)
{
    // Cache the external systems this manager depends on.
    m_Resources = rpResources;
    m_RTResources = rayTracingResources;
    m_Settings = settings;
    m_RenderPipeline = renderPipeline;
    m_SharedRTManager = sharedRTManager;
    m_BlueNoise = blueNoise;

    // Pick up any ray tracing environments created before this manager was
    // initialized (initialization order is not guaranteed, e.g. on first editor launch).
    m_Environments = new List<HDRaytracingEnvironment>();
    foreach (var environment in Object.FindObjectsOfType<HDRaytracingEnvironment>())
    {
        RegisterEnvironment(environment);
    }

    // Initialize the helper systems owned by this manager.
    m_SimpleDenoiser.Init(rayTracingResources, m_SharedRTManager);
    m_RayCountManager.Init(rayTracingResources, currentDebugDisplaySettings);

#if UNITY_EDITOR
    // Acceleration structures have to be invalidated whenever the hierarchy changes.
    EditorApplication.hierarchyChanged += OnHierarchyChanged;
#endif
}
public void Init(HDRenderPipelineAsset asset, SkyManager skyManager, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager, GBufferManager gbufferManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineAsset = asset;
    m_PipelineResources = asset.renderPipelineResources;
    m_SkyManager = skyManager;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;
    m_GbufferManager = gbufferManager;

    // Every target below shares the same allocation parameters except format and name.
    RTHandleSystem.RTHandle AllocTarget(GraphicsFormat format, string bufferName)
    {
        return RTHandles.Alloc(Vector2.one,
            filterMode: FilterMode.Point,
            colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(format),
            enableRandomWrite: true,
            xrInstancing: true,
            useDynamicScale: true,
            useMipMap: false,
            autoGenerateMips: false,
            name: bufferName);
    }

    // Lighting output and the per-pixel hit/pdf data of the traced rays.
    m_LightingTexture = AllocTarget(GraphicsFormat.R16G16B16A16_SFloat, "LightingBuffer");
    m_HitPdfTexture = AllocTarget(GraphicsFormat.R16G16B16A16_SFloat, "HitPdfBuffer");
    // Variance estimate plus min/max bound buffers used by the filtering.
    m_VarianceBuffer = AllocTarget(GraphicsFormat.R8_UNorm, "VarianceBuffer");
    m_MinBoundBuffer = AllocTarget(GraphicsFormat.B10G11R11_UFloatPack32, "MinBoundBuffer");
    m_MaxBoundBuffer = AllocTarget(GraphicsFormat.B10G11R11_UFloatPack32, "MaxBoundBuffer");
}
public void Init(HDRenderPipelineAsset asset, SkyManager skyManager, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager, GBufferManager gbufferManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineAsset = asset;
    m_PipelineResources = asset.renderPipelineResources;
    m_SkyManager = skyManager;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;
    m_GbufferManager = gbufferManager;

    // Every target below shares the same allocation parameters except format and name.
    RTHandleSystem.RTHandle AllocTarget(GraphicsFormat format, string bufferName)
    {
        return RTHandles.Alloc(Vector2.one,
            filterMode: FilterMode.Point,
            colorFormat: format,
            enableRandomWrite: true,
            useDynamicScale: true,
            useMipMap: false,
            name: bufferName);
    }

    // Lighting output and the per-pixel hit/pdf data of the traced rays.
    m_LightingTexture = AllocTarget(GraphicsFormat.R16G16B16A16_SFloat, "LightingBuffer");
    m_HitPdfTexture = AllocTarget(GraphicsFormat.R16G16B16A16_SFloat, "HitPdfBuffer");
    // Variance estimate plus min/max bound buffers used by the filtering.
    m_VarianceBuffer = AllocTarget(GraphicsFormat.R16_SFloat, "VarianceBuffer");
    m_MinBoundBuffer = AllocTarget(GraphicsFormat.R16G16B16A16_SFloat, "MinBoundBuffer");
    m_MaxBoundBuffer = AllocTarget(GraphicsFormat.R16G16B16A16_SFloat, "MaxBoundBuffer");
}
public void Init(HDRenderPipelineAsset asset, SkyManager skyManager, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineAsset = asset;
    m_PipelineResources = asset.renderPipelineResources;
    m_SkyManager = skyManager;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;

    // Flag texture marking the pixels that need to be ray traced.
    m_RaytracingFlagTarget = RTHandles.Alloc(Vector2.one,
        filterMode: FilterMode.Point,
        colorFormat: GraphicsFormat.R8_SNorm,
        enableRandomWrite: true,
        useMipMap: false,
        name: "RaytracingFlagTexture");

    // Depth-test-only state (no depth write) used when rendering the flag pass.
    m_RaytracingFlagStateBlock = new RenderStateBlock
    {
        depthState = new DepthState(false, CompareFunction.LessEqual),
        mask = RenderStateMask.Depth
    };
}
public void Init(HDRenderPipelineAsset asset, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager, LightLoop lightLoop, GBufferManager gbufferManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineAsset = asset;
    m_PipelineResources = asset.renderPipelineResources;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;
    // The light loop holds all the lights of the scene.
    m_LightLoop = lightLoop;
    // GBuffer manager that holds the data used to shade the samples.
    m_GbufferManager = gbufferManager;

    // Ping-pong scratch buffers used by the shadow denoiser.
    m_DenoiseBuffer0 = RTHandles.Alloc(Vector2.one,
        filterMode: FilterMode.Point,
        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        name: "DenoiseBuffer0");
    m_DenoiseBuffer1 = RTHandles.Alloc(Vector2.one,
        filterMode: FilterMode.Point,
        colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        name: "DenoiseBuffer1");

    // Texture array (4 slices) that stores the area light shadow results.
    m_AreaShadowTextureArray = RTHandles.Alloc(Vector2.one,
        slices: 4,
        dimension: TextureDimension.Tex2DArray,
        filterMode: FilterMode.Point,
        colorFormat: GraphicsFormat.R16_SFloat,
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        name: "AreaShadowArrayBuffer");
}
public void PostDispatchWork(CommandBuffer cmd, HDCamera camera, SharedRTManager sharedRTManager)
{
    var aoSettings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();

    // With MSAA the AO was evaluated per sample; resolve it to one value per pixel.
    if (camera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
    {
        using (new ProfilingSample(cmd, "Resolve AO Buffer", CustomSamplerId.ResolveSSAO.GetSampler()))
        {
            HDUtils.SetRenderTarget(cmd, m_AmbientOcclusionTex);
            m_ResolvePropertyBlock.SetTexture(HDShaderIDs._DepthValuesTexture, sharedRTManager.GetDepthValuesTexture());
            m_ResolvePropertyBlock.SetTexture(HDShaderIDs._MultiAmbientOcclusionTexture, m_MultiAmbientOcclusionTex);
            // Fullscreen triangle (3 vertices, 1 instance) driven by the resolve material.
            cmd.DrawProcedural(Matrix4x4.identity, m_ResolveMaterial, 0, MeshTopology.Triangles, 3, 1, m_ResolvePropertyBlock);
        }
    }

    // Publish the AO result and its parameters to every shader.
    cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, m_AmbientOcclusionTex);
    cmd.SetGlobalVector(HDShaderIDs._AmbientOcclusionParam, new Vector4(0f, 0f, 0f, aoSettings.directLightingStrength.value));

    // TODO: All the push-debug handling should be centralized somewhere.
    (RenderPipelineManager.currentPipeline as HDRenderPipeline).PushFullScreenDebugTexture(camera, cmd, m_AmbientOcclusionTex, FullScreenDebugMode.SSAO);
}
public void Init(HDRenderPipelineAsset asset, SkyManager skyManager, HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager)
{
    // Cache the external systems this pass depends on.
    m_PipelineAsset = asset;
    m_PipelineResources = asset.renderPipelineResources;
    m_SkyManager = skyManager;
    m_RaytracingManager = raytracingManager;
    m_SharedRTManager = sharedRTManager;

    // Flag texture marking the pixels that need to be ray traced.
    m_RaytracingFlagTarget = RTHandles.Alloc(Vector2.one,
        filterMode: FilterMode.Point,
        colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R8_SNorm),
        enableRandomWrite: true,
        useMipMap: false,
        name: "RaytracingFlagTexture");

    // Target used to output ray tracing debug information.
    m_DebugRaytracingTexture = RTHandles.Alloc(Vector2.one,
        filterMode: FilterMode.Point,
        colorFormat: UnityEngine.Experimental.Rendering.HDPipeline.HDRenderPipeline.OverrideRTGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat),
        enableRandomWrite: true,
        useDynamicScale: true,
        useMipMap: false,
        name: "DebugRaytracingBuffer");

    // Depth-test-only state (no depth write) used when rendering the flag pass.
    m_RaytracingFlagStateBlock = new RenderStateBlock
    {
        depthState = new DepthState(false, CompareFunction.LessEqual),
        mask = RenderStateMask.Depth
    };
}
public void Init(RenderPipelineSettings settings, RenderPipelineResources resources, BlueNoise blueNoise, LightLoop lightloop, SharedRTManager sharedRTManager)
{
    // Cache the external systems this manager depends on.
    m_Resources = resources;
    m_Settings = settings;
    m_LightLoop = lightloop;
    m_SharedRTManager = sharedRTManager;
    m_BlueNoise = blueNoise;

    // Pick up any ray tracing environments created before this manager was
    // initialized (initialization order is not guaranteed, e.g. on first editor launch).
    m_Environments = new List<HDRaytracingEnvironment>();
    foreach (var environment in Object.FindObjectsOfType<HDRaytracingEnvironment>())
    {
        RegisterEnvironment(environment);
    }

    // Containers tracking the filters, sub-scenes and requested layer masks.
    m_Filters = new List<HDRayTracingFilter>();
    m_SubScenes = new Dictionary<int, HDRayTracingSubScene>();
    m_LayerMasks = new List<int>();

    // Build the "default" sub-scene used by the scene camera. It is persistent
    // and keyed by the editor ray tracing filter layer mask.
    int defaultMask = m_Settings.editorRaytracingFilterLayerMask.value;
    var defaultSubScene = new HDRayTracingSubScene();
    defaultSubScene.mask = defaultMask;
    defaultSubScene.persistent = true;
    BuildSubSceneStructure(ref defaultSubScene);
    m_SubScenes.Add(defaultMask, defaultSubScene);
    m_LayerMasks.Add(defaultMask);

    // Pick up any ray tracing filters that were created before this manager.
    foreach (var filter in Object.FindObjectsOfType<HDRayTracingFilter>())
    {
        RegisterFilter(filter);
    }

    m_RayCountManager.Init(resources);

#if UNITY_EDITOR
    // Acceleration structures have to be invalidated whenever the hierarchy changes.
    EditorApplication.hierarchyChanged += OnHierarchyChanged;
#endif
}
public void InitRaytracing(HDRaytracingManager raytracingManager, SharedRTManager sharedRTManager)
{
    // Cache the manager, then initialize the ray traced AO sub-system with it.
    m_RayTracingManager = raytracingManager;
    m_RaytracingAmbientOcclusion.Init(m_Resources, m_Settings, m_RayTracingManager, sharedRTManager);
}
// Denoises the packed GTAO result: a spatial filter, then temporal accumulation
// against the in-class history buffers, then (when running at half resolution)
// an upsample back to full resolution.
private void DenoiseAO(CommandBuffer cmd, HDCamera camera, SharedRTManager sharedRTManager)
{
    var settings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();
    // Nothing to denoise if AO is disabled for this camera.
    if (!IsActive(camera, settings)) { return; }

    var cs = m_Resources.shaders.GTAODenoiseCS;

    // Resolution the AO passes actually ran at; at half res the buffer-size
    // vector stores half dimensions and the matching inverse sizes.
    Vector4 aoBufferInfo;
    Vector2 runningRes;
    if (m_RunningFullRes)
    {
        runningRes = new Vector2(camera.actualWidth, camera.actualHeight);
        aoBufferInfo = new Vector4(camera.actualWidth, camera.actualHeight, 1.0f / camera.actualWidth, 1.0f / camera.actualHeight);
    }
    else
    {
        runningRes = new Vector2(camera.actualWidth, camera.actualHeight) * 0.5f;
        aoBufferInfo = new Vector4(camera.actualWidth * 0.5f, camera.actualHeight * 0.5f, 2.0f / camera.actualWidth, 2.0f / camera.actualHeight);
    }

    Vector4 aoParams0 = new Vector4(
        settings.fullResolution.value ? 0.0f : 1.0f,
        0, // not needed by the denoise passes
        settings.radius.value,
        settings.stepCount.value
        );

    Vector4 aoParams1 = new Vector4(
        settings.intensity.value,
        1.0f / (settings.radius.value * settings.radius.value),
        0,
        0
        );

    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams0, aoParams0);
    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams1, aoParams1);
    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOBufferSize, aoBufferInfo);

    // Spatial denoise: blurs the packed AO data into m_PackedDataBlurred.
    using (new ProfilingSample(cmd, "Spatial Denoise GTAO", CustomSamplerId.ResolveSSAO.GetSampler()))
    {
        var kernel = cs.FindKernel("GTAODenoise_Spatial");
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedData, m_PackedDataTex);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedBlurred, m_PackedDataBlurred);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_AmbientOcclusionTex);
        // 8x8 thread groups, rounded up to cover the running resolution.
        const int groupSizeX = 8;
        const int groupSizeY = 8;
        int threadGroupX = ((int)runningRes.x + (groupSizeX - 1)) / groupSizeX;
        int threadGroupY = ((int)runningRes.y + (groupSizeY - 1)) / groupSizeY;
        cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
    }

    // First frame (or after invalidation): seed the history with the current data
    // so the temporal pass has something valid to blend against.
    if (!m_HistoryReady)
    {
        var kernel = cs.FindKernel("GTAODenoise_CopyHistory");
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, m_PackedDataTex);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, m_PackedHistory[m_HistoryIndex]);
        const int groupSizeX = 8;
        const int groupSizeY = 8;
        int threadGroupX = ((int)runningRes.x + (groupSizeX - 1)) / groupSizeX;
        int threadGroupY = ((int)runningRes.y + (groupSizeY - 1)) / groupSizeY;
        cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
        m_HistoryReady = true;
    }

    // Temporal denoise: blends current data with history, ping-ponging between
    // the two history buffers.
    using (new ProfilingSample(cmd, "Temporal GTAO", CustomSamplerId.ResolveSSAO.GetSampler()))
    {
        int outputIndex = (m_HistoryIndex + 1) & 1;
        int kernel;
        if (m_RunningFullRes)
        {
            kernel = cs.FindKernel("GTAODenoise_Temporal_FullRes");
        }
        else
        {
            kernel = cs.FindKernel("GTAODenoise_Temporal");
        }
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedData, m_PackedDataTex);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedBlurred, m_PackedDataBlurred);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedHistory, m_PackedHistory[m_HistoryIndex]);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOOutputHistory, m_PackedHistory[outputIndex]);
        // At half res the result lands in an intermediate target that the
        // upsample pass below reads; at full res it goes straight to the output.
        if (m_RunningFullRes)
        {
            cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_AmbientOcclusionTex);
        }
        else
        {
            cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_FinalHalfRes);
        }
        const int groupSizeX = 8;
        const int groupSizeY = 8;
        int threadGroupX = ((int)runningRes.x + (groupSizeX - 1)) / groupSizeX;
        int threadGroupY = ((int)runningRes.y + (groupSizeY - 1)) / groupSizeY;
        cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
        m_HistoryIndex = outputIndex;
    }

    // Half-res path needs a final upsample to the full-resolution output.
    if (!m_RunningFullRes)
    {
        using (new ProfilingSample(cmd, "Upsample GTAO", CustomSamplerId.ResolveSSAO.GetSampler()))
        {
            cs = m_Resources.shaders.GTAOUpsampleCS;
            var kernel = cs.FindKernel("AOUpsample");
            cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams0, aoParams0);
            cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams1, aoParams1);
            cmd.SetComputeVectorParam(cs, HDShaderIDs._AOBufferSize, aoBufferInfo);
            cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedData, m_FinalHalfRes);
            cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_AmbientOcclusionTex);
            // Dispatch over the full camera resolution here (not runningRes).
            const int groupSizeX = 8;
            const int groupSizeY = 8;
            int threadGroupX = ((int)camera.actualWidth + (groupSizeX - 1)) / groupSizeX;
            int threadGroupY = ((int)camera.actualHeight + (groupSizeY - 1)) / groupSizeY;
            cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
        }
    }
}
// Runs the GTAO horizon-search-and-integrate compute pass, producing the raw
// occlusion, bent normals, and the packed data consumed by the denoiser.
private void RenderAO(CommandBuffer cmd, HDCamera camera, SharedRTManager sharedRTManager, int frameCount)
{
    // Grab current settings and make sure the targets match the requested resolution.
    var settings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();
    EnsureRTSize(settings);

    // Resolution the pass runs at; at half res the buffer-size vector stores
    // half dimensions and the matching inverse sizes.
    Vector4 aoBufferInfo;
    Vector2 runningRes;
    if (settings.fullResolution.value)
    {
        runningRes = new Vector2(camera.actualWidth, camera.actualHeight);
        aoBufferInfo = new Vector4(camera.actualWidth, camera.actualHeight, 1.0f / camera.actualWidth, 1.0f / camera.actualHeight);
    }
    else
    {
        runningRes = new Vector2(camera.actualWidth, camera.actualHeight) * 0.5f;
        aoBufferInfo = new Vector4(camera.actualWidth * 0.5f, camera.actualHeight * 0.5f, 2.0f / camera.actualWidth, 2.0f / camera.actualHeight);
    }

    // projMatrix[1,1] is 1/tan(fovY/2); the sign flip matches the projection convention used here.
    float invHalfTanFOV = -camera.mainViewConstants.projMatrix[1, 1];
    float aspectRatio = runningRes.y / runningRes.x;

    Vector4 aoParams0 = new Vector4(
        settings.fullResolution.value ? 0.0f : 1.0f,
        runningRes.y * invHalfTanFOV * 0.25f,
        settings.radius.value,
        settings.stepCount.value
        );

    Vector4 aoParams1 = new Vector4(
        settings.intensity.value,
        1.0f / (settings.radius.value * settings.radius.value),
        (frameCount / 6) % 4, // temporal rotation index
        (frameCount % 6)      // temporal offset index
        );

    // We start from screen space position, so we bake the 1 / resolution into
    // this reconstruction factor as well.
    Vector4 toViewSpaceProj = new Vector4(
        2.0f / (invHalfTanFOV * aspectRatio * runningRes.x),
        2.0f / (invHalfTanFOV * runningRes.y),
        1.0f / (invHalfTanFOV * aspectRatio),
        1.0f / invHalfTanFOV
        );

    // Screen-space sampling radius, scaled relative to a 960x540 reference and
    // clamped to a minimum of 16 pixels.
    float radInPixels = Mathf.Max(16, settings.maximumRadiusInPixels.value * ((runningRes.x * runningRes.y) / (540.0f * 960.0f)));

    Vector4 aoParams2 = new Vector4(
        RTHandles.rtHandleProperties.currentRenderTargetSize.x,
        RTHandles.rtHandleProperties.currentRenderTargetSize.y,
        1.0f / ((float)settings.stepCount.value + 1.0f),
        radInPixels
        );

    var cs = m_Resources.shaders.GTAOCS;
    var kernel = cs.FindKernel("GTAOMain_HalfRes");
    if (m_RunningFullRes)
    {
        kernel = cs.FindKernel("GTAOMain_FullRes");
    }

    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOBufferSize, aoBufferInfo);
    cmd.SetComputeVectorParam(cs, HDShaderIDs._AODepthToViewParams, toViewSpaceProj);
    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams0, aoParams0);
    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams1, aoParams1);
    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams2, aoParams2);

    cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_AmbientOcclusionTex);
    cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._BentNormalsTexture, m_BentNormalTex);
    cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedData, m_PackedDataTex);

    // 8x8 thread groups, rounded up to cover the running resolution.
    const int groupSizeX = 8;
    const int groupSizeY = 8;
    int threadGroupX = ((int)runningRes.x + (groupSizeX - 1)) / groupSizeX;
    int threadGroupY = ((int)runningRes.y + (groupSizeY - 1)) / groupSizeY;
    using (new ProfilingSample(cmd, "GTAO Horizon search and integration", CustomSamplerId.RenderSSAO.GetSampler()))
    {
        cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
    }
}
// Denoises the packed GTAO result: a spatial filter, then temporal accumulation
// using per-camera history render targets, then (when running at half
// resolution) an upsample back to full resolution.
private void DenoiseAO(CommandBuffer cmd, HDCamera camera, SharedRTManager sharedRTManager)
{
    var settings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();
    // Nothing to denoise if AO is disabled for this camera.
    if (!IsActive(camera, settings)) { return; }

    var cs = m_Resources.shaders.GTAODenoiseCS;

    // Resolution the AO passes actually ran at; at half res the buffer-size
    // vector stores half dimensions and the matching inverse sizes.
    Vector4 aoBufferInfo;
    Vector2 runningRes;
    if (m_RunningFullRes)
    {
        runningRes = new Vector2(camera.actualWidth, camera.actualHeight);
        aoBufferInfo = new Vector4(camera.actualWidth, camera.actualHeight, 1.0f / camera.actualWidth, 1.0f / camera.actualHeight);
    }
    else
    {
        runningRes = new Vector2(camera.actualWidth, camera.actualHeight) * 0.5f;
        aoBufferInfo = new Vector4(camera.actualWidth * 0.5f, camera.actualHeight * 0.5f, 2.0f / camera.actualWidth, 2.0f / camera.actualHeight);
    }

    Vector4 aoParams0 = new Vector4(
        settings.fullResolution.value ? 0.0f : 1.0f,
        0, // not needed by the denoise passes
        settings.radius.value,
        settings.stepCount.value
        );

    Vector4 aoParams1 = new Vector4(
        settings.intensity.value,
        1.0f / (settings.radius.value * settings.radius.value),
        0,
        0
        );

    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams0, aoParams0);
    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams1, aoParams1);
    cmd.SetComputeVectorParam(cs, HDShaderIDs._AOBufferSize, aoBufferInfo);

    // Spatial denoise: blurs the packed AO data into m_PackedDataBlurred.
    using (new ProfilingSample(cmd, "Spatial Denoise GTAO", CustomSamplerId.ResolveSSAO.GetSampler()))
    {
        var kernel = cs.FindKernel("GTAODenoise_Spatial");
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedData, m_PackedDataTex);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedBlurred, m_PackedDataBlurred);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_AmbientOcclusionTex);
        // 8x8 thread groups, rounded up to cover the running resolution.
        const int groupSizeX = 8;
        const int groupSizeY = 8;
        int threadGroupX = ((int)runningRes.x + (groupSizeX - 1)) / groupSizeX;
        int threadGroupY = ((int)runningRes.y + (groupSizeY - 1)) / groupSizeY;
        cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
    }

    // Allocator for the per-camera packed-AO history targets (2 frames,
    // sized to the running resolution).
    RTHandleSystem.RTHandle Allocator(string id, int frameIndex, RTHandleSystem rtHandleSystem)
    {
        return (rtHandleSystem.Alloc(Vector2.one * (m_RunningFullRes ? 1.0f : 0.5f), TextureXR.slices, filterMode: FilterMode.Point, colorFormat: GraphicsFormat.R32_UInt, dimension: TextureXR.dimension, useDynamicScale: true, enableRandomWrite: true, name: string.Format("AO Packed history_{0}", frameIndex)));
    }

    // Fetch (or lazily allocate) the current/previous history targets for this camera.
    var currentHistory = camera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion) ?? camera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion, Allocator, 2);
    var historyOutput = camera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion);

    // First frame (or after invalidation): seed the history with the current data
    // so the temporal pass has something valid to blend against.
    if (!m_HistoryReady)
    {
        var kernel = cs.FindKernel("GTAODenoise_CopyHistory");
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, m_PackedDataTex);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, currentHistory);
        const int groupSizeX = 8;
        const int groupSizeY = 8;
        int threadGroupX = ((int)runningRes.x + (groupSizeX - 1)) / groupSizeX;
        int threadGroupY = ((int)runningRes.y + (groupSizeY - 1)) / groupSizeY;
        cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
        m_HistoryReady = true;
    }

    // Temporal denoise: blends current data with the camera history.
    using (new ProfilingSample(cmd, "Temporal GTAO", CustomSamplerId.ResolveSSAO.GetSampler()))
    {
        int kernel;
        if (m_RunningFullRes)
        {
            kernel = cs.FindKernel("GTAODenoise_Temporal_FullRes");
        }
        else
        {
            kernel = cs.FindKernel("GTAODenoise_Temporal");
        }
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedData, m_PackedDataTex);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedBlurred, m_PackedDataBlurred);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedHistory, currentHistory);
        cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOOutputHistory, historyOutput);
        // At half res the result lands in an intermediate target that the
        // upsample pass below reads; at full res it goes straight to the output.
        if (m_RunningFullRes)
        {
            cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_AmbientOcclusionTex);
        }
        else
        {
            cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_FinalHalfRes);
        }
        const int groupSizeX = 8;
        const int groupSizeY = 8;
        int threadGroupX = ((int)runningRes.x + (groupSizeX - 1)) / groupSizeX;
        int threadGroupY = ((int)runningRes.y + (groupSizeY - 1)) / groupSizeY;
        cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
    }

    // Half-res path needs a final upsample to the full-resolution output.
    if (!m_RunningFullRes)
    {
        using (new ProfilingSample(cmd, "Upsample GTAO", CustomSamplerId.ResolveSSAO.GetSampler()))
        {
            cs = m_Resources.shaders.GTAOUpsampleCS;
            var kernel = cs.FindKernel("AOUpsample");
            cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams0, aoParams0);
            cmd.SetComputeVectorParam(cs, HDShaderIDs._AOParams1, aoParams1);
            cmd.SetComputeVectorParam(cs, HDShaderIDs._AOBufferSize, aoBufferInfo);
            cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._AOPackedData, m_FinalHalfRes);
            cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OcclusionTexture, m_AmbientOcclusionTex);
            // Dispatch over the full camera resolution here (not runningRes).
            const int groupSizeX = 8;
            const int groupSizeY = 8;
            int threadGroupX = ((int)camera.actualWidth + (groupSizeX - 1)) / groupSizeX;
            int threadGroupY = ((int)camera.actualHeight + (groupSizeY - 1)) / groupSizeY;
            cmd.DispatchCompute(cs, kernel, threadGroupX, threadGroupY, camera.viewCount);
        }
    }
}