// Renders ambient occlusion through the render graph and returns the AO texture
// to bind as _AmbientOcclusionTexture. When AO is inactive, returns an imported
// black texture so shaders still read neutral (no occlusion) data.
public TextureHandle Render(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthPyramid, TextureHandle motionVectors, int frameCount)
{
    var settings = hdCamera.volumeStack.GetComponent<AmbientOcclusion>();
    TextureHandle result;
    // AO has side effects (as it uses an imported history buffer)
    // So we can't rely on automatic pass stripping. This is why we have to be explicit here.
    if (IsActive(hdCamera, settings))
    {
        {
            EnsureRTSize(settings, hdCamera);
            // Import the per-camera AO history RTs so the graph treats them as persistent resources.
            var historyRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion);
            var currentHistory = renderGraph.ImportTexture(historyRT);
            var outputHistory = renderGraph.ImportTexture(hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion));
            // Effective history resolution = reference size scaled by the RTHandle scale factor.
            Vector2 historySize = new Vector2(historyRT.referenceSize.x * historyRT.scaleFactor.x, historyRT.referenceSize.y * historyRT.scaleFactor.y);
            var rtScaleForHistory = hdCamera.historyRTHandleProperties.rtHandleScale;
            var aoParameters = PrepareRenderAOParameters(hdCamera, renderGraph.rtHandleProperties, historySize * rtScaleForHistory, frameCount);
            // Trace/pack AO from the depth pyramid, then temporally denoise using the history buffers.
            var packedData = RenderAO(renderGraph, aoParameters, depthPyramid);
            result = DenoiseAO(renderGraph, aoParameters, motionVectors, packedData, currentHistory, outputHistory);
        }
    }
    else
    {
        result = renderGraph.ImportTexture(TextureXR.GetBlackTexture(), HDShaderIDs._AmbientOcclusionTexture);
    }
    return(result);
}
// Produces the full-screen debug view for a single screen-space shadow channel
// when the ScreenSpaceShadows debug mode is active. Pushes black when ray tracing
// is unsupported or the requested shadow index was not rendered this frame.
void EvaluateShadowDebugView(CommandBuffer cmd, HDCamera hdCamera)
{
    // If this is the right debug mode and the index we are asking for is in the range
    HDRenderPipeline hdrp = (RenderPipelineManager.currentPipeline as HDRenderPipeline);
    if (FullScreenDebugMode.ScreenSpaceShadows == hdrp.m_CurrentDebugDisplaySettings.data.fullScreenDebugMode)
    {
        if (!hdrp.rayTracingSupported || (m_ScreenSpaceShadowChannelSlot <= hdrp.m_CurrentDebugDisplaySettings.data.screenSpaceShadowIndex))
        {
            // In this case we have not rendered any screenspace shadows, so push a black texture on the debug display
            hdrp.PushFullScreenDebugTexture(hdCamera, cmd, TextureXR.GetBlackTextureArray(), FullScreenDebugMode.ScreenSpaceShadows);
            return;
        }
        // Fetch the buffer where we will store our result
        RTHandle debugResultBuffer = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA0);
        // Generate the debug view
        SSShadowDebugParameters sssdParams = PrepareSSShadowDebugParameters(hdCamera, (int)hdrp.m_CurrentDebugDisplaySettings.data.screenSpaceShadowIndex);
        SSShadowDebugResources sssdResources = PrepareSSShadowDebugResources(debugResultBuffer);
        ExecuteShadowDebugView(cmd, sssdParams, sssdResources);
        // Push the full screen debug texture
        hdrp.PushFullScreenDebugTexture(hdCamera, cmd, debugResultBuffer, FullScreenDebugMode.ScreenSpaceShadows);
    }
}
// Binds neutral probe-volume globals (default buffers plus black atlas textures)
// so shaders sampling probe-volume data read valid, empty resources.
// NOTE(review): hdCamera and frameIndex are unused here — presumably kept to
// mirror the signature of the non-default code path; confirm before removing.
internal void PushProbeVolumesGlobalParamsDefault(HDCamera hdCamera, CommandBuffer cmd, int frameIndex)
{
    cmd.SetGlobalBuffer(HDShaderIDs._ProbeVolumeBounds, s_VisibleProbeVolumeBoundsBufferDefault);
    cmd.SetGlobalBuffer(HDShaderIDs._ProbeVolumeDatas, s_VisibleProbeVolumeDataBufferDefault);
    cmd.SetGlobalTexture(HDShaderIDs._ProbeVolumeAtlasSH, TextureXR.GetBlackTexture3D());
    cmd.SetGlobalTexture(HDShaderIDs._ProbeVolumeAtlasOctahedralDepth, Texture2D.blackTexture);
}
// Renders ambient occlusion through the render graph (RenderGraphResource API
// variant) and returns the AO resource. Falls back to an imported black texture
// bound as _AmbientOcclusionTexture when AO is inactive.
public RenderGraphResource Render(RenderGraph renderGraph, HDCamera hdCamera, RenderGraphResource depthPyramid, RenderGraphResource motionVectors, int frameCount)
{
    var settings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();
    RenderGraphResource result;
    // AO has side effects (as it uses an imported history buffer)
    // So we can't rely on automatic pass stripping. This is why we have to be explicit here.
    if (IsActive(hdCamera, settings))
    {
        {
            EnsureRTSize(settings, hdCamera);
            var aoParameters = PrepareRenderAOParameters(hdCamera, renderGraph.rtHandleProperties, frameCount);
            // Import both AO history RTs so the graph keeps the passes that touch them.
            var currentHistory = renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion));
            var outputHistory = renderGraph.ImportTexture(hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion));
            // Trace/pack AO from the depth pyramid, then temporally denoise with the history buffers.
            var packedData = RenderAO(renderGraph, aoParameters, depthPyramid);
            result = DenoiseAO(renderGraph, aoParameters, motionVectors, packedData, currentHistory, outputHistory);
        }
    }
    else
    {
        result = renderGraph.ImportTexture(TextureXR.GetBlackTexture(), HDShaderIDs._AmbientOcclusionTexture);
    }
    return(result);
}
// Runs the post-process chain through the render graph. Optionally renders
// "after post-process" objects into an offscreen buffer that the final post
// process pass composites. Returns the destination: the back buffer when post
// processing is the final pass, otherwise an intermediate color buffer.
TextureHandle RenderPostProcess(RenderGraph renderGraph, TextureHandle inputColor, TextureHandle depthBuffer, TextureHandle backBuffer, CullingResults cullResults, HDCamera hdCamera)
{
    PostProcessParameters parameters = PreparePostProcess(cullResults, hdCamera);
    // Defaults to black so compositing is a no-op when the AfterPostprocess feature is disabled.
    TextureHandle afterPostProcessBuffer = renderGraph.ImportTexture(TextureXR.GetBlackTexture());
    TextureHandle dest = HDUtils.PostProcessIsFinalPass(parameters.hdCamera) ? backBuffer :
        renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) { colorFormat = GetColorBufferFormat(), name = "Intermediate Postprocess buffer" });
    if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.AfterPostprocess))
    {
        // We render AfterPostProcess objects first into a separate buffer that will be composited in the final post process pass
        using (var builder = renderGraph.AddRenderPass<AfterPostProcessPassData>("After Post-Process", out var passData, ProfilingSampler.Get(HDProfileId.AfterPostProcessing)))
        {
            passData.parameters = parameters;
            passData.afterPostProcessBuffer = builder.UseColorBuffer(renderGraph.CreateTexture(
                new TextureDesc(Vector2.one, true, true) { colorFormat = GraphicsFormat.R8G8B8A8_SRGB, clearBuffer = true, clearColor = Color.black, name = "OffScreen AfterPostProcess" }), 0);
            if (passData.parameters.useDepthBuffer)
            {
                passData.depthStencilBuffer = builder.UseDepthBuffer(depthBuffer, DepthAccess.ReadWrite);
            }
            // Opaque then transparent after-post-process renderer lists.
            passData.opaqueAfterPostprocessRL = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.opaqueAfterPPDesc));
            passData.transparentAfterPostprocessRL = builder.UseRendererList(renderGraph.CreateRendererList(passData.parameters.transparentAfterPPDesc));
            builder.SetRenderFunc(
                (AfterPostProcessPassData data, RenderGraphContext ctx) =>
                {
                    RenderAfterPostProcess(data.parameters
                        , ctx.resources.GetRendererList(data.opaqueAfterPostprocessRL)
                        , ctx.resources.GetRendererList(data.transparentAfterPostprocessRL)
                        , ctx.renderContext, ctx.cmd);
                });
            afterPostProcessBuffer = passData.afterPostProcessBuffer;
        }
    }
    m_PostProcessSystem.Render(
        renderGraph,
        parameters.hdCamera,
        parameters.blueNoise,
        inputColor,
        afterPostProcessBuffer,
        depthBuffer,
        dest,
        parameters.flipYInPostProcess
    );
    return(dest);
}
// Renders ambient occlusion for the camera (non-render-graph path). Binds a
// neutral black texture and zero params when AO is inactive; otherwise runs
// either the ray-traced AO path (when compiled in and enabled) or the compute
// dispatch + post-dispatch resolve path.
public void Render(CommandBuffer cmd, HDCamera camera, SharedRTManager sharedRTManager, ScriptableRenderContext renderContext, int frameCount)
{
    var settings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();
    if (!IsActive(camera, settings))
    {
        // No AO applied - neutral is black, see the comment in the shaders
        cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, TextureXR.GetBlackTexture());
        cmd.SetGlobalVector(HDShaderIDs._AmbientOcclusionParam, Vector4.zero);
        return;
    }
    else
    {
#if ENABLE_RAYTRACING
        if (settings.enableRaytracing.value)
        {
            m_RaytracingAmbientOcclusion.RenderAO(camera, cmd, m_AmbientOcclusionTex, renderContext, frameCount);
        }
        else
#endif
        {
            Dispatch(cmd, camera, sharedRTManager);
            PostDispatchWork(cmd, camera, sharedRTManager);
        }
    }
}
// Gathers the input, temporary, in/out and output surfaces for the
// array-variant temporal filter pass. The velocity buffer is always a neutral
// black texture in this path.
TemporalFilterArrayResources PrepareTemporalFilterArrayResources(HDCamera hdCamera, RTHandle noisyBuffer, RTHandle distanceBuffer, RTHandle validationBuffer, RTHandle historyBuffer, RTHandle validationHistoryBuffer, RTHandle distanceHistorySignal, RTHandle outputBuffer, RTHandle outputDistanceSignal)
{
    TemporalFilterArrayResources tfaResources = new TemporalFilterArrayResources();
    // Input buffers
    tfaResources.depthStencilBuffer = m_SharedRTManager.GetDepthStencilBuffer();
    tfaResources.normalBuffer = m_SharedRTManager.GetNormalBuffer();
    tfaResources.velocityBuffer = TextureXR.GetBlackTexture();
    tfaResources.historyDepthTexture = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
    tfaResources.historyNormalTexture = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal);
    tfaResources.noisyBuffer = noisyBuffer;
    tfaResources.distanceBuffer = distanceBuffer;
    tfaResources.motionVectorBuffer = m_SharedRTManager.GetMotionVectorsBuffer();
    // Temporary buffers
    tfaResources.validationBuffer = validationBuffer;
    // InOut buffers
    tfaResources.historyBuffer = historyBuffer;
    tfaResources.validationHistoryBuffer = validationHistoryBuffer;
    tfaResources.distanceHistorySignal = distanceHistorySignal;
    // Output buffers
    tfaResources.outputBuffer = outputBuffer;
    tfaResources.outputDistanceSignal = outputDistanceSignal;
    return(tfaResources);
}
// Binds the shared XR black texture to every buffer slot so shaders sampling
// these globals read neutral data when the real buffers are not rendered.
public void BindBlackTextures(CommandBuffer cmd)
{
    var blackTexture = TextureXR.GetBlackTexture();
    for (int slot = 0; slot < m_BufferCount; slot++)
        cmd.SetGlobalTexture(m_TextureShaderIDs[slot], blackTexture);
}
// Renders ambient occlusion for the camera (variant consulting the ray-tracing
// environment). Binds a neutral black texture when AO is inactive; otherwise
// runs either the ray-traced AO path (when compiled in, enabled, and an rt
// environment exists) or the compute dispatch path.
public void Render(CommandBuffer cmd, HDCamera camera, ScriptableRenderContext renderContext, int frameCount)
{
    var settings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();
    if (!IsActive(camera, settings))
    {
        // No AO applied - neutral is black, see the comment in the shaders
        cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, TextureXR.GetBlackTexture());
        return;
    }
    else
    {
#if ENABLE_RAYTRACING
        HDRaytracingEnvironment rtEnvironement = m_RayTracingManager.CurrentEnvironment();
        if (camera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) && rtEnvironement != null && settings.rayTracing.value)
        {
            m_RaytracingAmbientOcclusion.RenderAO(camera, cmd, m_AmbientOcclusionTex, renderContext, frameCount);
        }
        else
#endif
        {
            Dispatch(cmd, camera, frameCount);
            PostDispatchWork(cmd, camera);
        }
    }
}
// Finalizes AO after the compute dispatch: resolves the MSAA AO buffer when
// MSAA is enabled, binds the AO texture/params globally, and pushes the AO
// full-screen debug texture. Binds neutral black when AO is inactive.
public void PostDispatchWork(CommandBuffer cmd, HDCamera camera, SharedRTManager sharedRTManager)
{
    // Grab current settings
    var settings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();
    if (!IsActive(camera, settings))
    {
        // No AO applied - neutral is black, see the comment in the shaders
        cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, TextureXR.GetBlackTexture());
        cmd.SetGlobalVector(HDShaderIDs._AmbientOcclusionParam, Vector4.zero);
        return;
    }
    // MSAA Resolve
    if (camera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
    {
        using (new ProfilingSample(cmd, "Resolve AO Buffer", CustomSamplerId.ResolveSSAO.GetSampler()))
        {
            HDUtils.SetRenderTarget(cmd, camera, m_AmbientOcclusionTex);
            m_ResolvePropertyBlock.SetTexture(HDShaderIDs._DepthValuesTexture, sharedRTManager.GetDepthValuesTexture());
            m_ResolvePropertyBlock.SetTexture(HDShaderIDs._MultiAmbientOcclusionTexture, m_MultiAmbientOcclusionTex);
            // Full-screen triangle resolve (3 vertices, 1 instance).
            cmd.DrawProcedural(Matrix4x4.identity, m_ResolveMaterial, 0, MeshTopology.Triangles, 3, 1, m_ResolvePropertyBlock);
        }
    }
    cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, m_AmbientOcclusionTex);
    // Only the w component (direct lighting strength) is non-zero here.
    cmd.SetGlobalVector(HDShaderIDs._AmbientOcclusionParam, new Vector4(0f, 0f, 0f, settings.directLightingStrength.value));
    // TODO: All the pushdebug stuff should be centralized somewhere
    (RenderPipelineManager.currentPipeline as HDRenderPipeline).PushFullScreenDebugTexture(camera, cmd, m_AmbientOcclusionTex, FullScreenDebugMode.SSAO);
}
// Gathers every surface the temporal filter pass reads or writes.
// Falls back to a neutral black motion-vector texture when motion vectors
// are disabled for this camera.
public TemporalFilterResources PrepareTemporalFilterResources(HDCamera hdCamera, RTHandle validationBuffer, RTHandle noisyBuffer, RTHandle historyBuffer, RTHandle outputBuffer)
{
    bool hasMotionVectors = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MotionVectors);

    var resources = new TemporalFilterResources();
    // Input buffers
    resources.depthStencilBuffer = m_SharedRTManager.GetDepthStencilBuffer();
    resources.normalBuffer = m_SharedRTManager.GetNormalBuffer();
    resources.velocityBuffer = TextureXR.GetBlackTexture();
    resources.historyDepthTexture = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
    resources.historyNormalTexture = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal);
    resources.noisyBuffer = noisyBuffer;
    resources.motionVectorBuffer = hasMotionVectors
        ? m_SharedRTManager.GetMotionVectorsBuffer()
        : TextureXR.GetBlackTexture();
    // Temporary buffer
    resources.validationBuffer = validationBuffer;
    // Output buffers
    resources.historyBuffer = historyBuffer;
    resources.outputBuffer = outputBuffer;
    return resources;
}
// Binds the Adaptive Probe Volume runtime resources (index buffer plus packed
// L0/L1 — and optionally L2 — SH textures) as globals. When the feature is off
// or the resources are not all valid, binds neutral fallbacks (an empty index
// buffer and black 3D textures) so shaders still have valid bindings.
private void BindAPVRuntimeResources(CommandBuffer cmdBuffer, HDCamera hdCamera)
{
    bool needToBindNeutral = true;
    // Do this only if the framesetting is on, otherwise there is some hidden cost
    if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.ProbeVolume))
    {
        var refVolume = ProbeReferenceVolume.instance;
        ProbeReferenceVolume.RuntimeResources rr = refVolume.GetRuntimeResources();
        // All resources must be present; a partial set still falls back to neutral.
        bool validResources = rr.index != null && rr.L0_L1rx != null && rr.L1_G_ry != null && rr.L1_B_rz != null;
        if (validResources)
        {
            cmdBuffer.SetGlobalBuffer(HDShaderIDs._APVResIndex, rr.index);
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL0_L1Rx, rr.L0_L1rx);
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1G_L1Ry, rr.L1_G_ry);
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1B_L1Rz, rr.L1_B_rz);
            // L2 textures only exist when the pipeline is configured for SH L2 bands.
            if (m_Asset.currentPlatformRenderPipelineSettings.probeVolumeSHBands == ProbeVolumeSHBands.SphericalHarmonicsL2)
            {
                cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL2_0, rr.L2_0);
                cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL2_1, rr.L2_1);
                cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL2_2, rr.L2_2);
                cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL2_3, rr.L2_3);
            }
            needToBindNeutral = false;
        }
    }
    if (needToBindNeutral)
    {
        // Lazy init the empty buffer
        if (m_EmptyIndexBuffer == null)
        {
            // Size doesn't really matter here, anything can be bound as long as it is a valid compute buffer.
            m_EmptyIndexBuffer = new ComputeBuffer(1, sizeof(uint), ComputeBufferType.Structured);
        }
        cmdBuffer.SetGlobalBuffer(HDShaderIDs._APVResIndex, m_EmptyIndexBuffer);
        cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL0_L1Rx, TextureXR.GetBlackTexture3D());
        cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1G_L1Ry, TextureXR.GetBlackTexture3D());
        cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1B_L1Rz, TextureXR.GetBlackTexture3D());
        if (m_Asset.currentPlatformRenderPipelineSettings.probeVolumeSHBands == ProbeVolumeSHBands.SphericalHarmonicsL2)
        {
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL2_0, TextureXR.GetBlackTexture3D());
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL2_1, TextureXR.GetBlackTexture3D());
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL2_2, TextureXR.GetBlackTexture3D());
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL2_3, TextureXR.GetBlackTexture3D());
        }
    }
}
// Imports the shared default textures (black/white/clear/magenta, 2D/array/3D,
// XR variants) into the render graph so passes can reference them as handles.
internal void InitializeForRendering(RenderGraph renderGraph)
{
    blackTexture = renderGraph.ImportTexture(m_BlackTexture2D);
    whiteTexture = renderGraph.ImportTexture(m_WhiteTexture2D);
    clearTextureXR = renderGraph.ImportTexture(TextureXR.GetClearTexture());
    magentaTextureXR = renderGraph.ImportTexture(TextureXR.GetMagentaTexture());
    blackTextureXR = renderGraph.ImportTexture(TextureXR.GetBlackTexture());
    blackTextureArrayXR = renderGraph.ImportTexture(TextureXR.GetBlackTextureArray());
    blackUIntTextureXR = renderGraph.ImportTexture(TextureXR.GetBlackUIntTexture());
    blackTexture3DXR = renderGraph.ImportTexture(TextureXR.GetBlackTexture3D());
    whiteTextureXR = renderGraph.ImportTexture(TextureXR.GetWhiteTexture());
}
// Gathers the resources for the ray-traced area-light screen-space shadow pass.
// GBuffers are only valid in deferred lit shader mode; in forward they are
// replaced with neutral black textures so the bindings stay valid.
SSSAreaRayTraceResources PrepareSSSAreaRayTraceResources(HDCamera hdCamera, RTHandle directionBuffer, RTHandle rayLengthBuffer, RTHandle intermediateBufferRGBA0, RTHandle intermediateBufferRGBA1, RTHandle intermediateBufferRG0, RTHandle shadowHistoryArray, RTHandle analyticHistoryArray)
{
    SSSAreaRayTraceResources sssartResources = new SSSAreaRayTraceResources();
    // Input Buffers
    sssartResources.depthStencilBuffer = m_SharedRTManager.GetDepthStencilBuffer();
    sssartResources.normalBuffer = m_SharedRTManager.GetNormalBuffer();
    sssartResources.lightData = m_LightLoopLightData.lightData;
    if (hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred)
    {
        sssartResources.gbuffer0 = m_GbufferManager.GetBuffer(0);
        sssartResources.gbuffer1 = m_GbufferManager.GetBuffer(1);
        sssartResources.gbuffer2 = m_GbufferManager.GetBuffer(2);
        sssartResources.gbuffer3 = m_GbufferManager.GetBuffer(3);
    }
    else
    {
        sssartResources.gbuffer0 = TextureXR.GetBlackTexture();
        sssartResources.gbuffer1 = TextureXR.GetBlackTexture();
        sssartResources.gbuffer2 = TextureXR.GetBlackTexture();
        sssartResources.gbuffer3 = TextureXR.GetBlackTexture();
    }
    sssartResources.cookieAtlasTexture = m_TextureCaches.lightCookieManager.atlasTexture;
    sssartResources.shadowHistoryArray = shadowHistoryArray;
    sssartResources.analyticHistoryArray = analyticHistoryArray;
    // Intermediate buffers
    sssartResources.directionBuffer = directionBuffer;
    sssartResources.rayLengthBuffer = rayLengthBuffer;
    sssartResources.intermediateBufferRGBA0 = intermediateBufferRGBA0;
    sssartResources.intermediateBufferRGBA1 = intermediateBufferRGBA1;
    sssartResources.intermediateBufferRG0 = intermediateBufferRG0;
    // Debug textures
    RayCountManager rayCountManager = GetRayCountManager();
    sssartResources.rayCountTexture = rayCountManager.GetRayCountTexture();
    // Output buffers
    sssartResources.screenSpaceShadowTextureArray = m_ScreenSpaceShadowTextureArray;
    return(sssartResources);
}
// Pushes decal global shader parameters. When decals are enabled, binds the
// decal buffers and atlas resolution; otherwise binds black fallbacks so the
// texture slots remain valid on all platforms.
// NOTE(review): the fallback loop duplicates what BindBlackTextures does —
// confirm whether both live on the same manager before deduplicating.
public void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd)
{
    if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.Decals))
    {
        cmd.SetGlobalInt(HDShaderIDs._EnableDecals, enableDecals ? 1 : 0);
        cmd.SetGlobalVector(HDShaderIDs._DecalAtlasResolution, new Vector2(HDUtils.hdrpSettings.decalSettings.atlasWidth, HDUtils.hdrpSettings.decalSettings.atlasHeight));
        BindBufferAsTextures(cmd);
    }
    else
    {
        cmd.SetGlobalInt(HDShaderIDs._EnableDecals, 0);
        // We still bind black textures to make sure that something is bound (can be a problem on some platforms)
        for (int i = 0; i < m_BufferCount; ++i)
        {
            cmd.SetGlobalTexture(m_TextureShaderIDs[i], TextureXR.GetBlackTexture());
        }
    }
}
// Gathers the resources for the ray-traced area-light shadow pass. GBuffers
// fall back to black textures in forward mode. Note: intermediateBufferRGBA0
// is repurposed here as the pass's output shadow texture rather than as an
// intermediate.
RTSAreaRayTraceResources PrepareRTSAreaRayTraceResources(HDCamera hdCamera, RTHandle directionBuffer, RTHandle rayLengthBuffer, RTHandle intermediateBufferRGBA0, RTHandle intermediateBufferRGBA1, RTHandle intermediateBufferRG0, RTHandle shadowHistoryArray, RTHandle analyticHistoryArray)
{
    RTSAreaRayTraceResources rtsartResources = new RTSAreaRayTraceResources();
    // Input Buffers
    rtsartResources.depthStencilBuffer = m_SharedRTManager.GetDepthStencilBuffer();
    rtsartResources.normalBuffer = m_SharedRTManager.GetNormalBuffer();
    rtsartResources.motionVectorsBuffer = m_SharedRTManager.GetMotionVectorsBuffer();
    if (hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred)
    {
        rtsartResources.gbuffer0 = m_GbufferManager.GetBuffer(0);
        rtsartResources.gbuffer1 = m_GbufferManager.GetBuffer(1);
        rtsartResources.gbuffer2 = m_GbufferManager.GetBuffer(2);
        rtsartResources.gbuffer3 = m_GbufferManager.GetBuffer(3);
    }
    else
    {
        rtsartResources.gbuffer0 = TextureXR.GetBlackTexture();
        rtsartResources.gbuffer1 = TextureXR.GetBlackTexture();
        rtsartResources.gbuffer2 = TextureXR.GetBlackTexture();
        rtsartResources.gbuffer3 = TextureXR.GetBlackTexture();
    }
    rtsartResources.shadowHistoryArray = shadowHistoryArray;
    rtsartResources.analyticHistoryArray = analyticHistoryArray;
    // Intermediate buffers
    rtsartResources.directionBuffer = directionBuffer;
    rtsartResources.rayLengthBuffer = rayLengthBuffer;
    rtsartResources.intermediateBufferRGBA1 = intermediateBufferRGBA1;
    rtsartResources.intermediateBufferRG0 = intermediateBufferRG0;
    // Debug textures
    RayCountManager rayCountManager = GetRayCountManager();
    rtsartResources.rayCountTexture = rayCountManager.GetRayCountTexture();
    // Output texture
    rtsartResources.outputShadowTexture = intermediateBufferRGBA0;
    return(rtsartResources);
}
// Binds every managed RT as a global texture, then binds the shadow-mask and
// light-layers aliases so shader code does not need to know which slot holds them.
public override void BindBufferAsTextures(CommandBuffer cmd)
{
    for (int bufferIndex = 0; bufferIndex < m_BufferCount; bufferIndex++)
        cmd.SetGlobalTexture(m_TextureShaderIDs[bufferIndex], m_RTs[bufferIndex]);

    // Aliases for gbuffer usage simplify shader code (no need to check which
    // gbuffer is the shadow mask or the light layers).
    if (m_ShadowMaskIndex >= 0)
        cmd.SetGlobalTexture(HDShaderIDs._ShadowMaskTexture, m_RTs[m_ShadowMaskIndex]);

    if (m_LightLayers >= 0)
    {
        cmd.SetGlobalTexture(HDShaderIDs._LightLayersTexture, m_RTs[m_LightLayers]);
    }
    else
    {
        // Never read in this case, but something must be bound because the
        // shader-side read sits behind a runtime branch.
        cmd.SetGlobalTexture(HDShaderIDs._LightLayersTexture, TextureXR.GetWhiteTexture());
    }
}
// Pushes neutral probe-volume globals: feature disabled, default buffers,
// black atlases, zeroed filter parameters, and the camera's ambient probe as
// the SH fallback (needed when the ShaderConfig enables the feature but the
// pipeline asset disables it).
// NOTE(review): frameIndex is unused here — presumably kept for signature
// parity with the active code path; confirm before removing.
internal void PushProbeVolumesGlobalParamsDefault(HDCamera hdCamera, CommandBuffer cmd, int frameIndex)
{
    cmd.SetGlobalInt(HDShaderIDs._EnableProbeVolumes, 0);
    cmd.SetGlobalBuffer(HDShaderIDs._ProbeVolumeBounds, s_VisibleProbeVolumeBoundsBufferDefault);
    cmd.SetGlobalBuffer(HDShaderIDs._ProbeVolumeDatas, s_VisibleProbeVolumeDataBufferDefault);
    cmd.SetGlobalInt(HDShaderIDs._ProbeVolumeCount, 0);
    cmd.SetGlobalTexture(HDShaderIDs._ProbeVolumeAtlasSH, TextureXR.GetBlackTexture3D());
    cmd.SetGlobalTexture(HDShaderIDs._ProbeVolumeAtlasOctahedralDepth, Texture2D.blackTexture);
    cmd.SetGlobalInt(HDShaderIDs._ProbeVolumeLeakMitigationMode, (int)LeakMitigationMode.NormalBias);
    cmd.SetGlobalFloat(HDShaderIDs._ProbeVolumeNormalBiasWS, 0.0f);
    cmd.SetGlobalFloat(HDShaderIDs._ProbeVolumeBilateralFilterWeightMin, 0.0f);
    cmd.SetGlobalFloat(HDShaderIDs._ProbeVolumeBilateralFilterWeight, 0.0f);
    {
        // Need to populate ambient probe fallback even in the default case,
        // As if the feature is enabled in the ShaderConfig, but disabled in the HDRenderPipelineAsset, we need to fallback to ambient probe only.
        SphericalHarmonicsL2 ambientProbeFallbackSH = m_SkyManager.GetAmbientProbe(hdCamera);
        SphericalHarmonicMath.PackCoefficients(s_AmbientProbeFallbackPackedCoeffs, ambientProbeFallbackSH);
        cmd.SetGlobalVectorArray(HDShaderIDs._ProbeVolumeAmbientProbeFallbackPackedCoeffs, s_AmbientProbeFallbackPackedCoeffs);
    }
}
// Binds the Adaptive Probe Volume runtime resources (index buffer plus
// separate L0/L1_R/L1_G/L1_B SH textures — older packing than the L0_L1rx
// variant). When the feature is off or any resource is missing, binds neutral
// fallbacks so shaders still have valid bindings.
private void BindAPVRuntimeResources(CommandBuffer cmdBuffer, HDCamera hdCamera)
{
    bool needToBindNeutral = true;
    // Do this only if the framesetting is on, otherwise there is some hidden cost
    if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.ProbeVolume))
    {
        var refVolume = ProbeReferenceVolume.instance;
        ProbeReferenceVolume.RuntimeResources rr = refVolume.GetRuntimeResources();
        // All resources must be present; a partial set still falls back to neutral.
        bool validResources = rr.index != null && rr.L0 != null && rr.L1_R != null && rr.L1_G != null && rr.L1_B != null;
        if (validResources)
        {
            cmdBuffer.SetGlobalBuffer(HDShaderIDs._APVResIndex, rr.index);
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL0, rr.L0);
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1_R, rr.L1_R);
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1_G, rr.L1_G);
            cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1_B, rr.L1_B);
            needToBindNeutral = false;
        }
    }
    if (needToBindNeutral)
    {
        // Lazy init the empty buffer
        if (m_EmptyIndexBuffer == null)
        {
            // Size doesn't really matter here, anything can be bound as long as it is a valid compute buffer.
            m_EmptyIndexBuffer = new ComputeBuffer(1, sizeof(uint), ComputeBufferType.Structured);
        }
        cmdBuffer.SetGlobalBuffer(HDShaderIDs._APVResIndex, m_EmptyIndexBuffer);
        cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL0, TextureXR.GetBlackTexture3D());
        cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1_R, TextureXR.GetBlackTexture3D());
        cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1_G, TextureXR.GetBlackTexture3D());
        cmdBuffer.SetGlobalTexture(HDShaderIDs._APVResL1_B, TextureXR.GetBlackTexture3D());
    }
}
// Denoiser variant for non history array
// Temporally denoises a noisy signal in three compute steps: (1) validate the
// history against current depth/normals, (2) accumulate the noisy signal into
// the validated history (single-channel or color kernel), (3) copy the result
// back into the history buffer. When no depth/normal history exists yet, the
// noisy signal is simply copied to both history and output.
public void DenoiseBuffer(CommandBuffer cmd, HDCamera hdCamera, RTHandle noisySignal, RTHandle historySignal, RTHandle outputSignal, bool singleChannel = true, float historyValidity = 1.0f)
{
    // If we do not have a depth and normal history buffers, we can skip right away
    var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
    var historyNormalBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal);
    if (historyDepthBuffer == null || historyNormalBuffer == null)
    {
        HDUtils.BlitCameraTexture(cmd, noisySignal, historySignal);
        HDUtils.BlitCameraTexture(cmd, noisySignal, outputSignal);
        return;
    }
    // Fetch texture dimensions
    int texWidth = hdCamera.actualWidth;
    int texHeight = hdCamera.actualHeight;
    // Evaluate the dispatch parameters (8x8 thread groups, rounded up).
    int areaTileSize = 8;
    int numTilesX = (texWidth + (areaTileSize - 1)) / areaTileSize;
    int numTilesY = (texHeight + (areaTileSize - 1)) / areaTileSize;
    // Request the intermediate buffer we need
    RTHandle validationBuffer = m_RenderPipeline.GetRayTracingBuffer(InternalRayTracingBuffers.R0);
    // First of all we need to validate the history to know where we can or cannot use the history signal
    int m_KernelFilter = m_TemporalFilterCS.FindKernel("ValidateHistory");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryNormalBufferTexture, historyNormalBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBufferRW, validationBuffer);
    // No velocity input in this variant; bind neutral black.
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, TextureXR.GetBlackTexture());
    cmd.SetComputeFloatParam(m_TemporalFilterCS, HDShaderIDs._HistoryValidity, historyValidity);
    cmd.SetComputeFloatParam(m_TemporalFilterCS, HDShaderIDs._PixelSpreadAngleTangent, HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight));
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);
    // Now that we have validated our history, let's accumulate
    m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? "TemporalAccumulationSingle" : "TemporalAccumulationColor");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, noisySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryBuffer, historySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, outputSignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBuffer, validationBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, TextureXR.GetBlackTexture());
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);
    // Make sure to copy the new-accumulated signal in our history buffer
    m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? "CopyHistorySingle" : "CopyHistoryColor");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, outputSignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, historySignal);
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);
}
// Renders tier-1 ray-traced reflections into outputTexture. Two tracing modes:
// deferred (compute generates ray directions, then ray-traced deferred lighting
// resolves them) or forward (DispatchRays directly), each at full or half
// resolution. The result is then upscaled/integrated by a filter kernel and,
// if denoising is enabled, temporally denoised against the reflection history.
public void RenderReflectionsT1(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
{
    // Fetch the required resources
    BlueNoise blueNoise = GetBlueNoiseManager();
    RayTracingShader reflectionShaderRT = m_Asset.renderPipelineRayTracingResources.reflectionRaytracingRT;
    ComputeShader reflectionShaderCS = m_Asset.renderPipelineRayTracingResources.reflectionRaytracingCS;
    ComputeShader reflectionFilter = m_Asset.renderPipelineRayTracingResources.reflectionBilateralFilterCS;
    // Fetch all the settings
    var settings = VolumeManager.instance.stack.GetComponent<ScreenSpaceReflection>();
    LightCluster lightClusterSettings = VolumeManager.instance.stack.GetComponent<LightCluster>();
    RayTracingSettings rtSettings = VolumeManager.instance.stack.GetComponent<RayTracingSettings>();
    // Texture dimensions
    int texWidth = hdCamera.actualWidth;
    int texHeight = hdCamera.actualHeight;
    // Evaluate the dispatch parameters
    int areaTileSize = 8;
    int numTilesXHR = 0, numTilesYHR = 0;
    int currentKernel = 0;
    RenderTargetIdentifier clearCoatMaskTexture;
    using (new ProfilingSample(cmd, "Ray Traced Reflection", CustomSamplerId.RaytracingIntegrateReflection.GetSampler()))
    {
        if (settings.deferredMode.value)
        {
            // Fetch the new sample kernel
            currentKernel = reflectionShaderCS.FindKernel(settings.fullResolution.value ? "RaytracingReflectionsFullRes" : "RaytracingReflectionsHalfRes");
            // Inject the ray-tracing sampling data
            blueNoise.BindDitheredRNGData8SPP(cmd);
            // Bind all the required textures
            cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
            // Clear coat mask lives in gbuffer slot 2; black fallback in forward mode.
            clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();
            cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);
            // Bind all the required scalars
            cmd.SetComputeFloatParam(reflectionShaderCS, HDShaderIDs._RaytracingIntensityClamp, settings.clampValue.value);
            cmd.SetComputeFloatParam(reflectionShaderCS, HDShaderIDs._RaytracingReflectionMinSmoothness, settings.minSmoothness.value);
            cmd.SetComputeIntParam(reflectionShaderCS, HDShaderIDs._RaytracingIncludeSky, settings.reflectSky.value ? 1 : 0);
            // Bind the sampling data
            int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)m_FrameCount % 8;
            cmd.SetComputeIntParam(reflectionShaderCS, HDShaderIDs._RaytracingFrameIndex, frameIndex);
            // Bind the output buffers
            cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._RaytracingDirectionBuffer, m_ReflIntermediateTexture1);
            if (settings.fullResolution.value)
            {
                // Evaluate the dispatch parameters
                numTilesXHR = (texWidth + (areaTileSize - 1)) / areaTileSize;
                numTilesYHR = (texHeight + (areaTileSize - 1)) / areaTileSize;
            }
            else
            {
                // Evaluate the dispatch parameters
                numTilesXHR = (texWidth / 2 + (areaTileSize - 1)) / areaTileSize;
                numTilesYHR = (texHeight / 2 + (areaTileSize - 1)) / areaTileSize;
            }
            // Compute the directions
            cmd.DispatchCompute(reflectionShaderCS, currentKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);
            // Prepare the components for the deferred lighting
            DeferredLightingRTParameters deferredParamters = PrepareReflectionDeferredLightingRTParameters(hdCamera);
            DeferredLightingRTResources deferredResources = PrepareDeferredLightingRTResources(hdCamera, m_ReflIntermediateTexture1, m_ReflIntermediateTexture0);
            // Evaluate the deferred lighting
            RenderRaytracingDeferredLighting(cmd, deferredParamters, deferredResources);
        }
        else
        {
            // Bind all the required data for ray tracing
            BindRayTracedReflectionData(cmd, hdCamera, reflectionShaderRT, settings, lightClusterSettings, rtSettings);
            // Run the computation
            if (settings.fullResolution.value)
            {
                cmd.DispatchRays(reflectionShaderRT, m_RayGenReflectionFullResName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, (uint)hdCamera.viewCount);
            }
            else
            {
                // Run the computation
                cmd.DispatchRays(reflectionShaderRT, m_RayGenReflectionHalfResName, (uint)(hdCamera.actualWidth / 2), (uint)(hdCamera.actualHeight / 2), (uint)hdCamera.viewCount);
            }
        }
        // Fetch the right filter to use
        if (settings.fullResolution.value)
        {
            currentKernel = reflectionFilter.FindKernel("ReflectionIntegrationUpscaleFullRes");
        }
        else
        {
            currentKernel = reflectionFilter.FindKernel("ReflectionIntegrationUpscaleHalfRes");
        }
        // Inject all the parameters for the compute
        cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrLightingTextureRW, m_ReflIntermediateTexture0);
        cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrHitPointTexture, m_ReflIntermediateTexture1);
        cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
        cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
        cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._BlueNoiseTexture, blueNoise.textureArray16RGB);
        cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_RaytracingReflectionTexture", outputTexture);
        cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);
        cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._SpatialFilterRadius, settings.upscaleRadius.value);
        // Denoise radius of 0 disables the denoise contribution in the filter.
        cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._RaytracingDenoiseRadius, settings.denoise.value ? settings.denoiserRadius.value : 0);
        cmd.SetComputeFloatParam(reflectionFilter, HDShaderIDs._RaytracingReflectionMinSmoothness, settings.minSmoothness.value);
        numTilesXHR = (texWidth + (areaTileSize - 1)) / areaTileSize;
        numTilesYHR = (texHeight + (areaTileSize - 1)) / areaTileSize;
        // Bind the right texture for clear coat support
        clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();
        cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);
        // Compute the texture
        cmd.DispatchCompute(reflectionFilter, currentKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);
    }
    using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
    {
        if (settings.denoise.value)
        {
            // Grab the history buffer (allocated on first use).
            RTHandle reflectionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);
            HDSimpleDenoiser simpleDenoiser = GetSimpleDenoiser();
            simpleDenoiser.DenoiseBuffer(cmd, hdCamera, outputTexture, reflectionHistory, m_ReflIntermediateTexture0, settings.denoiserRadius.value, singleChannel: false);
            HDUtils.BlitCameraTexture(cmd, m_ReflIntermediateTexture0, outputTexture);
        }
    }
}
// Binds every input resource, shader constant and output target required by the
// ray traced reflection shader, prior to the DispatchRays call issued by the caller.
// Note: lightClusterSettings is currently unused here (the cluster itself is fetched
// via RequestLightCluster()); it is kept in the signature for caller compatibility.
void BindRayTracedReflectionData(CommandBuffer cmd, HDCamera hdCamera, RayTracingShader reflectionShader, ScreenSpaceReflection settings, LightCluster lightClusterSettings, RayTracingSettings rtSettings)
{
    // Grab the acceleration structures and the light cluster to use
    RayTracingAccelerationStructure accelerationStructure = RequestAccelerationStructure();
    HDRaytracingLightCluster lightCluster = RequestLightCluster();
    BlueNoise blueNoise = GetBlueNoiseManager();

    // Define the shader pass to use for the reflection pass
    cmd.SetRayTracingShaderPass(reflectionShader, "IndirectDXR");

    // Set the acceleration structure for the pass
    cmd.SetRayTracingAccelerationStructure(reflectionShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

    // Global reflection parameters.
    // These are single scalars, so we use the singular SetRayTracing*Param entry points
    // rather than the params-array *Params overloads (consistent with the rest of this method).
    cmd.SetRayTracingFloatParam(reflectionShader, HDShaderIDs._RaytracingIntensityClamp, settings.clampValue.value);
    cmd.SetRayTracingFloatParam(reflectionShader, HDShaderIDs._RaytracingReflectionMinSmoothness, settings.minSmoothness.value);
    cmd.SetRayTracingIntParam(reflectionShader, HDShaderIDs._RaytracingIncludeSky, settings.reflectSky.value ? 1 : 0);

    // Inject the ray generation data
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtSettings.rayBias.value);
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, settings.rayLength.value);
    cmd.SetRayTracingIntParam(reflectionShader, HDShaderIDs._RaytracingNumSamples, settings.sampleCount.value);

    // When TAA is enabled, reuse its frame index so the sampling pattern stays in sync
    // with the temporal history; otherwise cycle over 8 frames.
    int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)m_FrameCount % 8;
    cmd.SetRayTracingIntParam(reflectionShader, HDShaderIDs._RaytracingFrameIndex, frameIndex);

    // Inject the ray-tracing sampling data
    blueNoise.BindDitheredRNGData8SPP(cmd);

    // Set the data for the ray generation
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrLightingTextureRW, m_ReflIntermediateTexture0);
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrHitPointTexture, m_ReflIntermediateTexture1);
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());

    // Set the ray count texture (debug instrumentation; only read when enabled)
    RayCountManager rayCountManager = GetRayCountManager();
    cmd.SetRayTracingIntParam(reflectionShader, HDShaderIDs._RayCountEnabled, rayCountManager.RayCountIsEnabled());
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._RayCountTexture, rayCountManager.GetRayCountTexture());

    // Compute the pixel spread value
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingPixelSpreadAngle, GetPixelSpreadAngle(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight));

    // Bind the lightLoop data
    lightCluster.BindLightClusterData(cmd);

    // Note: Just in case, we rebind the directional light data (in case they were not)
    cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, m_LightLoopLightData.directionalLightData);
    cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, m_lightList.directionalLights.Count);

    // Evaluate the clear coat mask texture based on the lit shader mode
    // (in deferred it lives in GBuffer2; in forward we bind a black texture instead)
    RenderTargetIdentifier clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

    // Set the number of bounces for reflections
    cmd.SetGlobalInt(HDShaderIDs._RaytracingMaxRecursion, settings.bounceCount.value);

    // Set the data for the ray miss
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SkyTexture, m_SkyManager.GetSkyReflection(hdCamera));
}
/// <summary>
/// Records the post-processing chain into the render graph and writes the final image
/// into <paramref name="finalRT"/>. Currently implemented passes: optional alpha copy
/// (when m_KeepAlpha), color grading LUT build, uber post (bloom stubbed to black),
/// color-log debug push, and the final pass. The remaining effects (TAA/SMAA, DoF,
/// motion blur, Panini, bloom, FXAA, CAS, custom post processes, exposure) are still
/// commented out pending their port to the render graph — see the TODO RENDERGRAPH
/// blocks below, kept as a porting reference.
/// </summary>
/// <param name="renderGraph">Render graph the passes are recorded into.</param>
/// <param name="hdCamera">Camera being rendered.</param>
/// <param name="blueNoise">Blue noise manager, forwarded to the final pass (dithering).</param>
/// <param name="colorBuffer">Input color buffer; the chain starts from it.</param>
/// <param name="afterPostProcessTexture">"After post process" UI/custom layer, composited in the final pass.</param>
/// <param name="depthBuffer">Depth buffer (currently only referenced by commented-out effects).</param>
/// <param name="finalRT">Destination texture of the final pass.</param>
/// <param name="flipY">Whether the final blit must flip vertically.</param>
public void Render(RenderGraph renderGraph, HDCamera hdCamera, BlueNoise blueNoise, TextureHandle colorBuffer, TextureHandle afterPostProcessTexture, TextureHandle depthBuffer, TextureHandle finalRT, bool flipY)
{
    // NOTE(review): currently only read by the commented-out FXAA/CAS code below; the
    // property access is kept as-is in case the getter has lazy-init side effects — confirm.
    var dynResHandler = DynamicResolutionHandler.instance;

    bool isSceneView = hdCamera.camera.cameraType == CameraType.SceneView;
    // 'source' tracks the current head of the post-processing chain.
    var source = colorBuffer;
    // Defaults to opaque white; replaced by the real alpha copy when m_KeepAlpha is set.
    TextureHandle alphaTexture = renderGraph.defaultResources.whiteTextureXR;

    // Save the alpha and apply it back into the final pass if rendering in fp16 and post-processing in r11g11b10
    if (m_KeepAlpha)
    {
        using (var builder = renderGraph.AddRenderPass<AlphaCopyPassData>("Alpha Copy", out var passData, ProfilingSampler.Get(HDProfileId.AlphaCopy)))
        {
            passData.parameters = PrepareCopyAlphaParameters(hdCamera);
            passData.source = builder.ReadTexture(source);
            passData.outputAlpha = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) { name = "Alpha Channel Copy", colorFormat = GraphicsFormat.R16_SFloat, enableRandomWrite = true }));

            builder.SetRenderFunc(
            (AlphaCopyPassData data, RenderGraphContext ctx) =>
            {
                DoCopyAlpha(data.parameters, ctx.resources.GetTexture(data.source), ctx.resources.GetTexture(data.outputAlpha), ctx.cmd);
            });

            alphaTexture = passData.outputAlpha;
        }
    }

    // TODO RENDERGRAPH: Implement
    // if (m_PostProcessEnabled)
    // {
    //     // Guard bands (also known as "horrible hack") to avoid bleeding previous RTHandle
    //     // content into smaller viewports with some effects like Bloom that rely on bilinear
    //     // filtering and can't use clamp sampler and the likes
    //     // Note: some platforms can't clear a partial render target so we directly draw black triangles
    //     {
    //         int w = camera.actualWidth;
    //         int h = camera.actualHeight;
    //         cmd.SetRenderTarget(source, 0, CubemapFace.Unknown, -1);
    //         if (w < source.rt.width || h < source.rt.height)
    //         {
    //             cmd.SetViewport(new Rect(w, 0, k_RTGuardBandSize, h));
    //             cmd.DrawProcedural(Matrix4x4.identity, m_ClearBlackMaterial, 0, MeshTopology.Triangles, 3, 1);
    //             cmd.SetViewport(new Rect(0, h, w + k_RTGuardBandSize, k_RTGuardBandSize));
    //             cmd.DrawProcedural(Matrix4x4.identity, m_ClearBlackMaterial, 0, MeshTopology.Triangles, 3, 1);
    //         }
    //     }
    //     // Optional NaN killer before post-processing kicks in
    //     bool stopNaNs = camera.stopNaNs && m_StopNaNFS;
    //#if UNITY_EDITOR
    //     if (isSceneView)
    //         stopNaNs = HDAdditionalSceneViewSettings.sceneViewStopNaNs;
    //#endif
    //     if (stopNaNs)
    //     {
    //         using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.StopNaNs)))
    //         {
    //             var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
    //             DoStopNaNs(cmd, camera, source, destination);
    //             PoolSource(ref source, destination);
    //         }
    //     }
    // }

    // // Dynamic exposure - will be applied in the next frame
    // // Not considered as a post-process so it's not affected by its enabled state
    // if (!IsExposureFixed() && m_ExposureControlFS)
    // {
    //     using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DynamicExposure)))
    //     {
    //         if (m_Exposure.mode.value == ExposureMode.AutomaticHistogram)
    //         {
    //             DoHistogramBasedExposure(cmd, camera, source);
    //         }
    //         else
    //         {
    //             DoDynamicExposure(cmd, camera, source);
    //         }
    //
    //         // On reset history we need to apply dynamic exposure immediately to avoid
    //         // white or black screen flashes when the current exposure isn't anywhere
    //         // near 0
    //         if (camera.resetPostProcessingHistory)
    //         {
    //             var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
    //             var cs = m_Resources.shaders.applyExposureCS;
    //             int kernel = cs.FindKernel("KMain");
    //             // Note: we call GetPrevious instead of GetCurrent because the textures
    //             // are swapped internally as the system expects the texture will be used
    //             // on the next frame. So the actual "current" for this frame is in
    //             // "previous".
    //             cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._ExposureTexture, GetPreviousExposureTexture(camera));
    //             cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, source);
    //             cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, destination);
    //             cmd.DispatchCompute(cs, kernel, (camera.actualWidth + 7) / 8, (camera.actualHeight + 7) / 8, camera.viewCount);
    //             PoolSource(ref source, destination);
    //         }
    //     }
    // }

    if (m_PostProcessEnabled)
    {
        // // Temporal anti-aliasing goes first
        // bool taaEnabled = false;
        // if (m_AntialiasingFS)
        // {
        //     taaEnabled = camera.antialiasing == AntialiasingMode.TemporalAntialiasing;
        //     if (taaEnabled)
        //     {
        //         using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.TemporalAntialiasing)))
        //         {
        //             var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
        //             DoTemporalAntialiasing(cmd, camera, source, destination, depthBuffer, depthMipChain);
        //             PoolSource(ref source, destination);
        //         }
        //     }
        //     else if (camera.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing)
        //     {
        //         using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.SMAA)))
        //         {
        //             var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
        //             DoSMAA(cmd, camera, source, destination, depthBuffer);
        //             PoolSource(ref source, destination);
        //         }
        //     }
        // }

        // if (camera.frameSettings.IsEnabled(FrameSettingsField.CustomPostProcess))
        // {
        //     using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.CustomPostProcessBeforePP)))
        //     {
        //         foreach (var typeString in HDRenderPipeline.defaultAsset.beforePostProcessCustomPostProcesses)
        //             RenderCustomPostProcess(cmd, camera, ref source, colorBuffer, Type.GetType(typeString));
        //     }
        // }

        // // If Path tracing is enabled, then DoF is computed in the path tracer by sampling the lens aperure (when using the physical camera mode)
        // bool isDoFPathTraced = (camera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) &&
        //     camera.volumeStack.GetComponent<PathTracing>().enable.value &&
        //     camera.camera.cameraType != CameraType.Preview &&
        //     m_DepthOfField.focusMode == DepthOfFieldMode.UsePhysicalCamera);

        // // Depth of Field is done right after TAA as it's easier to just re-project the CoC
        // // map rather than having to deal with all the implications of doing it before TAA
        // if (m_DepthOfField.IsActive() && !isSceneView && m_DepthOfFieldFS && !isDoFPathTraced)
        // {
        //     using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfField)))
        //     {
        //         var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
        //         DoDepthOfField(cmd, camera, source, destination, taaEnabled);
        //         PoolSource(ref source, destination);
        //     }
        // }

        // // Motion blur after depth of field for aesthetic reasons (better to see motion
        // // blurred bokeh rather than out of focus motion blur)
        // if (m_MotionBlur.IsActive() && m_AnimatedMaterialsEnabled && !camera.resetPostProcessingHistory && m_MotionBlurFS)
        // {
        //     using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.MotionBlur)))
        //     {
        //         var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
        //         DoMotionBlur(cmd, camera, source, destination);
        //         PoolSource(ref source, destination);
        //     }
        // }

        // // Panini projection is done as a fullscreen pass after all depth-based effects are
        // // done and before bloom kicks in
        // // This is one effect that would benefit from an overscan mode or supersampling in
        // // HDRP to reduce the amount of resolution lost at the center of the screen
        // if (m_PaniniProjection.IsActive() && !isSceneView && m_PaniniProjectionFS)
        // {
        //     using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.PaniniProjection)))
        //     {
        //         var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
        //         DoPaniniProjection(cmd, camera, source, destination);
        //         PoolSource(ref source, destination);
        //     }
        // }

        // Uber post-process
        //// Generate the bloom texture
        //bool bloomActive = m_Bloom.IsActive() && m_BloomFS;
        //if (bloomActive)
        //{
        //    using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.Bloom)))
        //    {
        //        DoBloom(cmd, camera, source, uberPostParams.uberPostCS, uberPostParams.uberPostKernel);
        //    }
        //}
        //else
        //{
        //    cmd.SetComputeTextureParam(uberPostParams.uberPostCS, uberPostParams.uberPostKernel, HDShaderIDs._BloomTexture, TextureXR.GetBlackTexture());
        //    cmd.SetComputeTextureParam(uberPostParams.uberPostCS, uberPostParams.uberPostKernel, HDShaderIDs._BloomDirtTexture, Texture2D.blackTexture);
        //    cmd.SetComputeVectorParam(uberPostParams.uberPostCS, HDShaderIDs._BloomParams, Vector4.zero);
        //}

        // Build the 3D color grading LUT consumed by the uber post pass below.
        TextureHandle logLutOutput;
        using (var builder = renderGraph.AddRenderPass<ColorGradingPassData>("Color Grading", out var passData, ProfilingSampler.Get(HDProfileId.ColorGradingLUTBuilder)))
        {
            TextureHandle logLut = renderGraph.CreateTexture(new TextureDesc(m_LutSize, m_LutSize)
            {
                name = "Color Grading Log Lut",
                dimension = TextureDimension.Tex3D,
                slices = m_LutSize,
                depthBufferBits = DepthBits.None,
                colorFormat = m_LutFormat,
                filterMode = FilterMode.Bilinear,
                wrapMode = TextureWrapMode.Clamp,
                anisoLevel = 0,
                useMipMap = false,
                enableRandomWrite = true
            });
            passData.parameters = PrepareColorGradingParameters();
            passData.logLut = builder.WriteTexture(logLut);
            logLutOutput = passData.logLut;

            builder.SetRenderFunc(
            (ColorGradingPassData data, RenderGraphContext ctx) =>
            {
                DoColorGrading(data.parameters, ctx.resources.GetTexture(data.logLut), ctx.cmd);
            });
        }

        using (var builder = renderGraph.AddRenderPass<UberPostPassData>("Uber Post", out var passData, ProfilingSampler.Get(HDProfileId.UberPost)))
        {
            TextureHandle dest = renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) { name = "Uber Post Destination", colorFormat = m_ColorFormat, useMipMap = false, enableRandomWrite = true });
            passData.parameters = PrepareUberPostParameters(hdCamera, isSceneView);
            passData.source = builder.ReadTexture(source);
            passData.logLut = builder.ReadTexture(logLutOutput);
            passData.destination = builder.WriteTexture(dest);

            builder.SetRenderFunc(
            (UberPostPassData data, RenderGraphContext ctx) =>
            {
                // Temp until bloom is implemented: bind black bloom inputs and zeroed params.
                ctx.cmd.SetComputeTextureParam(data.parameters.uberPostCS, data.parameters.uberPostKernel, HDShaderIDs._BloomTexture, TextureXR.GetBlackTexture());
                ctx.cmd.SetComputeTextureParam(data.parameters.uberPostCS, data.parameters.uberPostKernel, HDShaderIDs._BloomDirtTexture, Texture2D.blackTexture);
                ctx.cmd.SetComputeVectorParam(data.parameters.uberPostCS, HDShaderIDs._BloomParams, Vector4.zero);

                DoUberPostProcess(data.parameters,
                    ctx.resources.GetTexture(data.source),
                    ctx.resources.GetTexture(data.destination),
                    ctx.resources.GetTexture(data.logLut),
                    ctx.resources.GetTexture(data.source), // TODO: TMP VALUE, should be bloom texture and will be as soon as PP is ported to rendergraph.
                    ctx.cmd);
            });

            // The chain now continues from the uber post output.
            source = passData.destination;
        }

        m_HDInstance.PushFullScreenDebugTexture(renderGraph, source, FullScreenDebugMode.ColorLog);

        // if (camera.frameSettings.IsEnabled(FrameSettingsField.CustomPostProcess))
        // {
        //     using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.CustomPostProcessAfterPP)))
        //     {
        //         foreach (var typeString in HDRenderPipeline.defaultAsset.afterPostProcessCustomPostProcesses)
        //             RenderCustomPostProcess(cmd, camera, ref source, colorBuffer, Type.GetType(typeString));
        //     }
        // }
    }

    // if (dynResHandler.DynamicResolutionEnabled() &&     // Dynamic resolution is on.
    //     camera.antialiasing == AntialiasingMode.FastApproximateAntialiasing &&
    //     m_AntialiasingFS)
    // {
    //     using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.FXAA)))
    //     {
    //         var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
    //         DoFXAA(cmd, camera, source, destination);
    //         PoolSource(ref source, destination);
    //     }
    // }

    // // Contrast Adaptive Sharpen Upscaling
    // if (dynResHandler.DynamicResolutionEnabled() &&
    //     dynResHandler.filter == DynamicResUpscaleFilter.ContrastAdaptiveSharpen)
    // {
    //     using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.ContrastAdaptiveSharpen)))
    //     {
    //         var destination = m_Pool.Get(Vector2.one, m_ColorFormat);
    //         var cs = m_Resources.shaders.contrastAdaptiveSharpenCS;
    //         int kInit = cs.FindKernel("KInitialize");
    //         int kMain = cs.FindKernel("KMain");
    //         if (kInit >= 0 && kMain >= 0)
    //         {
    //             cmd.SetComputeFloatParam(cs, HDShaderIDs._Sharpness, 1);
    //             cmd.SetComputeTextureParam(cs, kMain, HDShaderIDs._InputTexture, source);
    //             cmd.SetComputeVectorParam(cs, HDShaderIDs._InputTextureDimensions, new Vector4(source.rt.width, source.rt.height));
    //             cmd.SetComputeTextureParam(cs, kMain, HDShaderIDs._OutputTexture, destination);
    //             cmd.SetComputeVectorParam(cs, HDShaderIDs._OutputTextureDimensions, new Vector4(destination.rt.width, destination.rt.height));
    //             ValidateComputeBuffer(ref m_ContrastAdaptiveSharpen, 2, sizeof(uint) * 4);
    //             cmd.SetComputeBufferParam(cs, kInit, "CasParameters", m_ContrastAdaptiveSharpen);
    //             cmd.SetComputeBufferParam(cs, kMain, "CasParameters", m_ContrastAdaptiveSharpen);
    //             cmd.DispatchCompute(cs, kInit, 1, 1, 1);
    //             int dispatchX = (int)System.Math.Ceiling(destination.rt.width / 16.0f);
    //             int dispatchY = (int)System.Math.Ceiling(destination.rt.height / 16.0f);
    //             cmd.DispatchCompute(cs, kMain, dispatchX, dispatchY, camera.viewCount);
    //         }
    //         PoolSource(ref source, destination);
    //     }
    // }

    // Final pass: composites the after-post-process layer and the saved alpha into finalRT.
    using (var builder = renderGraph.AddRenderPass<FinalPassData>("Final Pass", out var passData, ProfilingSampler.Get(HDProfileId.FinalPost)))
    {
        passData.parameters = PrepareFinalPass(hdCamera, blueNoise, flipY);
        passData.source = builder.ReadTexture(source);
        passData.afterPostProcessTexture = builder.ReadTexture(afterPostProcessTexture);
        passData.alphaTexture = builder.ReadTexture(alphaTexture);
        passData.destination = builder.WriteTexture(finalRT);

        builder.SetRenderFunc(
        (FinalPassData data, RenderGraphContext ctx) =>
        {
            DoFinalPass(data.parameters, ctx.resources.GetTexture(data.source), ctx.resources.GetTexture(data.afterPostProcessTexture), ctx.resources.GetTexture(data.destination), ctx.resources.GetTexture(data.alphaTexture), ctx.cmd);
        });
    }
}
// When SSGI is not running, bind a black 1x1 texture as the global indirect
// diffuse input so shaders sampling it read zero.
static void BindBlackIndirectDiffuseTexture(CommandBuffer cmd)
    => cmd.SetGlobalTexture(HDShaderIDs._IndirectDiffuseTexture, TextureXR.GetBlackTexture());
// Binds a black texture as the global ambient occlusion input, for use when no
// AO effect is producing a result this frame.
// (Modifier order fixed: 'public static' per C# convention, was 'static public'.)
public static void SetDefaultAmbientOcclusionTexture(CommandBuffer cmd)
{
    cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, TextureXR.GetBlackTexture());
}
// Gathers the input, debug and output textures consumed by the quality
// ray traced reflection rendering pass.
RTRQualityRenderingResources PrepareRTRQualityRenderingResources(HDCamera hdCamera, RTHandle outputTexture)
{
    return new RTRQualityRenderingResources
    {
        // Input textures
        depthBuffer = m_SharedRTManager.GetDepthStencilBuffer(),
        normalBuffer = m_SharedRTManager.GetNormalBuffer(),
        // The clear coat mask is only available in deferred (GBuffer2); bind black in forward.
        clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture(),
        stencilBuffer = m_SharedRTManager.GetStencilBuffer(),

        // Debug texture
        rayCountTexture = GetRayCountManager().GetRayCountTexture(),

        // Output texture
        outputTexture = outputTexture
    };
}
// Collects all the buffers read and written by the SSGI trace pass.
SSGITraceResources PrepareSSGITraceResources(HDCamera hdCamera, RTHandle outputBuffer0, RTHandle outputBuffer1, RTHandle hitPointBuffer)
{
    // History buffers may not have been allocated yet; fall back to black textures when absent.
    var previousColorPyramid = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);
    var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);

    return new SSGITraceResources
    {
        // Input buffers
        depthTexture = m_SharedRTManager.GetDepthTexture(),
        normalBuffer = m_SharedRTManager.GetNormalBuffer(),
        motionVectorsBuffer = m_SharedRTManager.GetMotionVectorsBuffer(),
        colorPyramid = previousColorPyramid != null ? previousColorPyramid : TextureXR.GetBlackTexture(),
        historyDepth = historyDepthBuffer != null ? historyDepthBuffer : TextureXR.GetBlackTexture(),

        // Output buffers
        hitPointBuffer = hitPointBuffer,
        outputBuffer0 = outputBuffer0,
        outputBuffer1 = outputBuffer1
    };
}
// Gathers the buffers required by the ray traced reflection direction
// generation pass.
RTReflectionDirGenResources PrepareRTReflectionDirGenResources(HDCamera hdCamera, RTHandle outputBuffer)
{
    return new RTReflectionDirGenResources
    {
        // Input buffers
        depthBuffer = m_SharedRTManager.GetDepthStencilBuffer(),
        stencilBuffer = m_SharedRTManager.GetStencilBuffer(),
        normalBuffer = m_SharedRTManager.GetNormalBuffer(),
        // The clear coat mask is only available in deferred (GBuffer2); bind black in forward.
        clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture(),

        // Output buffers
        outputBuffer = outputBuffer
    };
}
// Resets the global ambient occlusion inputs to their defaults: a zeroed
// parameter vector and a black AO texture.
public void SetDefaultAmbientOcclusionTexture(CommandBuffer cmd)
{
    cmd.SetGlobalVector(HDShaderIDs._AmbientOcclusionParam, Vector4.zero);
    cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, TextureXR.GetBlackTexture());
}
// Packs together the textures consumed and produced by the ray traced
// reflection upscale pass.
RTReflectionUpscaleResources PrepareRTReflectionUpscaleResources(HDCamera hdCamera, RTHandle lightingTexture, RTHandle hitPointTexture, RTHandle outputTexture)
{
    return new RTReflectionUpscaleResources
    {
        depthStencilBuffer = m_SharedRTManager.GetDepthStencilBuffer(),
        normalBuffer = m_SharedRTManager.GetNormalBuffer(),
        lightingTexture = lightingTexture,
        hitPointTexture = hitPointTexture,
        outputTexture = outputTexture,
        // The clear coat mask is only available in deferred (GBuffer2); bind black in forward.
        clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture()
    };
}