public override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination)
{
    Debug.Assert(m_Material != null);

    AdditionalCompositorData layerData = null;
    camera.camera.gameObject.TryGetComponent<AdditionalCompositorData>(out layerData);

    if (layerData == null || layerData.layerFilters == null)
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
        return;
    }

    int index = layerData.layerFilters.FindIndex(x => x.filterType == CompositionFilter.FilterType.ALPHA_MASK);
    if (index < 0)
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
        return;
    }

    var filter = layerData.layerFilters[index];
    m_Material.SetTexture(ShaderIDs.k_InputTexture, source);
    m_Material.SetTexture(ShaderIDs.k_AlphaTexture, filter.alphaMask);
    HDUtils.DrawFullScreen(cmd, m_Material, destination);
}
public override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination)
{
    Debug.Assert(m_Material != null);

    AdditionalCompositorData layerData = null;
    camera.camera.gameObject.TryGetComponent<AdditionalCompositorData>(out layerData);

    if (activate.value == false || layerData == null || layerData.layerFilters == null)
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
        return;
    }

    int index = layerData.layerFilters.FindIndex(x => x.filterType == CompositionFilter.FilterType.CHROMA_KEYING);
    if (index < 0)
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
        return;
    }

    var filter = layerData.layerFilters[index];

    Vector4 keyParams;
    keyParams.x = filter.keyThreshold;
    keyParams.y = filter.keyTolerance;
    keyParams.z = filter.spillRemoval;
    keyParams.w = 1.0f;

    m_Material.SetVector(ShaderIDs.k_KeyColor, filter.maskColor);
    m_Material.SetVector(ShaderIDs.k_KeyParams, keyParams);
    m_Material.SetTexture(ShaderIDs.k_InputTexture, source);
    HDUtils.DrawFullScreen(cmd, m_Material, destination);
}
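// Sketch (assumption): the ShaderIDs constants used by the two compositor filters above are cached
// shader property IDs. The exact property-name strings depend on the compositor shaders; the names
// below are illustrative only.
static class ShaderIDs
{
    public static readonly int k_InputTexture = Shader.PropertyToID("_InputTexture");
    public static readonly int k_AlphaTexture = Shader.PropertyToID("_AlphaTexture");
    public static readonly int k_KeyColor     = Shader.PropertyToID("_KeyColor");
    public static readonly int k_KeyParams    = Shader.PropertyToID("_KeyParams");
}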
void GenerateGaussianMips(CommandBuffer cmd, HDCamera hdCam)
{
    GetCustomBuffers(out var customColorBuffer, out var _);

    // Downsample
    using (new ProfilingSample(cmd, "Downsample", CustomSampler.Create("Downsample")))
    {
        // This blit automatically downsamples the color because the target buffer has been allocated at half resolution
        HDUtils.BlitCameraTexture(cmd, customColorBuffer, downSampleBuffer, 0, true);
    }

    // Horizontal blur
    using (new ProfilingSample(cmd, "H Blur", CustomSampler.Create("H Blur")))
    {
        var hBlurProperties = new MaterialPropertyBlock();
        hBlurProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixels wide in the shader
        hBlurProperties.SetTexture(ShaderID._Source, downSampleBuffer);
        SetViewPortSize(cmd, hBlurProperties, blurBuffer);
        HDUtils.DrawFullScreen(cmd, blurMaterial, blurBuffer, hBlurProperties, shaderPassId: 0); // Don't forget the shaderPassId or it won't work
    }

    // Copy the result back into the color buffer while doing a vertical blur
    using (new ProfilingSample(cmd, "V Blur + Copy back", CustomSampler.Create("V Blur + Copy back")))
    {
        var vBlurProperties = new MaterialPropertyBlock();
        vBlurProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixels wide in the shader
        vBlurProperties.SetTexture(ShaderID._Source, blurBuffer);
        SetViewPortSize(cmd, vBlurProperties, customColorBuffer);
        HDUtils.DrawFullScreen(cmd, blurMaterial, customColorBuffer, vBlurProperties, shaderPassId: 1); // Don't forget the shaderPassId or it won't work
    }
}
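// Sketch (assumption): the downsampling comment above relies on downSampleBuffer and blurBuffer
// being allocated at half resolution. A typical allocation in an HDRP custom pass looks like this
// (format and names are illustrative):
downSampleBuffer = RTHandles.Alloc(
    Vector2.one * 0.5f, TextureXR.slices, dimension: TextureXR.dimension,
    colorFormat: GraphicsFormat.R16G16B16A16_SFloat, useDynamicScale: true, name: "DownSampleBuffer");
blurBuffer = RTHandles.Alloc(
    Vector2.one * 0.5f, TextureXR.slices, dimension: TextureXR.dimension,
    colorFormat: GraphicsFormat.R16G16B16A16_SFloat, useDynamicScale: true, name: "BlurBuffer");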
protected override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Rain data)
{
    if (Mathf.Approximately(EnvironmentEffectsManager.Rain, 0f))
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
        return;
    }

    if (EnvironmentEffectsManager.Rain < 0.35f)
    {
        cmd.EnableShaderKeyword("LOW");
        cmd.DisableShaderKeyword("MED");
        cmd.DisableShaderKeyword("HGH");
    }
    else if (EnvironmentEffectsManager.Rain < 0.7f)
    {
        cmd.EnableShaderKeyword("MED");
        cmd.DisableShaderKeyword("LOW");
        cmd.DisableShaderKeyword("HGH");
    }
    else
    {
        cmd.EnableShaderKeyword("HGH");
        cmd.DisableShaderKeyword("LOW");
        cmd.DisableShaderKeyword("MED");
    }

    cmd.SetGlobalFloat("_Intensity", EnvironmentEffectsManager.Rain);
    cmd.SetGlobalTexture("_InputTexture", source);
    cmd.SetGlobalFloat("_Size", data.size);
    HDUtils.DrawFullScreen(cmd, material, destination);
}
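// Sketch (assumption, not part of the original renderer): the keyword selection above can be
// factored into a small helper so the 0.35/0.7 intensity thresholds live in one place.
static void SetRainQualityKeywords(CommandBuffer cmd, float rain)
{
    string active = rain < 0.35f ? "LOW" : rain < 0.7f ? "MED" : "HGH";
    foreach (var keyword in new[] { "LOW", "MED", "HGH" })
    {
        if (keyword == active)
            cmd.EnableShaderKeyword(keyword);
        else
            cmd.DisableShaderKeyword(keyword);
    }
}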
// Denoiser variant without a history array
public void DenoiseBuffer(CommandBuffer cmd, HDCamera hdCamera,
    RTHandle noisySignal, RTHandle historySignal, RTHandle outputSignal,
    bool singleChannel = true, float historyValidity = 1.0f)
{
    // If we do not have depth and normal history buffers, we can skip right away
    var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
    var historyNormalBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal);
    if (historyDepthBuffer == null || historyNormalBuffer == null)
    {
        HDUtils.BlitCameraTexture(cmd, noisySignal, historySignal);
        HDUtils.BlitCameraTexture(cmd, noisySignal, outputSignal);
        return;
    }

    // Fetch texture dimensions
    int texWidth = hdCamera.actualWidth;
    int texHeight = hdCamera.actualHeight;

    // Evaluate the dispatch parameters
    int areaTileSize = 8;
    int numTilesX = (texWidth + (areaTileSize - 1)) / areaTileSize;
    int numTilesY = (texHeight + (areaTileSize - 1)) / areaTileSize;

    // Request the intermediate buffer we need
    RTHandle validationBuffer = m_RenderPipeline.GetRayTracingBuffer(InternalRayTracingBuffers.R0);

    // First of all we need to validate the history to know where we can or cannot use the history signal
    int m_KernelFilter = m_TemporalFilterCS.FindKernel("ValidateHistory");
    var historyScale = new Vector2(hdCamera.actualWidth / (float)historySignal.rt.width, hdCamera.actualHeight / (float)historySignal.rt.height);
    cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._RTHandleScaleHistory, historyScale);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryNormalBufferTexture, historyNormalBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBufferRW, validationBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, TextureXR.GetBlackTexture());
    cmd.SetComputeFloatParam(m_TemporalFilterCS, HDShaderIDs._HistoryValidity, historyValidity);
    cmd.SetComputeFloatParam(m_TemporalFilterCS, HDShaderIDs._PixelSpreadAngleTangent, HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight));
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

    // Now that we have validated our history, let's accumulate
    m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? "TemporalAccumulationSingle" : "TemporalAccumulationColor");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, noisySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryBuffer, historySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, outputSignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBuffer, validationBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._VelocityBuffer, TextureXR.GetBlackTexture());
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

    // Make sure to copy the newly accumulated signal into our history buffer
    m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? "CopyHistorySingle" : "CopyHistoryColor");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, outputSignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, historySignal);
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);
}
protected override void Render(PostProcessPassContext ctx, RTHandle source, RTHandle destination, GreyScale data)
{
    if (Mathf.Approximately(data.intensity, 0f))
    {
        HDUtils.BlitCameraTexture(ctx.cmd, source, destination);
        return;
    }

    ctx.cmd.SetGlobalFloat("_Intensity", data.intensity);
    ctx.cmd.SetGlobalTexture("_InputTexture", source);
    HDUtils.DrawFullScreen(ctx.cmd, material, destination);
}
public void DenoiseBuffer(CommandBuffer cmd, HDCamera hdCamera,
    RTHandle noisySignal, RTHandle historySignal, RTHandle outputSignal,
    bool singleChannel = true, int slotIndex = -1)
{
    // If we do not have depth and normal history buffers, we can skip right away
    var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
    var historyNormalBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal);
    if (historyDepthBuffer == null || historyNormalBuffer == null)
    {
        HDUtils.BlitCameraTexture(cmd, noisySignal, historySignal);
        HDUtils.BlitCameraTexture(cmd, noisySignal, outputSignal);
        return;
    }

    // Fetch texture dimensions
    int texWidth = hdCamera.actualWidth;
    int texHeight = hdCamera.actualHeight;

    // Evaluate the dispatch parameters
    int areaTileSize = 8;
    int numTilesX = (texWidth + (areaTileSize - 1)) / areaTileSize;
    int numTilesY = (texHeight + (areaTileSize - 1)) / areaTileSize;

    // First of all we need to validate the history to know where we can or cannot use the history signal
    int m_KernelFilter = m_TemporalFilterCS.FindKernel("ValidateHistory");
    var historyScale = new Vector2(hdCamera.actualWidth / (float)historySignal.rt.width, hdCamera.actualHeight / (float)historySignal.rt.height);
    cmd.SetComputeVectorParam(m_TemporalFilterCS, HDShaderIDs._RTHandleScaleHistory, historyScale);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryNormalBufferTexture, historyNormalBuffer);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBufferRW, m_ValidationBuffer);
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, 1);

    // Now that we have validated our history, let's accumulate
    m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? (slotIndex == -1 ? "TemporalAccumulationSingle" : "TemporalAccumulationSingleArray") : "TemporalAccumulationColor");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, noisySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._HistoryBuffer, historySignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, outputSignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._ValidationBuffer, m_ValidationBuffer);
    cmd.SetComputeIntParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistorySlot, slotIndex);
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, 1);

    // Copy the accumulated signal into the history buffer
    m_KernelFilter = m_TemporalFilterCS.FindKernel(singleChannel ? (slotIndex == -1 ? "CopyHistorySingle" : "CopyHistorySingleArray") : "CopyHistoryColor");
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, outputSignal);
    cmd.SetComputeTextureParam(m_TemporalFilterCS, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, historySignal);
    cmd.SetComputeIntParam(m_TemporalFilterCS, HDShaderIDs._DenoisingHistorySlot, slotIndex);
    cmd.DispatchCompute(m_TemporalFilterCS, m_KernelFilter, numTilesX, numTilesY, 1);
}
protected override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, GreyScale data)
{
    if (Mathf.Approximately(data.intensity, 0f))
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
        return;
    }

    cmd.SetGlobalFloat("_Intensity", data.intensity);
    cmd.SetGlobalTexture("_InputTexture", source);
    HDUtils.DrawFullScreen(cmd, material, destination);
}
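// Sketch (assumption, not from the original source): GreyScale is presumably a simple serializable
// settings container holding the blend intensity consumed by the two renderers above.
[Serializable]
public class GreyScale
{
    [Range(0f, 1f)]
    public float intensity = 0f;
}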
protected override void Render(PostProcessPassContext ctx, RTHandle source, RTHandle destination, VideoArtifacts data)
{
    var cmd = ctx.cmd;

    if (Mathf.Approximately(data.intensity, 0f))
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
        return;
    }

    // Update the time parameters.
    var time = Time.time;
    var delta = time - prevTime;
    prevTime = time;

    // Block parameters
    var block = data.intensity;
    var block3 = block * block * block;

    // Shuffle block parameters every 1/30 seconds.
    blockTime += delta * 60;
    if (blockTime > 1)
    {
        if (Random.value < 0.09f)
        {
            blockSeed1 += 251;
        }
        if (Random.value < 0.29f)
        {
            blockSeed2 += 373;
        }
        if (Random.value < 0.25f)
        {
            blockStride = Random.Range(1, 32);
        }
        blockTime = 0;
    }

    // Invoke the shader.
    cmd.SetGlobalInt(ShaderIDs.Seed, (int)(time * 10000));
    cmd.SetGlobalFloat(ShaderIDs.BlockStrength, block3);
    cmd.SetGlobalInt(ShaderIDs.BlockStride, blockStride);
    cmd.SetGlobalInt(ShaderIDs.BlockSeed1, blockSeed1);
    cmd.SetGlobalInt(ShaderIDs.BlockSeed2, blockSeed2);
    cmd.SetGlobalTexture(ShaderIDs.InputTexture, source);
    cmd.SetGlobalInt(ShaderIDs.BlockSize, data.blockSize);
    HDUtils.DrawFullScreen(cmd, material, destination);
}
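// Sketch (assumption): the renderer above keeps this per-instance glitch state between frames.
// Field names are taken from the method body; the initial values are illustrative.
float prevTime;
float blockTime;
int blockSeed1 = 71;
int blockSeed2 = 113;
int blockStride = 1;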
void GenerateGaussianMips(CommandBuffer cmd, HDCamera hdCam)
{
    RTHandle source;

    // Retrieve the target buffer of the blur from the UI:
    if (targetColorBuffer == TargetBuffer.Camera)
    {
        GetCameraBuffers(out source, out _);
    }
    else
    {
        GetCustomBuffers(out source, out _);
    }

    // Save the non-blurred color into a copy if the mask is enabled:
    if (useMask)
    {
        cmd.CopyTexture(source, colorCopy);
    }

    // Downsample
    using (new ProfilingSample(cmd, "Downsample", CustomSampler.Create("Downsample")))
    {
        // This blit automatically downsamples the color because the target buffer has been allocated at half resolution
        HDUtils.BlitCameraTexture(cmd, source, downSampleBuffer, 0);
    }

    // Horizontal blur
    using (new ProfilingSample(cmd, "H Blur", CustomSampler.Create("H Blur")))
    {
        var hBlurProperties = new MaterialPropertyBlock();
        hBlurProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixels wide in the shader
        hBlurProperties.SetTexture(ShaderID._Source, downSampleBuffer);
        SetViewPortSize(cmd, hBlurProperties, blurBuffer);
        HDUtils.DrawFullScreen(cmd, blurMaterial, blurBuffer, hBlurProperties, shaderPassId: 0); // Don't forget the shaderPassId or it won't work
    }

    // Copy the result back into the color buffer while doing a vertical blur
    using (new ProfilingSample(cmd, "V Blur + Copy back", CustomSampler.Create("V Blur + Copy back")))
    {
        var vBlurProperties = new MaterialPropertyBlock();
        // When we use a mask, we do the vertical blur into the downsampling buffer instead of the camera buffer.
        // We need this because we're going to write to the color buffer and read from this blurred buffer,
        // which we can't do if they are the same buffer.
        vBlurProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixels wide in the shader
        vBlurProperties.SetTexture(ShaderID._Source, blurBuffer);
        var targetBuffer = useMask ? downSampleBuffer : source;
        SetViewPortSize(cmd, vBlurProperties, targetBuffer);
        HDUtils.DrawFullScreen(cmd, blurMaterial, targetBuffer, vBlurProperties, shaderPassId: 1); // Don't forget the shaderPassId or it won't work
    }

    if (useMask)
    {
        // Merge the non-blurred copy and the blurred version using the mask buffers
        using (new ProfilingSample(cmd, "Compose Mask Blur", CustomSampler.Create("Compose Mask Blur")))
        {
            var compositingProperties = new MaterialPropertyBlock();
            compositingProperties.SetFloat(ShaderID._Radius, radius / 4.0f); // The blur is 4 pixels wide in the shader
            compositingProperties.SetTexture(ShaderID._Source, downSampleBuffer);
            compositingProperties.SetTexture(ShaderID._ColorBufferCopy, colorCopy);
            compositingProperties.SetTexture(ShaderID._Mask, maskBuffer);
            compositingProperties.SetTexture(ShaderID._MaskDepth, maskDepthBuffer);
            compositingProperties.SetFloat(ShaderID._InvertMask, invertMask ? 1 : 0);
            SetViewPortSize(cmd, compositingProperties, source);
            HDUtils.DrawFullScreen(cmd, blurMaterial, source, compositingProperties, shaderPassId: 2); // Don't forget the shaderPassId or it won't work
        }
    }
}
public override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination)
{
    if (m_Material == null)
    {
        return;
    }

#if UNITY_EDITOR
    if (!EditorApplication.isPlaying && RenderMode.GameViewRenderMode == RenderMode.Mode.Off)
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
        return;
    }
#endif

    bool draw = false;
    foreach (var lineEffect in camera.camera.GetComponents<PencilLineEffect>())
    {
        if (lineEffect.PencilRenderer != null && lineEffect.PencilRenderer.Texture != null && lineEffect.isPostProsessingEnabled)
        {
            // Support stacking multiple effects
            if (draw)
            {
                HDUtils.BlitCameraTexture(cmd, destination, source);
            }

            // Texture update setup
            if (lineEffect.isRendering == true)
            {
#if UNITY_2018_3_OR_NEWER
                var callback = NativeFunctions.GetTextureUpdateCallbackV2();
#else
                var callback = NativeFunctions.GetTextureUpdateCallback();
#endif
                if (callback == IntPtr.Zero)
                {
                    continue;
                }

                // Acquire a handle so the natively allocated buffer is not released unexpectedly.
                // The handle is released automatically in the End phase of TextureUpdateCallback().
                var textureUpdateHandle = lineEffect.PencilRenderer.RequestTextureUpdate(0);
                if (textureUpdateHandle == 0xFFFFFFFF)
                {
                    // In rare cases PencilLinePostProcessRenderer.Render() is called before lineEffect.OnPreRender(),
                    // so lines may not be drawn correctly in PostProcessing_RenderingEvent mode.
                    continue;
                }

#if UNITY_2018_3_OR_NEWER
                cmd.IssuePluginCustomTextureUpdateV2(callback, lineEffect.PencilRenderer.Texture, textureUpdateHandle);
#else
                cmd.IssuePluginCustomTextureUpdate(callback, lineEffect.PencilRenderer.Texture, textureUpdateHandle);
#endif

                // Update the textures used for render element image output
                for (int renderElementIndex = 0; true; renderElementIndex++)
                {
                    var renderElementTexture = lineEffect.PencilRenderer.GetRenderElementTexture(renderElementIndex);
                    var renderElementTargetTexture = lineEffect.PencilRenderer.GetRenderElementTargetTexture(renderElementIndex);
                    if (renderElementTexture == null || renderElementTargetTexture == null)
                    {
                        break;
                    }

                    textureUpdateHandle = lineEffect.PencilRenderer.RequestTextureUpdate(1 + renderElementIndex);
                    if (textureUpdateHandle == 0xFFFFFFFF)
                    {
                        break;
                    }

#if UNITY_2018_3_OR_NEWER
                    cmd.IssuePluginCustomTextureUpdateV2(callback, renderElementTexture, textureUpdateHandle);
#else
                    cmd.IssuePluginCustomTextureUpdate(callback, renderElementTexture, textureUpdateHandle);
#endif
                    cmd.Blit(renderElementTexture, renderElementTargetTexture);
                }
            }

            // Draw setup
            cmd.SetGlobalTexture("_MainTex", source);
            cmd.SetGlobalTexture("_LineTex", lineEffect.PencilRenderer.Texture);
            cmd.SetGlobalFloat("_Alpha", alpha.value);
            HDUtils.DrawFullScreen(cmd, m_Material, destination);
            draw = true; // mark that we rendered, so the fallback blit below doesn't overwrite the result
        }
    }

    // If nothing was drawn, copy the source into the render target
    if (!draw)
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
    }
}
protected override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, SunFlare data)
{
    var cam = camera.camera;
    var sunForward = sunTransform.forward;
    var sunWorldPos = cam.transform.position - sunForward * 1000f;
    var sunViewPos = cam.WorldToViewportPoint(sunWorldPos);

    var intensity = Mathf.Clamp01(Vector3.Dot(cam.transform.forward, -sunForward));
    var sunVisible = sunViewPos.z > 0 &&
                     sunViewPos.x >= -0.1f && sunViewPos.x < 1.1f &&
                     sunViewPos.y >= -0.1f && sunViewPos.y < 1.1f;

    if (!sunVisible)
    {
        if (Physics.Raycast(cam.transform.position, -sunForward, 1000f, LayerMask))
        {
            intensity = 0f;
        }
    }

    if (intensity > 0f)
    {
        GetCameraBuffers(out _, out var depthBuffer);

        var depthTexRes = depthBuffer.referenceSize;
        var actualCameraSize = new Vector2Int(camera.actualWidth, camera.actualHeight);
        var occlTexRes = new Vector2Int(OcclusionRes, OcclusionRes);

        var scaleRatio = new Vector2((float)actualCameraSize.x / depthTexRes.x, (float)actualCameraSize.y / depthTexRes.y);
        var aspectRatio = (float)actualCameraSize.y / actualCameraSize.x;
        var scaledSun = new Vector4(sunViewPos.x * scaleRatio.x, sunViewPos.y * scaleRatio.y,
            0.1f * aspectRatio * scaleRatio.x, 0.1f * scaleRatio.y);

        cmd.SetComputeVectorParam(computeShader, DepthTextureRes,
            new Vector4(depthTexRes.x, depthTexRes.y, 1f / depthTexRes.x, 1f / depthTexRes.y));
        cmd.SetComputeVectorParam(computeShader, OcclusionTextureRes,
            new Vector4(occlTexRes.x, occlTexRes.y, 1f / occlTexRes.x, 1f / occlTexRes.y));
        cmd.SetComputeVectorParam(computeShader, SunViewPos, scaledSun);

        var kernel = textureOcclusionKernel;
        cmd.SetComputeTextureParam(computeShader, kernel, DepthTexture, depthBuffer);
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureOut, occlusionTextureA);
        cmd.DispatchCompute(computeShader, kernel, OcclusionRes / 8, OcclusionRes / 8, 1);

        kernel = blurTextureOcclusionKernel;
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureIn, occlusionTextureA);
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureOut, occlusionTextureB);
        cmd.DispatchCompute(computeShader, kernel, OcclusionRes / 8, OcclusionRes / 8, 1);

        kernel = reduceTextureOcclusionKernel;
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureIn, occlusionTextureB);
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureOut, occlusionTextureA);
        cmd.DispatchCompute(computeShader, kernel, 1, 1, 1);

        kernel = angleOcclusionKernel;
        cmd.SetComputeTextureParam(computeShader, kernel, OcclusionTextureIn, occlusionTextureB);
        cmd.SetComputeBufferParam(computeShader, kernel, AngleOcclusion, angleOcclusion);
        cmd.DispatchCompute(computeShader, kernel, AngleSamples / 64, 1, 1);

        cmd.SetGlobalVector(SunViewPos, sunViewPos);
        cmd.SetGlobalVector(SunSettings,
            new Vector4(data.sunIntensity, data.haloIntensity, data.ghostingIntensity, intensity));
        cmd.SetGlobalTexture(InputTexture, source);
        cmd.SetGlobalTexture(OcclusionTextureIn, occlusionTextureA);
        cmd.SetGlobalTexture(OcclusionTextureOut, occlusionTextureB);
        cmd.SetGlobalBuffer(AngleOcclusion, angleOcclusion);
        HDUtils.DrawFullScreen(cmd, material, destination);
    }
    else
    {
        HDUtils.BlitCameraTexture(cmd, source, destination);
    }
}
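// Sketch (assumption): occlusionTextureA/B are small UAV-enabled render targets consumed by the
// compute passes above. One plausible allocation; the format and names are illustrative:
occlusionTextureA = RTHandles.Alloc(OcclusionRes, OcclusionRes,
    colorFormat: GraphicsFormat.R16_SFloat, enableRandomWrite: true, name: "SunFlareOcclusionA");
occlusionTextureB = RTHandles.Alloc(OcclusionRes, OcclusionRes,
    colorFormat: GraphicsFormat.R16_SFloat, enableRandomWrite: true, name: "SunFlareOcclusionB");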
public void Denoise(CommandBuffer cmd, HDCamera hdCamera, RTHandle noisyBuffer, RTHandle outputBuffer,
    bool halfResolution = false, float historyValidity = 1.0f)
{
    // Grab the global illumination volume component
    var giSettings = hdCamera.volumeStack.GetComponent<UnityEngine.Rendering.HighDefinition.GlobalIllumination>();

    var historyDepthBuffer = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
    var historyDepthBuffer1 = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth1);

    // If the depth textures are not available, we can't denoise
    if (historyDepthBuffer == null || historyDepthBuffer1 == null)
    {
        HDUtils.BlitCameraTexture(cmd, noisyBuffer, outputBuffer);
        return;
    }

    // Compute the dispatch parameters based on whether we are half resolution or not
    int tileSize = 8;
    int numTilesX, numTilesY;
    Vector4 halfScreenSize;
    EvaluateDispatchParameters(hdCamera, halfResolution, tileSize, out numTilesX, out numTilesY, out halfScreenSize);

    // Pick the right kernel to use
    int m_KernelFilter = halfResolution ? m_SpatialFilterHalfKernel : m_SpatialFilterKernel;

    // Bind the input scalars
    var info = m_SharedRTManager.GetDepthBufferMipChainInfo();
    firstMipOffset.Set(HDShadowUtils.Asfloat((uint)info.mipLevelOffsets[1].x), HDShadowUtils.Asfloat((uint)info.mipLevelOffsets[1].y));
    cmd.SetComputeVectorParam(m_SSGIDenoiserCS, HDShaderIDs._DepthPyramidFirstMipLevelOffset, firstMipOffset);
    cmd.SetComputeIntParam(m_SSGIDenoiserCS, HDShaderIDs._IndirectDiffuseSpatialFilter, giSettings.filterRadius);

    // Inject the half screen size if required
    if (halfResolution)
    {
        cmd.SetComputeVectorParam(m_SSGIDenoiserCS, HDShaderIDs._HalfScreenSize, halfScreenSize);
    }

    // Bind the input buffers
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthTexture());
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._InputNoisyBuffer, noisyBuffer);

    // Bind the output buffer
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._OutputFilteredBuffer, outputBuffer);

    // Do the spatial pass
    cmd.DispatchCompute(m_SSGIDenoiserCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

    // Grab the history buffer
    RTHandle indirectDiffuseHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedIndirectDiffuseHF);
    if (indirectDiffuseHistory == null)
    {
        indirectDiffuseHistory = hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedIndirectDiffuseHF, IndirectDiffuseHistoryBufferAllocatorFunction, 1);
        // Clear it to black if this is the first pass to avoid NaNs
        CoreUtils.SetRenderTarget(cmd, indirectDiffuseHistory, m_SharedRTManager.GetDepthStencilBuffer(), ClearFlag.Color, clearColor: Color.black);
    }

    // Pick the right kernel to use
    m_KernelFilter = halfResolution ? m_TemporalFilterHalfKernel : m_TemporalFilterKernel;

    // Bind the input buffers
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthTexture());
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
    cmd.SetComputeFloatParam(m_SSGIDenoiserCS, HDShaderIDs._HistoryValidity, historyValidity);
    if (halfResolution)
    {
        cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer1);
        cmd.SetComputeVectorParam(m_SSGIDenoiserCS, HDShaderIDs._DepthPyramidFirstMipLevelOffset, firstMipOffset);
    }
    else
    {
        cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._HistoryDepthTexture, historyDepthBuffer);
    }
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._HistoryBuffer, indirectDiffuseHistory);
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._InputNoisyBuffer, outputBuffer);

    // Bind the output buffer
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_KernelFilter, HDShaderIDs._OutputFilteredBuffer, noisyBuffer);

    // Do the temporal pass
    cmd.DispatchCompute(m_SSGIDenoiserCS, m_KernelFilter, numTilesX, numTilesY, hdCamera.viewCount);

    // Copy the new version into the history buffer
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_CopyHistory, HDShaderIDs._InputNoisyBuffer, noisyBuffer);
    cmd.SetComputeTextureParam(m_SSGIDenoiserCS, m_CopyHistory, HDShaderIDs._OutputFilteredBuffer, indirectDiffuseHistory);
    cmd.DispatchCompute(m_SSGIDenoiserCS, m_CopyHistory, numTilesX, numTilesY, hdCamera.viewCount);
}
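// Note on the buffer ping-pong above: the spatial filter reads noisyBuffer and writes outputBuffer,
// the temporal filter then reads outputBuffer and writes back into noisyBuffer, and that result is
// what gets copied into the history buffer; so the final denoised signal ends up in noisyBuffer,
// not in outputBuffer.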