// Voxelizes the visible density volumes into the 3D V-Buffer density texture,
// which is later consumed by the volumetric lighting pass.
// Early-outs when volumetrics are disabled for this camera or the active fog is not volumetric.
public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes)
{
    if (!hdCamera.frameSettings.enableVolumetric)
    {
        return;
    }

    var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

    if (visualEnvironment.fogType != FogType.Volumetric)
    {
        return;
    }

    using (new ProfilingSample(cmd, "Volume Voxelization"))
    {
        int numVisibleVolumes = m_VisibleVolumeBounds.Count;

        if (numVisibleVolumes == 0)
        {
            // Clear the render target instead of running the shader.
            // Note: the clear must take the global fog into account!
            // CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
            // return;

            // Clearing 3D textures does not seem to work!
            // Use the workaround by running the full shader with 0 density
        }

        // Pick the clustered-lighting or brute-force kernel depending on the light loop settings.
        bool enableClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
        int kernel = m_VolumeVoxelizationCS.FindKernel(enableClustered ? "VolumeVoxelizationClustered"
                                                                       : "VolumeVoxelizationBruteforce");

        var frameParams = hdCamera.vBufferParams[0];
        Vector4 resolution = frameParams.resolution;
        float vFoV = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;

        // Compose the matrix which allows us to compute the world space view direction.
        Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);

        Texture3D volumeAtlas = DensityVolumeManager.manager.volumeAtlas.volumeAtlas;
        Vector3 volumeAtlasDimensions = new Vector3(0.0f, 0.0f, 0.0f);

        if (volumeAtlas != null)
        {
            volumeAtlasDimensions.x = (float)volumeAtlas.width / volumeAtlas.depth; // 1 / number of textures
            volumeAtlasDimensions.y = 1.0f / volumeAtlas.width;
            volumeAtlasDimensions.z = volumeAtlas.width;
        }
        else
        {
            // No atlas available: bind an all-black volume so the shader still has a valid texture.
            volumeAtlas = CoreUtils.blackVolumeTexture;
        }

        cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);
        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
        cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeMaskAtlas, volumeAtlas);

        // TODO: set the constant buffer data only once.
        cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
        cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
        cmd.SetComputeVectorParam(m_VolumeVoxelizationCS, HDShaderIDs._VolumeMaskDimensions, volumeAtlasDimensions);

        int w = (int)resolution.x;
        int h = (int)resolution.y;

        // The shader defines GROUP_SIZE_1D = 8.
        cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
    }
}
// Combines specular lighting and diffuse lighting with subsurface scattering.
// In the case our frame is MSAA, for the moment given the fact that we do not have read/write access to the stencil buffer of the MSAA target; we need to keep this pass MSAA
// However, the compute can't output and MSAA target so we blend the non-MSAA target into the MSAA one.
public void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle colorBufferRT,
    RTHandleSystem.RTHandle diffuseBufferRT, RTHandleSystem.RTHandle depthStencilBufferRT, RTHandleSystem.RTHandle depthTextureRT)
{
    if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.SubsurfaceScattering))
        return;

    // TODO: For MSAA, at least initially, we can only support Jimenez, because we can't
    // create MSAA + UAV render targets.

    using (new ProfilingSample(cmd, "Subsurface Scattering", CustomSamplerId.SubsurfaceScattering.GetSampler()))
    {
        // For Jimenez we always need an extra buffer, for Disney it depends on platform
        if (NeedTemporarySubsurfaceBuffer() || hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
        {
            // Clear the SSS filtering target
            using (new ProfilingSample(cmd, "Clear SSS filtering target", CustomSamplerId.ClearSSSFilteringTarget.GetSampler()))
            {
                HDUtils.SetRenderTarget(cmd, m_CameraFilteringBuffer, ClearFlag.Color, Color.clear);
            }
        }

        using (new ProfilingSample(cmd, "HTile for SSS", CustomSamplerId.HTileForSSS.GetSampler()))
        {
            // Currently, Unity does not offer a way to access the GCN HTile even on PS4 and Xbox One.
            // Therefore, it's computed in a pixel shader, and optimized to only contain the SSS bit.

            // Clear the HTile texture. TODO: move this to ClearBuffers(). Clear operations must be batched!
            HDUtils.SetRenderTarget(cmd, m_HTile, ClearFlag.Color, Color.clear);

            HDUtils.SetRenderTarget(cmd, depthStencilBufferRT); // No need for color buffer here
            cmd.SetRandomWriteTarget(1, m_HTile); // This need to be done AFTER SetRenderTarget
            // Generate HTile for the split lighting stencil usage. Don't write into stencil texture.
            // shaderPassId = 2 => "Pass 2 - Export HTILE for stencilRef to output"
            CoreUtils.DrawFullScreen(cmd, m_CopyStencilForSplitLighting, null, 2);
            cmd.ClearRandomWriteTargets();
        }

        unsafe
        {
            // Warning: Unity is not able to losslessly transfer integers larger than 2^24 to the shader system.
            // Therefore, we bitcast uint to float in C#, and bitcast back to uint in the shader.
            uint texturingModeFlags = this.texturingModeFlags;
            cmd.SetComputeFloatParam(m_SubsurfaceScatteringCS, HDShaderIDs._TexturingModeFlags, *(float *)&texturingModeFlags);
        }

        // Push the per-diffusion-profile data to the compute shader.
        cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._WorldScales, worldScales);
        cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._FilterKernels, filterKernels);
        cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._ShapeParams, shapeParams);
        cmd.SetComputeFloatParams(m_SubsurfaceScatteringCS, HDShaderIDs._DiffusionProfileHashTable, diffusionProfileHashes);

        // MSAA uses a dedicated kernel variant.
        int sssKernel = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA) ? m_SubsurfaceScatteringKernelMSAA : m_SubsurfaceScatteringKernel;

        cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._DepthTexture, depthTextureRT);
        cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._SSSHTile, m_HTile);
        cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._IrradianceSource, diffuseBufferRT);

        for (int i = 0; i < sssBufferCount; ++i)
        {
            cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._SSSBufferTexture[i], GetSSSBuffer(i));
        }

        // Dispatch over 16x16 pixel tiles, one Z slice per view.
        int numTilesX = ((int)hdCamera.screenSize.x + 15) / 16;
        int numTilesY = ((int)hdCamera.screenSize.y + 15) / 16;
        int numTilesZ = hdCamera.viewCount;

        if (NeedTemporarySubsurfaceBuffer() || hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
        {
            cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._CameraFilteringBuffer, m_CameraFilteringBuffer);

            // Perform the SSS filtering pass which fills 'm_CameraFilteringBufferRT'.
            cmd.DispatchCompute(m_SubsurfaceScatteringCS, sssKernel, numTilesX, numTilesY, numTilesZ);

            cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, m_CameraFilteringBuffer); // Cannot set a RT on a material

            // Additively blend diffuse and specular lighting into 'm_CameraColorBufferRT'.
            HDUtils.DrawFullScreen(cmd, m_CombineLightingPass, colorBufferRT, depthStencilBufferRT);
        }
        else
        {
            cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._CameraColorTexture, colorBufferRT);

            // Perform the SSS filtering pass which performs an in-place update of 'colorBuffer'.
            cmd.DispatchCompute(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, numTilesX, numTilesY, numTilesZ);
        }
    }
}
// Performs the volumetric lighting integration into the V-Buffer, optionally with
// temporal reprojection (Play Mode only — frame counters are broken in Edit Mode).
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings frameSettings)
{
    if (preset == VolumetricLightingPreset.Off)
        return;

    using (new ProfilingSample(cmd, "Volumetric Lighting"))
    {
        VBuffer vBuffer = FindVBuffer(camera.GetViewID());
        Debug.Assert(vBuffer != null);

        if (HomogeneousFog.GetGlobalFogComponent() == null)
        {
            // Clear the render target instead of running the shader.
            // CoreUtils.SetRenderTarget(cmd, GetVBufferLightingIntegral(viewOffset), ClearFlag.Color, CoreUtils.clearColorAllBlack);
            // return;

            // Clearing 3D textures does not seem to work!
            // Use the workaround by running the full shader with no volume.
        }

        bool enableClustered = frameSettings.lightLoopSettings.enableTileAndCluster;
        bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

        int kernel;

        if (enableReprojection)
        {
            // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
            kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj"
                                                                       : "VolumetricLightingAllLightsReproj");
        }
        else
        {
            kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered"
                                                                       : "VolumetricLightingAllLights");
        }

        int w = 0, h = 0, d = 0;
        Vector2 scale = ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
        float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;

        // Compose the matrix which allows us to compute the world space view direction.
        // Compute it using the scaled resolution to account for the visible area of the VBuffer.
        Vector4 scaledRes = new Vector4(w * scale.x, h * scale.y, 1.0f / (w * scale.x), 1.0f / (h * scale.y));
        Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, scaledRes, camera.viewMatrix, false);

        camera.SetupComputeShader(m_VolumetricLightingCS, cmd);

        Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

        // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
        // Each of them is the centroid of the interval of length 2/14.
        // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
        // That way, the running average position is close to 0.5.
        // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
        // |   |   |   | o |   |   |   |
        // |   | o |   | x |   |   |   |
        // |   | x |   | x |   | o |   |
        // |   | x | o | x |   | x |   |
        // |   | x | x | x | o | x |   |
        // | o | x | x | x | x | x |   |
        // | x | x | x | x | x | x | o |
        // | x | x | x | x | x | x | x |
        float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

        // Cycle through the 7 jitter samples, one per rendered frame.
        int rfc = Time.renderedFrameCount;
        int sampleIndex = rfc % 7;
        Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);

        // TODO: set 'm_VolumetricLightingPreset'.
        cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
        cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write

        if (enableReprojection)
        {
            cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer()); // Write
            cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer());  // Read
        }

        // The shader defines GROUP_SIZE_1D = 16.
        cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 15) / 16, (h + 15) / 16, 1);
    }
}
// Pass all the systems that may want to update per-camera data here.
// That way you will never update an HDCamera and forget to update the dependent system.
public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys)
{
    // store a shortcut on HDAdditionalCameraData (done here and not in the constructor as
    // we don't create HDCamera at every frame and user can change the HDAdditionalData later (Like when they create a new scene).
    m_AdditionalCameraData = camera.GetComponent <HDAdditionalCameraData>();

    m_frameSettings = currentFrameSettings;

    // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
    // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
    bool taaEnabled = camera.cameraType == CameraType.Game &&
        HDUtils.IsTemporalAntialiasingActive(postProcessLayer) &&
        m_frameSettings.enablePostprocess;

    var nonJitteredCameraProj = camera.projectionMatrix;
    var cameraProj = taaEnabled
        ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
        : nonJitteredCameraProj;

    // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
    // (different Z value ranges etc.)
    var gpuProj = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
    var gpuView = camera.worldToCameraMatrix;
    var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

    // In stereo, this corresponds to the center eye position
    var pos = camera.transform.position;
    worldSpaceCameraPos = pos;

    if (ShaderConfig.s_CameraRelativeRendering != 0)
    {
        // Zero out the translation component.
        gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
    }

    var gpuVP = gpuNonJitteredProj * gpuView;

    // A camera could be rendered multiple times per frame, only updates the previous view proj & pos if needed
    if (m_LastFrameActive != Time.frameCount)
    {
        if (isFirstFrame)
        {
            // No history yet: seed "previous" data with the current frame's values.
            prevCameraPos = pos;
            prevViewProjMatrix = gpuVP;
        }
        else
        {
            prevCameraPos = cameraPos;
            prevViewProjMatrix = nonJitteredViewProjMatrix;
        }

        isFirstFrame = false;
    }

    taaFrameIndex = taaEnabled ? (uint)postProcessLayer.temporalAntialiasing.sampleIndex : 0;
    taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                   Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

    viewMatrix = gpuView;
    projMatrix = gpuProj;
    nonJitteredProjMatrix = gpuNonJitteredProj;
    cameraPos = pos;
    detViewMatrix = viewMatrix.determinant;

    if (ShaderConfig.s_CameraRelativeRendering != 0)
    {
        Matrix4x4 cameraDisplacement = Matrix4x4.Translate(cameraPos - prevCameraPos); // Non-camera-relative positions
        prevViewProjMatrix *= cameraDisplacement; // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
    }

    float n = camera.nearClipPlane;
    float f = camera.farClipPlane;

    // Analyze the projection matrix.
    // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
    float scale = projMatrix[2, 3] / (f * n) * (f - n);
    bool depth_0_1 = Mathf.Abs(scale) < 1.5f;
    bool reverseZ = scale > 0;
    bool flipProj = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

    // http://www.humus.name/temp/Linearize%20depth.txt
    if (reverseZ)
    {
        zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
    }
    else
    {
        zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
    }

    projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

    float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
    float orthoWidth = orthoHeight * camera.aspect;
    unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

    frustum = Frustum.Create(viewProjMatrix, depth_0_1, reverseZ);

    // Left, right, top, bottom, near, far.
    for (int i = 0; i < 6; i++)
    {
        frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
    }

    m_LastFrameActive = Time.frameCount;

    m_ActualWidth = camera.pixelWidth;
    m_ActualHeight = camera.pixelHeight;
    var screenWidth = m_ActualWidth;
    var screenHeight = m_ActualHeight;
#if !UNITY_SWITCH
    if (m_frameSettings.enableStereo)
    {
        // In stereo, the render-target size comes from the XR eye texture descriptor.
        screenWidth = XRSettings.eyeTextureWidth;
        screenHeight = XRSettings.eyeTextureHeight;

        var xrDesc = XRSettings.eyeTextureDesc;
        m_ActualWidth = xrDesc.width;
        m_ActualHeight = xrDesc.height;

        ConfigureStereoMatrices();
    }
#endif

    // Unfortunately sometime (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
    m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
    RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_frameSettings.enableMSAA, m_msaaSamples);
    m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_frameSettings.enableMSAA, m_msaaSamples);
    m_HistoryRTSystem.Swap();

    // Double-buffer the viewport scale so shaders can fetch both current and previous frame values.
    int maxWidth = RTHandles.maxWidth;
    int maxHeight = RTHandles.maxHeight;
    m_ViewportScalePreviousFrame = m_ViewportScaleCurrentFrame; // Double-buffer
    m_ViewportScaleCurrentFrame.x = (float)m_ActualWidth / maxWidth;
    m_ViewportScaleCurrentFrame.y = (float)m_ActualHeight / maxHeight;

    screenSize = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
    screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

    if (vlSys != null)
    {
        vlSys.UpdatePerCameraData(this);
    }
}
// Renders the ray-traced indirect diffuse lighting, optionally denoises it with a
// spatio-temporal filter, and, in deferred mode, composites the result into the GBuffer.
// Returns true when the pass actually executed (false when the ray-tracing state is invalid).
public bool RenderIndirectDiffuse(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, uint frameCount)
{
    // Bind the indirect diffuse texture so downstream passes can read it.
    BindIndirectDiffuseTexture(cmd);

    // First thing to check is: Do we have a valid ray-tracing environment?
    HDRaytracingEnvironment rtEnvironement = m_RaytracingManager.CurrentEnvironment();
    RaytracingShader indirectDiffuseShader = m_PipelineAsset.renderPipelineResources.shaders.indirectDiffuseRaytracing;
    ComputeShader indirectDiffuseAccumulation = m_PipelineAsset.renderPipelineResources.shaders.indirectDiffuseAccumulation;

    // If the ray-tracing state is not valid, end it now.
    // (A previously-computed 'invalidState' bool duplicated the resource null checks but
    // was never read; it has been removed as dead code.)
    if (!ValidIndirectDiffuseState())
    {
        return false;
    }

    // Grab the acceleration structures and the light cluster to use
    RaytracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironement.indirectDiffuseLayerMask);
    HDRaytracingLightCluster lightCluster = m_RaytracingManager.RequestLightCluster(rtEnvironement.indirectDiffuseLayerMask);

    // Name of the ray generation shader to dispatch.
    string targetRayGen = m_RayGenIndirectDiffuseName;

    // Define the shader pass to use for the indirect diffuse pass
    cmd.SetRaytracingShaderPass(indirectDiffuseShader, "IndirectDXR");

    // Set the acceleration structure for the pass
    cmd.SetRaytracingAccelerationStructure(indirectDiffuseShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

    // Inject the ray-tracing sampling data
    cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
    cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

    // Inject the ray generation data
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironement.rayBias);
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, rtEnvironement.indirectDiffuseRayLength);
    cmd.SetRaytracingIntParams(indirectDiffuseShader, HDShaderIDs._RaytracingNumSamples, rtEnvironement.indirectDiffuseNumSamples);

    // Use the TAA frame index when available so the sample pattern matches TAA jitter;
    // otherwise cycle over 8 frames.
    int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;
    cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

    // Set the data for the ray generation
    cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._IndirectDiffuseTextureRW, m_IndirectDiffuseTexture);
    cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());

    // Set the indirect diffuse parameters
    cmd.SetRaytracingFloatParams(indirectDiffuseShader, HDShaderIDs._RaytracingIntensityClamp, rtEnvironement.indirectDiffuseClampValue);

    // Set ray count tex
    cmd.SetRaytracingIntParam(indirectDiffuseShader, HDShaderIDs._RayCountEnabled, m_RaytracingManager.rayCountManager.RayCountIsEnabled());
    cmd.SetRaytracingTextureParam(indirectDiffuseShader, targetRayGen, HDShaderIDs._RayCountTexture, m_RaytracingManager.rayCountManager.rayCountTexture);

    // Compute the pixel spread value (half the vertical FoV over the smaller screen dimension).
    float pixelSpreadAngle = Mathf.Atan(2.0f * Mathf.Tan(hdCamera.camera.fieldOfView * Mathf.PI / 360.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight));
    cmd.SetRaytracingFloatParam(indirectDiffuseShader, HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

    // LightLoop data
    cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
    cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
    cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
    cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
    cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, rtEnvironement.maxNumLightsPercell);
    cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
    cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

    // Set the data for the ray miss
    cmd.SetRaytracingTextureParam(indirectDiffuseShader, m_MissShaderName, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

    // Compute the actual resolution that is needed based on the quality
    int widthResolution = hdCamera.actualWidth;
    int heightResolution = hdCamera.actualHeight;

    // Dispatch the rays (diffuse-only keyword scoped to this dispatch).
    CoreUtils.SetKeyword(cmd, "DIFFUSE_LIGHTING_ONLY", true);
    cmd.DispatchRays(indirectDiffuseShader, targetRayGen, (uint)widthResolution, (uint)heightResolution, 1);
    CoreUtils.SetKeyword(cmd, "DIFFUSE_LIGHTING_ONLY", false);

    switch (rtEnvironement.indirectDiffuseFilterMode)
    {
        case HDRaytracingEnvironment.IndirectDiffuseFilterMode.SpatioTemporal:
        {
            // Grab the history buffer (allocate it on first use).
            RTHandleSystem.RTHandle indirectDiffuseHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedIndirectDiffuse)
                ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedIndirectDiffuse, IndirectDiffuseHistoryBufferAllocatorFunction, 1);

            // Texture dimensions
            int texWidth = hdCamera.actualWidth;
            int texHeight = hdCamera.actualHeight;

            // Evaluate the dispatch parameters (8x8 tiles).
            int areaTileSize = 8;
            int numTilesX = (texWidth + (areaTileSize - 1)) / areaTileSize;
            int numTilesY = (texHeight + (areaTileSize - 1)) / areaTileSize;

            int m_KernelFilter = indirectDiffuseAccumulation.FindKernel("RaytracingIndirectDiffuseTAA");

            // Compute the combined TAA frame
            var historyScale = new Vector2(hdCamera.actualWidth / (float)indirectDiffuseHistory.rt.width, hdCamera.actualHeight / (float)indirectDiffuseHistory.rt.height);
            cmd.SetComputeVectorParam(indirectDiffuseAccumulation, HDShaderIDs._ScreenToTargetScaleHistory, historyScale);
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, m_IndirectDiffuseTexture);
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, m_DenoiseBuffer0);
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._IndirectDiffuseHistorybufferRW, indirectDiffuseHistory);
            cmd.DispatchCompute(indirectDiffuseAccumulation, m_KernelFilter, numTilesX, numTilesY, 1);

            // Output the new history
            HDUtils.BlitCameraTexture(cmd, hdCamera, m_DenoiseBuffer0, indirectDiffuseHistory);

            // Horizontal pass of the bilateral filter
            m_KernelFilter = indirectDiffuseAccumulation.FindKernel("IndirectDiffuseFilterH");
            cmd.SetComputeIntParam(indirectDiffuseAccumulation, HDShaderIDs._RaytracingDenoiseRadius, rtEnvironement.indirectDiffuseFilterRadius);
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, indirectDiffuseHistory);
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, m_DenoiseBuffer0);
            cmd.DispatchCompute(indirectDiffuseAccumulation, m_KernelFilter, numTilesX, numTilesY, 1);

            // Vertical pass of the bilateral filter (writes back into the indirect diffuse texture).
            m_KernelFilter = indirectDiffuseAccumulation.FindKernel("IndirectDiffuseFilterV");
            cmd.SetComputeIntParam(indirectDiffuseAccumulation, HDShaderIDs._RaytracingDenoiseRadius, rtEnvironement.indirectDiffuseFilterRadius);
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, m_DenoiseBuffer0);
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
            cmd.SetComputeTextureParam(indirectDiffuseAccumulation, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, m_IndirectDiffuseTexture);
            cmd.DispatchCompute(indirectDiffuseAccumulation, m_KernelFilter, numTilesX, numTilesY, 1);
        }
        break;
    }

    // If we are in deferred mode, we need to make sure to add the indirect diffuse (that we intentionally ignored during the gbuffer pass)
    // Note that this discards the texture/object ambient occlusion. But we consider that okay given that the raytraced indirect diffuse
    // is a physically correct evaluation of that quantity
    if (hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred)
    {
        int indirectDiffuseKernel = indirectDiffuseAccumulation.FindKernel("IndirectDiffuseAccumulation");

        // Bind the source texture
        cmd.SetComputeTextureParam(indirectDiffuseAccumulation, indirectDiffuseKernel, HDShaderIDs._IndirectDiffuseTexture, m_IndirectDiffuseTexture);

        // Bind the output textures
        cmd.SetComputeTextureParam(indirectDiffuseAccumulation, indirectDiffuseKernel, HDShaderIDs._GBufferTexture[0], m_GBufferManager.GetBuffer(0));
        cmd.SetComputeTextureParam(indirectDiffuseAccumulation, indirectDiffuseKernel, HDShaderIDs._GBufferTexture[3], m_GBufferManager.GetBuffer(3));

        // Evaluate the dispatch parameters (8x8 tiles).
        int areaTileSize = 8;
        int numTilesX = (widthResolution + (areaTileSize - 1)) / areaTileSize;
        int numTilesY = (heightResolution + (areaTileSize - 1)) / areaTileSize;

        // Add the indirect diffuse to the gbuffer
        cmd.DispatchCompute(indirectDiffuseAccumulation, indirectDiffuseKernel, numTilesX, numTilesY, 1);
    }

    return true;
}
// Voxelizes the visible density volumes into the 3D V-Buffer density texture.
// This variant supports big-tile light lists and can run asynchronously (it re-sets
// the big-tile constants locally in that case, before PushGlobalParams runs).
public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes, LightLoop lightLoop)
{
    if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.Volumetrics))
    {
        return;
    }

    var visualEnvironment = VolumeManager.instance.stack.GetComponent <VisualEnvironment>();

    if (visualEnvironment.fogType.value != FogType.Volumetric)
    {
        return;
    }

    using (new ProfilingSample(cmd, "Volume Voxelization"))
    {
        int numVisibleVolumes = m_VisibleVolumeBounds.Count;

        // The kernel index is a bit mask: bit 0 = tiled lighting, bit 1 = high quality preset.
        bool tiledLighting = hdCamera.frameSettings.IsEnabled(FrameSettingsField.BigTilePrepass);
        bool highQuality = preset == VolumetricLightingPreset.High;
        int kernel = (tiledLighting ? 1 : 0) | (highQuality ? 2 : 0);

        var currFrameParams = hdCamera.vBufferParams[0];
        var cvp = currFrameParams.viewportSize;

        Vector4 resolution = new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y);
#if UNITY_2019_1_OR_NEWER
        // With the physical camera, the FoV/lens shift must come from the gate-fitted values.
        var vFoV = hdCamera.camera.GetGateFittedFieldOfView() * Mathf.Deg2Rad;
        var lensShift = hdCamera.camera.GetGateFittedLensShift();
#else
        var vFoV = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
        var lensShift = Vector2.zero;
#endif

        // Compose the matrix which allows us to compute the world space view direction.
        Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, lensShift, resolution, hdCamera.viewMatrix, false);

        // Compute texel spacing at the depth of 1 meter.
        float unitDepthTexelSpacing = HDUtils.ComputZPlaneTexelSpacing(1.0f, vFoV, resolution.y);

        Texture3D volumeAtlas = DensityVolumeManager.manager.volumeAtlas.GetAtlas();
        Vector4 volumeAtlasDimensions = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);

        if (volumeAtlas != null)
        {
            volumeAtlasDimensions.x = (float)volumeAtlas.width / volumeAtlas.depth; // 1 / number of textures
            volumeAtlasDimensions.y = volumeAtlas.width;
            volumeAtlasDimensions.z = volumeAtlas.depth;
            volumeAtlasDimensions.w = Mathf.Log(volumeAtlas.width, 2); // Max LoD
        }
        else
        {
            // No atlas available: bind an all-black volume so the shader still has a valid texture.
            volumeAtlas = CoreUtils.blackVolumeTexture;
        }

        if (hdCamera.frameSettings.VolumeVoxelizationRunsAsync())
        {
            // We explicitly set the big tile info even though it is set globally, since this could be running async before the PushGlobalParams
            cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumTileBigTileX, lightLoop.GetNumTileBigTileX(hdCamera));
            cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumTileBigTileY, lightLoop.GetNumTileBigTileY(hdCamera));
            if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.BigTilePrepass))
            {
                cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs.g_vBigTileLightList, lightLoop.GetBigTileLightList());
            }
        }

        cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);
        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
        cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeMaskAtlas, volumeAtlas);

        // TODO: set the constant buffer data only once.
        cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
        cmd.SetComputeFloatParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferUnitDepthTexelSpacing, unitDepthTexelSpacing);
        cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
        cmd.SetComputeVectorParam(m_VolumeVoxelizationCS, HDShaderIDs._VolumeMaskDimensions, volumeAtlasDimensions);

        int w = (int)resolution.x;
        int h = (int)resolution.y;

        // The shader defines GROUP_SIZE_1D = 8.
        cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
    }
}
// Voxelizes the visible density volumes into the V-Buffer density texture.
// Skipped entirely when volumetrics are off for this preset, when the active
// fog is not volumetric, or when no V-Buffer exists for this view.
public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
{
    if (preset == VolumetricLightingPreset.Off)
        return;

    var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
    if (visualEnvironment.fogType != FogType.Volumetric)
        return;

    VBuffer vBuffer = FindVBuffer(camera.GetViewID());
    if (vBuffer == null)
        return;

    using (new ProfilingSample(cmd, "Volume Voxelization"))
    {
        int volumeCount = m_VisibleVolumeBounds.Count;
        if (volumeCount == 0)
        {
            // Ideally we would just clear the density target here (the clear must
            // take the global fog into account), but clearing 3D textures does not
            // seem to work. Workaround: run the full shader with 0 density.
        }

        bool clustered = settings.lightLoopSettings.enableTileAndCluster;
        int kernel = m_VolumeVoxelizationCS.FindKernel(clustered ? "VolumeVoxelizationClustered"
                                                                 : "VolumeVoxelizationBruteforce");

        var frameParams = vBuffer.GetParameters(frameIndex);
        Vector4 resolution = frameParams.resolution;
        float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;

        // Matrix mapping pixel coordinates to world-space view directions.
        Matrix4x4 pixelCoordToViewDirWS = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);

        cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);

        // TODO: set the constant buffer data only once.
        cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, pixelCoordToViewDirWS);
        cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, volumeCount);

        // The shader defines GROUP_SIZE_1D = 8, so round the dispatch up to 8x8 groups.
        int groupsX = ((int)resolution.x + 7) / 8;
        int groupsY = ((int)resolution.y + 7) / 8;
        cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, groupsX, groupsY, 1);
    }
}
// Evaluates ray-traced ambient occlusion into outputTexture, optionally denoises
// it with the simple denoiser, and binds the result (plus its parameters) globally.
// Falls back to the default (neutral) AO texture when the ray-tracing state is invalid.
public void RenderAO(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
{
    // Let's check all the resources
    HDRaytracingEnvironment rtEnvironment = m_RaytracingManager.CurrentEnvironment();
    RayTracingShader aoShader = m_PipelineRayTracingResources.aoRaytracing;
    var aoSettings = VolumeManager.instance.stack.GetComponent<AmbientOcclusion>();

    // Check if the state is valid for evaluating ambient occlusion
    bool invalidState = rtEnvironment == null || aoShader == null
        || m_PipelineResources.textures.owenScrambledTex == null
        || m_PipelineResources.textures.scramblingTex == null;

    // If any of the previous requirements is missing, the effect is not requested or
    // no acceleration structure, set the default one and leave right away
    if (invalidState)
    {
        SetDefaultAmbientOcclusionTexture(cmd);
        return;
    }

    // Grab the acceleration structure for the target camera
    RayTracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironment.aoLayerMask);

    // Define the shader pass to use for the AO pass
    cmd.SetRayTracingShaderPass(aoShader, "VisibilityDXR");

    // Set the acceleration structure for the pass
    cmd.SetRayTracingAccelerationStructure(aoShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

    // Inject the ray-tracing sampling data
    cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
    cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

    // Inject the ray generation data
    cmd.SetRayTracingFloatParams(aoShader, HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);
    cmd.SetRayTracingFloatParams(aoShader, HDShaderIDs._RaytracingRayMaxLength, aoSettings.rayLength.value);
    cmd.SetRayTracingIntParams(aoShader, HDShaderIDs._RaytracingNumSamples, aoSettings.numSamples.value);

    // Set the data for the ray generation
    cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());

    // With TAA the jitter index provides a stable per-frame sequence; otherwise cycle over 8 frames.
    int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;
    cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

    // Value used to scale the ao intensity
    cmd.SetRayTracingFloatParam(aoShader, HDShaderIDs._RaytracingAOIntensity, aoSettings.intensity.value);

    // Ray-count debug instrumentation.
    cmd.SetRayTracingIntParam(aoShader, HDShaderIDs._RayCountEnabled, m_RaytracingManager.rayCountManager.RayCountIsEnabled());
    cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._RayCountTexture, m_RaytracingManager.rayCountManager.rayCountTexture);

    // Set the output textures
    cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._AmbientOcclusionTextureRW, m_IntermediateBuffer);
    cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._RaytracingVSNormalTexture, m_ViewSpaceNormalBuffer);

    // Run the computation
    cmd.DispatchRays(aoShader, m_RayGenShaderName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);

    // FIX: label previously read "Filter Reflection" (copy-paste from the reflection
    // pass) while the sampler already was RaytracingAmbientOcclusion.
    using (new ProfilingSample(cmd, "Filter Ambient Occlusion", CustomSamplerId.RaytracingAmbientOcclusion.GetSampler()))
    {
        if (aoSettings.enableFilter.value)
        {
            // Grab (or lazily allocate) the history buffer used by the denoiser.
            RTHandleSystem.RTHandle ambientOcclusionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedAmbientOcclusion)
                ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedAmbientOcclusion, AmbientOcclusionHistoryBufferAllocatorFunction, 1);

            // Apply the simple denoiser (AO is a single-channel signal).
            HDSimpleDenoiser simpleDenoiser = m_RaytracingManager.GetSimpleDenoiser();
            simpleDenoiser.DenoiseBuffer(cmd, hdCamera, m_IntermediateBuffer, ambientOcclusionHistory, outputTexture, aoSettings.filterRadius.value, singleChannel: true);
        }
        else
        {
            HDUtils.BlitCameraTexture(cmd, m_IntermediateBuffer, outputTexture);
        }
    }

    // Bind the textures and the params. Reuse aoSettings rather than fetching the
    // AmbientOcclusion component from the volume stack a second time.
    cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, outputTexture);
    cmd.SetGlobalVector(HDShaderIDs._AmbientOcclusionParam, new Vector4(0f, 0f, 0f, aoSettings.directLightingStrength.value));

    // TODO: All the push-debug stuff should be centralized somewhere
    (RenderPipelineManager.currentPipeline as HDRenderPipeline).PushFullScreenDebugTexture(hdCamera, cmd, outputTexture, FullScreenDebugMode.SSAO);
}
// Packs every light in lightArray into the CPU-side LightData array and uploads
// the result to the GPU buffer (used by the ray-tracing light cluster path).
// Positions are stored camera-relative; lights that do not contribute are skipped.
void BuildLightData(CommandBuffer cmd, HDCamera hdCamera, List<HDAdditionalLightData> lightArray)
{
    // Also we need to build the light list data
    if (m_LightDataGPUArray == null || m_LightDataGPUArray.count != lightArray.Count)
    {
        ResizeLightDataBuffer(lightArray.Count);
    }

    // Build the data for every light
    for (int lightIdx = 0; lightIdx < lightArray.Count; ++lightIdx)
    {
        var lightData = new LightData();

        HDAdditionalLightData additionalLightData = lightArray[lightIdx];
        // When the user deletes a light source in the editor, there is a single frame where
        // the light is null before the collection of light in the scene is triggered;
        // the workaround for this is simply to add an invalid light for that frame.
        if (additionalLightData == null)
        {
            m_LightDataCPUArray[lightIdx] = lightData;
            continue;
        }
        Light light = additionalLightData.gameObject.GetComponent<Light>();

        // Both of these positions are non-camera-relative.
        float distanceToCamera = (light.gameObject.transform.position - hdCamera.camera.transform.position).magnitude;
        float lightDistanceFade = HDUtils.ComputeLinearDistanceFade(distanceToCamera, additionalLightData.fadeDistance);

        // A light contributes if it is dimmed on and affects diffuse, specular or volumetrics,
        // and it has not fully faded out with distance.
        bool contributesToLighting = ((additionalLightData.lightDimmer > 0) && (additionalLightData.affectDiffuse || additionalLightData.affectSpecular)) || (additionalLightData.volumetricDimmer > 0);
        contributesToLighting = contributesToLighting && (lightDistanceFade > 0);

        // NOTE(review): unlike the null-light case above, this leaves the slot at lightIdx
        // with its previous contents instead of a default LightData — confirm intentional.
        if (!contributesToLighting)
        {
            continue;
        }

        lightData.lightLayers = additionalLightData.GetLightLayers();

        // Resolve the GPU light type/category from the HD light configuration.
        LightCategory lightCategory = LightCategory.Count;
        GPULightType gpuLightType = GPULightType.Point;
        GetLightGPUType(additionalLightData, light, ref gpuLightType, ref lightCategory);

        lightData.lightType = gpuLightType;
        // Store the position camera-relative to keep precision near the viewer.
        lightData.positionRWS = light.gameObject.transform.position - hdCamera.camera.transform.position;

        bool applyRangeAttenuation = additionalLightData.applyRangeAttenuation && (gpuLightType != GPULightType.ProjectorBox);

        lightData.range = light.range;

        if (applyRangeAttenuation)
        {
            lightData.rangeAttenuationScale = 1.0f / (light.range * light.range);
            lightData.rangeAttenuationBias = 1.0f;

            if (lightData.lightType == GPULightType.Rectangle)
            {
                // Rect lights are currently a special case because they use the normalized
                // [0, 1] attenuation range rather than the regular [0, r] one.
                lightData.rangeAttenuationScale = 1.0f;
            }
        }
        else // Don't apply any attenuation but do a 'step' at range
        {
            // Solve f(x) = b - (a * x)^2 where x = (d/r)^2.
            // f(0) = huge -> b = huge.
            // f(1) = 0 -> huge - a^2 = 0 -> a = sqrt(huge).
            const float hugeValue = 16777216.0f;
            const float sqrtHuge = 4096.0f;
            lightData.rangeAttenuationScale = sqrtHuge / (light.range * light.range);
            lightData.rangeAttenuationBias = hugeValue;

            if (lightData.lightType == GPULightType.Rectangle)
            {
                // Rect lights are currently a special case because they use the normalized
                // [0, 1] attenuation range rather than the regular [0, r] one.
                lightData.rangeAttenuationScale = sqrtHuge;
            }
        }

        // Final light color: linear color * intensity, optionally tinted by color temperature.
        Color value = light.color.linear * light.intensity;
        if (additionalLightData.useColorTemperature)
        {
            value *= Mathf.CorrelatedColorTemperatureToRGB(light.colorTemperature);
        }
        lightData.color = new Vector3(value.r, value.g, value.b);

        lightData.forward = light.transform.forward;
        lightData.up = light.transform.up;
        lightData.right = light.transform.right;

        if (lightData.lightType == GPULightType.ProjectorBox)
        {
            // Rescale for cookies and windowing.
            lightData.right *= 2.0f / Mathf.Max(additionalLightData.shapeWidth, 0.001f);
            lightData.up *= 2.0f / Mathf.Max(additionalLightData.shapeHeight, 0.001f);
        }
        else if (lightData.lightType == GPULightType.ProjectorPyramid)
        {
            // Get width and height for the current frustum
            var spotAngle = light.spotAngle;

            float frustumWidth, frustumHeight;

            if (additionalLightData.aspectRatio >= 1.0f)
            {
                frustumHeight = 2.0f * Mathf.Tan(spotAngle * 0.5f * Mathf.Deg2Rad);
                frustumWidth = frustumHeight * additionalLightData.aspectRatio;
            }
            else
            {
                frustumWidth = 2.0f * Mathf.Tan(spotAngle * 0.5f * Mathf.Deg2Rad);
                frustumHeight = frustumWidth / additionalLightData.aspectRatio;
            }

            // Rescale for cookies and windowing.
            lightData.right *= 2.0f / frustumWidth;
            lightData.up *= 2.0f / frustumHeight;
        }

        if (lightData.lightType == GPULightType.Spot)
        {
            var spotAngle = light.spotAngle;

            var innerConePercent = additionalLightData.GetInnerSpotPercent01();
            var cosSpotOuterHalfAngle = Mathf.Clamp(Mathf.Cos(spotAngle * 0.5f * Mathf.Deg2Rad), 0.0f, 1.0f);
            var sinSpotOuterHalfAngle = Mathf.Sqrt(1.0f - cosSpotOuterHalfAngle * cosSpotOuterHalfAngle);
            var cosSpotInnerHalfAngle = Mathf.Clamp(Mathf.Cos(spotAngle * 0.5f * innerConePercent * Mathf.Deg2Rad), 0.0f, 1.0f); // inner cone

            // Linear angular attenuation between the inner and outer cone cosines.
            var val = Mathf.Max(0.0001f, (cosSpotInnerHalfAngle - cosSpotOuterHalfAngle));
            lightData.angleScale = 1.0f / val;
            lightData.angleOffset = -cosSpotOuterHalfAngle * lightData.angleScale;

            // Rescale for cookies and windowing.
            float cotOuterHalfAngle = cosSpotOuterHalfAngle / sinSpotOuterHalfAngle;
            lightData.up *= cotOuterHalfAngle;
            lightData.right *= cotOuterHalfAngle;
        }
        else
        {
            // These are the neutral values allowing GetAngleAnttenuation in shader code to return 1.0
            lightData.angleScale = 0.0f;
            lightData.angleOffset = 1.0f;
        }

        if (lightData.lightType != GPULightType.Directional && lightData.lightType != GPULightType.ProjectorBox)
        {
            // Store the squared radius of the light to simulate a fill light.
            lightData.size = new Vector2(additionalLightData.shapeRadius * additionalLightData.shapeRadius, 0);
        }

        if (lightData.lightType == GPULightType.Rectangle || lightData.lightType == GPULightType.Tube)
        {
            lightData.size = new Vector2(additionalLightData.shapeWidth, additionalLightData.shapeHeight);
        }

        // Per-channel dimmers, all scaled by the distance fade computed above.
        lightData.lightDimmer = lightDistanceFade * (additionalLightData.lightDimmer);
        lightData.diffuseDimmer = lightDistanceFade * (additionalLightData.affectDiffuse ? additionalLightData.lightDimmer : 0);
        lightData.specularDimmer = lightDistanceFade * (additionalLightData.affectSpecular ? additionalLightData.lightDimmer * hdCamera.frameSettings.specularGlobalDimmer : 0);
        lightData.volumetricLightDimmer = lightDistanceFade * (additionalLightData.volumetricDimmer);

        // Default: no contact shadow, no cookie, no shadow map, no ray-traced area shadow.
        lightData.contactShadowIndex = -1;
        lightData.cookieIndex = -1;
        lightData.shadowIndex = -1;
        lightData.rayTracedAreaShadowIndex = -1;

        if (light != null && light.cookie != null)
        {
            // TODO: add texture atlas support for cookie textures.
            switch (light.type)
            {
                case LightType.Spot:
                    lightData.cookieIndex = m_LightLoop.cookieTexArray.FetchSlice(cmd, light.cookie);
                    break;
                case LightType.Point:
                    lightData.cookieIndex = m_LightLoop.cubeCookieTexArray.FetchSlice(cmd, light.cookie);
                    break;
            }
        }
        else if (light.type == LightType.Spot && additionalLightData.spotLightShape != SpotLightShape.Cone)
        {
            // Projectors lights must always have a cookie texture.
            // As long as the cache is a texture array and not an atlas, the 4x4 white texture will be rescaled to 128
            lightData.cookieIndex = m_LightLoop.cookieTexArray.FetchSlice(cmd, Texture2D.whiteTexture);
        }
        else if (lightData.lightType == GPULightType.Rectangle && additionalLightData.areaLightCookie != null)
        {
            lightData.cookieIndex = m_LightLoop.areaLightCookieManager.FetchSlice(cmd, additionalLightData.areaLightCookie);
        }

        {
            // Shadow dimmers are left at full strength here.
            lightData.shadowDimmer = 1.0f;
            lightData.volumetricShadowDimmer = 1.0f;
        }

        {
            // fix up shadow information
            lightData.shadowIndex = additionalLightData.shadowIndex;
        }

        // Value of max smoothness is from artists point of view, need to convert from perceptual smoothness to roughness
        lightData.minRoughness = (1.0f - additionalLightData.maxSmoothness) * (1.0f - additionalLightData.maxSmoothness);

        // No usage for the shadow masks
        lightData.shadowMaskSelector = Vector4.zero;
        {
            // use -1 to say that we don't use shadow mask
            lightData.shadowMaskSelector.x = -1.0f;
            lightData.nonLightMappedOnly = 0;
        }

        // Set the data for this light
        m_LightDataCPUArray[lightIdx] = lightData;
    }

    // Push the data to the GPU
    m_LightDataGPUArray.SetData(m_LightDataCPUArray);
}
// Renders ray-traced reflections into outputTexture. Depending on the quality mode
// the rays are dispatched at quarter or full resolution, then either bilateral-filtered
// + temporally accumulated (QuarterRes) or blitted directly (Integration).
public void RenderReflections(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, uint frameCount)
{
    // First thing to check is: Do we have a valid ray-tracing environment?
    HDRaytracingEnvironment rtEnvironement = m_RaytracingManager.CurrentEnvironment();
    BlueNoise blueNoise = m_RaytracingManager.GetBlueNoiseManager();
    ComputeShader bilateralFilter = m_PipelineAsset.renderPipelineResources.shaders.reflectionBilateralFilterCS;
    RaytracingShader reflectionShader = m_PipelineAsset.renderPipelineResources.shaders.reflectionRaytracing;

    bool invalidState = rtEnvironement == null || blueNoise == null
        || bilateralFilter == null || reflectionShader == null
        || m_PipelineResources.textures.owenScrambledTex == null
        || m_PipelineResources.textures.scramblingTex == null;

    // If no acceleration structure available, end it now
    if (invalidState)
        return;

    // Grab the acceleration structures and the light cluster to use
    RaytracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironement.reflLayerMask);
    HDRaytracingLightCluster lightCluster = m_RaytracingManager.RequestLightCluster(rtEnvironement.reflLayerMask);

    // Pick the ray-generation shader name based on the quality mode.
    string targetRayGen = "";
    switch (rtEnvironement.reflQualityMode)
    {
        case HDRaytracingEnvironment.ReflectionsQuality.QuarterRes:
            targetRayGen = m_RayGenHalfResName;
            break;
        case HDRaytracingEnvironment.ReflectionsQuality.Integration:
            targetRayGen = m_RayGenIntegrationName;
            break;
    }

    // Define the shader pass to use for the reflection pass
    cmd.SetRaytracingShaderPass(reflectionShader, "ReflectionDXR");

    // Set the acceleration structure for the pass
    cmd.SetRaytracingAccelerationStructure(reflectionShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

    // Inject the ray-tracing sampling data
    cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
    cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

    // Global reflection parameters
    cmd.SetRaytracingFloatParams(reflectionShader, HDShaderIDs._RaytracingIntensityClamp, rtEnvironement.reflClampValue);
    cmd.SetRaytracingFloatParams(reflectionShader, HDShaderIDs._RaytracingReflectionMinSmoothness, rtEnvironement.reflMinSmoothness);
    cmd.SetRaytracingFloatParams(reflectionShader, HDShaderIDs._RaytracingReflectionMaxDistance, rtEnvironement.reflBlendDistance);

    // Inject the ray generation data
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironement.rayBias);
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, rtEnvironement.reflRayLength);
    cmd.SetRaytracingIntParams(reflectionShader, HDShaderIDs._RaytracingNumSamples, rtEnvironement.reflNumMaxSamples);
    // With TAA the jitter index provides a stable per-frame sequence; otherwise cycle over 8 frames.
    int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;
    cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

    // Set the data for the ray generation
    cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._SsrLightingTextureRW, m_LightingTexture);
    cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._SsrHitPointTexture, m_HitPdfTexture);
    cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());

    // Set ray count tex
    cmd.SetRaytracingIntParam(reflectionShader, HDShaderIDs._RayCountEnabled, m_RaytracingManager.rayCountManager.RayCountIsEnabled());
    cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._RayCountTexture, m_RaytracingManager.rayCountManager.rayCountTexture);

    // Compute the pixel spread value (half the vertical FoV spread over the smaller screen dimension).
    float pixelSpreadAngle = Mathf.Atan(2.0f * Mathf.Tan(hdCamera.camera.fieldOfView * Mathf.PI / 360.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight));
    cmd.SetRaytracingFloatParam(reflectionShader, HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

    // LightLoop data
    cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
    cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
    cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
    cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
    cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, rtEnvironement.maxNumLightsPercell);
    cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
    cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

    // Evaluate the clear coat mask texture based on the lit shader mode
    RenderTargetIdentifier clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : Texture2D.blackTexture;
    cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

    // Set the data for the ray miss
    cmd.SetRaytracingTextureParam(reflectionShader, m_MissShaderName, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

    // Compute the actual resolution that is needed base on the quality
    uint widthResolution = 1, heightResolution = 1;
    switch (rtEnvironement.reflQualityMode)
    {
        case HDRaytracingEnvironment.ReflectionsQuality.QuarterRes:
            widthResolution = (uint)hdCamera.actualWidth / 2;
            heightResolution = (uint)hdCamera.actualHeight / 2;
            break;
        case HDRaytracingEnvironment.ReflectionsQuality.Integration:
            widthResolution = (uint)hdCamera.actualWidth;
            heightResolution = (uint)hdCamera.actualHeight;
            break;
    }

    // Force to disable specular lighting
    cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, 0);

    // Run the calculus
    cmd.DispatchRays(reflectionShader, targetRayGen, widthResolution, heightResolution, 1);

    // Restore the previous state of specular lighting.
    // FIX: was `? 0 : 1`, which kept specular DISABLED exactly when the frame settings
    // enabled it (and re-enabled it when disabled).
    cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, hdCamera.frameSettings.IsEnabled(FrameSettingsField.SpecularLighting) ? 1 : 0);

    using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
    {
        switch (rtEnvironement.reflQualityMode)
        {
            case HDRaytracingEnvironment.ReflectionsQuality.QuarterRes:
            {
                // Fetch the right filter to use
                int currentKernel = bilateralFilter.FindKernel("RaytracingReflectionFilter");

                // Inject all the parameters for the compute
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._SsrLightingTextureRW, m_LightingTexture);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._SsrHitPointTexture, m_HitPdfTexture);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_NoiseTexture", blueNoise.textureArray16RGB);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_VarianceTexture", m_VarianceBuffer);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_MinColorRangeTexture", m_MinBoundBuffer);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_MaxColorRangeTexture", m_MaxBoundBuffer);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_RaytracingReflectionTexture", outputTexture);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);
                cmd.SetComputeIntParam(bilateralFilter, HDShaderIDs._SpatialFilterRadius, rtEnvironement.reflSpatialFilterRadius);

                // Texture dimensions
                int texWidth = outputTexture.rt.width;
                // FIX: was `outputTexture.rt.width`, which dispatched the wrong number
                // of tile rows on non-square targets.
                int texHeight = outputTexture.rt.height;

                // Evaluate the dispatch parameters (half-res pass).
                int areaTileSize = 8;
                int numTilesXHR = (texWidth / 2 + (areaTileSize - 1)) / areaTileSize;
                int numTilesYHR = (texHeight / 2 + (areaTileSize - 1)) / areaTileSize;

                // Bind the right texture for clear coat support
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

                // Compute the texture
                cmd.DispatchCompute(bilateralFilter, currentKernel, numTilesXHR, numTilesYHR, 1);

                // Full-res dispatch parameters for the temporal accumulation pass.
                int numTilesXFR = (texWidth + (areaTileSize - 1)) / areaTileSize;
                int numTilesYFR = (texHeight + (areaTileSize - 1)) / areaTileSize;

                // Grab (or lazily allocate) the reflection history buffer.
                RTHandleSystem.RTHandle history = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                    ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

                // Fetch the right filter to use
                currentKernel = bilateralFilter.FindKernel("TemporalAccumulationFilter");
                cmd.SetComputeFloatParam(bilateralFilter, HDShaderIDs._TemporalAccumuationWeight, rtEnvironement.reflTemporalAccumulationWeight);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._AccumulatedFrameTexture, history);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._CurrentFrameTexture, outputTexture);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_MinColorRangeTexture", m_MinBoundBuffer);
                cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_MaxColorRangeTexture", m_MaxBoundBuffer);
                cmd.DispatchCompute(bilateralFilter, currentKernel, numTilesXFR, numTilesYFR, 1);
            }
            break;
            case HDRaytracingEnvironment.ReflectionsQuality.Integration:
            {
                // Full-resolution integration needs no filtering; copy the lighting straight out.
                HDUtils.BlitCameraTexture(cmd, hdCamera, m_LightingTexture, outputTexture);
            }
            break;
        }
    }
}
// Lazily builds the GUIContent/value tables that back the material debug-view
// dropdowns (material, engine, varying, properties, GBuffer). Runs once; subsequent
// calls are no-ops thanks to isDebugViewMaterialInit.
void BuildDebugRepresentation()
{
    if (!isDebugViewMaterialInit)
    {
        List<RenderPipelineMaterial> materialList = HDUtils.GetRenderPipelineMaterialList();

        // TODO: Share this code to retrieve deferred material with HDRenderPipeline
        // Locate a deferred material's BSDFData type purely as a sanity check that one
        // exists (the loop keeps the last match found).
        Type bsdfDataDeferredType = null;
        foreach (RenderPipelineMaterial material in materialList)
        {
            if (material.IsDefferedMaterial())
            {
                bsdfDataDeferredType = material.GetType().GetNestedType("BSDFData");
            }
        }

        // TODO: Handle the case of no Gbuffer material
        Debug.Assert(bsdfDataDeferredType != null);

        // Collect per-material reflection info used to enumerate debug properties.
        // FIX: removed numSurfaceDataFields / numBSDFDataFields — they were accumulated
        // here but never read (leftover from the array-preallocating variant of this code).
        List<MaterialItem> materialItems = new List<MaterialItem>();
        foreach (RenderPipelineMaterial material in materialList)
        {
            MaterialItem item = new MaterialItem();
            item.className = material.GetType().Name + "/";
            item.surfaceDataType = material.GetType().GetNestedType("SurfaceData");
            item.bsdfDataType = material.GetType().GetNestedType("BSDFData");
            materialItems.Add(item);
        }

        // Init list
        List<GUIContent> debugViewMaterialStringsList = new List<GUIContent>();
        List<int> debugViewMaterialValuesList = new List<int>();
        List<GUIContent> debugViewEngineStringsList = new List<GUIContent>();
        List<int> debugViewEngineValuesList = new List<int>();
        List<GUIContent> debugViewMaterialVaryingStringsList = new List<GUIContent>();
        List<int> debugViewMaterialVaryingValuesList = new List<int>();
        List<GUIContent> debugViewMaterialPropertiesStringsList = new List<GUIContent>();
        List<int> debugViewMaterialPropertiesValuesList = new List<int>();
        List<GUIContent> debugViewMaterialTextureStringsList = new List<GUIContent>();
        List<int> debugViewMaterialTextureValuesList = new List<int>();
        List<GUIContent> debugViewMaterialGBufferStringsList = new List<GUIContent>();
        List<int> debugViewMaterialGBufferValuesList = new List<int>();

        // First element is a reserved location and should not be used (allow to track error)
        // Special case for None since it cannot be inferred from SurfaceData/BuiltinData
        debugViewMaterialStringsList.Add(new GUIContent("None"));
        debugViewMaterialValuesList.Add(0);

        foreach (MaterialItem item in materialItems)
        {
            // BuiltinData are duplicated for each material
            // Giving the material specific types allow to move iterator at a separate range for each material
            // Otherwise, all BuiltinData will be at same offset and will broke the enum
            FillWithProperties(typeof(Builtin.BuiltinData), ref debugViewMaterialStringsList, ref debugViewMaterialValuesList, item.className);
            FillWithProperties(item.surfaceDataType, ref debugViewMaterialStringsList, ref debugViewMaterialValuesList, item.className);
        }

        // Engine properties debug
        // First element is a reserved location and should not be used (allow to track error)
        // Special case for None since it cannot be inferred from SurfaceData/BuiltinData
        debugViewEngineStringsList.Add(new GUIContent("None"));
        debugViewEngineValuesList.Add(0);

        foreach (MaterialItem item in materialItems)
        {
            FillWithProperties(item.bsdfDataType, ref debugViewEngineStringsList, ref debugViewEngineValuesList, item.className);
        }

        // For the following, no need to reserve the 0 case as it is handled in the Enum

        // Attributes debug
        FillWithPropertiesEnum(typeof(DebugViewVarying), ref debugViewMaterialVaryingStringsList, ref debugViewMaterialVaryingValuesList, "");

        // Properties debug
        FillWithPropertiesEnum(typeof(DebugViewProperties), ref debugViewMaterialPropertiesStringsList, ref debugViewMaterialPropertiesValuesList, "");

        // Gbuffer debug
        FillWithPropertiesEnum(typeof(DebugViewGbuffer), ref debugViewMaterialGBufferStringsList, ref debugViewMaterialGBufferValuesList, "");
        FillWithProperties(typeof(Lit.BSDFData), ref debugViewMaterialGBufferStringsList, ref debugViewMaterialGBufferValuesList, "");

        // Convert to array for UI (the texture lists are never populated here and
        // intentionally produce empty arrays).
        debugViewMaterialStrings = debugViewMaterialStringsList.ToArray();
        debugViewMaterialValues = debugViewMaterialValuesList.ToArray();
        debugViewEngineStrings = debugViewEngineStringsList.ToArray();
        debugViewEngineValues = debugViewEngineValuesList.ToArray();
        debugViewMaterialVaryingStrings = debugViewMaterialVaryingStringsList.ToArray();
        debugViewMaterialVaryingValues = debugViewMaterialVaryingValuesList.ToArray();
        debugViewMaterialPropertiesStrings = debugViewMaterialPropertiesStringsList.ToArray();
        debugViewMaterialPropertiesValues = debugViewMaterialPropertiesValuesList.ToArray();
        debugViewMaterialTextureStrings = debugViewMaterialTextureStringsList.ToArray();
        debugViewMaterialTextureValues = debugViewMaterialTextureValuesList.ToArray();
        debugViewMaterialGBufferStrings = debugViewMaterialGBufferStringsList.ToArray();
        debugViewMaterialGBufferValues = debugViewMaterialGBufferValuesList.ToArray();

        isDebugViewMaterialInit = true;
    }
}
public void ClearNormalTargetAndHTile(CommandBuffer cmd, HDCamera camera, Color clearColor) { // index 1 is normals HDUtils.SetRenderTarget(cmd, camera, m_RTs[1], ClearFlag.Color, clearColor); HDUtils.SetRenderTarget(cmd, camera, m_HTile, ClearFlag.Color, CoreUtils.clearColorAllBlack); }
// Builds the effective FrameSettings for a camera by combining the per-camera source
// settings with the pipeline-wide capabilities (RenderPipelineSettings) and the
// camera type, then derives the light-loop settings from the result.
public static void InitializeFrameSettings(Camera camera, RenderPipelineSettings renderPipelineSettings, FrameSettings srcFrameSettings, ref FrameSettings aggregate)
{
    if (aggregate == null)
        aggregate = new FrameSettings();

    bool isReflectionCamera = camera.cameraType == CameraType.Reflection;

    // Reflection probes render without specular since it is view dependent.
    aggregate.diffuseGlobalDimmer = 1.0f;
    aggregate.specularGlobalDimmer = isReflectionCamera ? 0.0f : 1.0f;

    aggregate.enableShadow = srcFrameSettings.enableShadow;
    aggregate.enableContactShadows = srcFrameSettings.enableContactShadows;
    aggregate.enableSSR = !isReflectionCamera && srcFrameSettings.enableSSR && renderPipelineSettings.supportSSR;
    aggregate.enableSSAO = srcFrameSettings.enableSSAO && renderPipelineSettings.supportSSAO;
    aggregate.enableSubsurfaceScattering = !isReflectionCamera && srcFrameSettings.enableSubsurfaceScattering && renderPipelineSettings.supportSubsurfaceScattering;
    aggregate.enableTransmission = srcFrameSettings.enableTransmission;

    // In the editor, the scene-view fog toggle overrides atmospheric scattering.
    aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
    if (!CoreUtils.IsSceneViewFogEnabled(camera))
        aggregate.enableAtmosphericScattering = false;

    // Volumetrics require atmospheric scattering, and are not yet supported in
    // planar reflections (TODO).
    aggregate.enableVolumetric = srcFrameSettings.enableVolumetric && renderPipelineSettings.supportVolumetric && aggregate.enableAtmosphericScattering;
    if (isReflectionCamera)
        aggregate.enableVolumetric = false;

    // Wireframe scene view does not play well with deferred, so fall back to forward-only.
    aggregate.enableForwardRenderingOnly = srcFrameSettings.enableForwardRenderingOnly || GL.wireframe || renderPipelineSettings.supportForwardOnly;
    aggregate.enableDepthPrepassWithDeferredRendering = srcFrameSettings.enableDepthPrepassWithDeferredRendering;

    aggregate.enableTransparentPrepass = srcFrameSettings.enableTransparentPrepass;
    aggregate.enableMotionVectors = !isReflectionCamera && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportMotionVectors;
    aggregate.enableObjectMotionVectors = !isReflectionCamera && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
    aggregate.enableDBuffer = srcFrameSettings.enableDBuffer && renderPipelineSettings.supportDBuffer;
    aggregate.enableRoughRefraction = srcFrameSettings.enableRoughRefraction;
    aggregate.enableTransparentPostpass = srcFrameSettings.enableTransparentPostpass;
    aggregate.enableDistortion = !isReflectionCamera && srcFrameSettings.enableDistortion;

    // Planar and real time cubemap doesn't need post process and render in FP16
    aggregate.enablePostprocess = !isReflectionCamera && srcFrameSettings.enablePostprocess;

#if UNITY_SWITCH
    aggregate.enableStereo = false;
#else
    aggregate.enableStereo = !isReflectionCamera && srcFrameSettings.enableStereo && XRSettings.isDeviceActive && (camera.stereoTargetEye == StereoTargetEyeMask.Both) && renderPipelineSettings.supportStereo;
#endif

    aggregate.enableAsyncCompute = srcFrameSettings.enableAsyncCompute && SystemInfo.supportsAsyncCompute;
    aggregate.enableOpaqueObjects = srcFrameSettings.enableOpaqueObjects;
    aggregate.enableTransparentObjects = srcFrameSettings.enableTransparentObjects;
    aggregate.enableMSAA = srcFrameSettings.enableMSAA && renderPipelineSettings.supportMSAA;
    aggregate.enableShadowMask = srcFrameSettings.enableShadowMask && renderPipelineSettings.supportShadowMask;

    aggregate.ConfigureMSAADependentSettings();
    aggregate.ConfigureStereoDependentSettings();

    // Strip expensive features for previews, except the Camera Editor preview.
    if (HDUtils.IsRegularPreviewCamera(camera))
    {
        aggregate.enableShadow = false;
        aggregate.enableContactShadows = false;
        aggregate.enableSSR = false;
        aggregate.enableSSAO = false;
        aggregate.enableAtmosphericScattering = false;
        aggregate.enableVolumetric = false;
        aggregate.enableTransparentPrepass = false;
        aggregate.enableMotionVectors = false;
        aggregate.enableObjectMotionVectors = false;
        aggregate.enableDBuffer = false;
        aggregate.enableTransparentPostpass = false;
        aggregate.enableDistortion = false;
        aggregate.enablePostprocess = false;
        aggregate.enableStereo = false;
        aggregate.enableShadowMask = false;
    }

    LightLoopSettings.InitializeLightLoopSettings(camera, aggregate, renderPipelineSettings, srcFrameSettings, ref aggregate.lightLoopSettings);
}
/// <summary>
/// Lazily builds the GUI label/value tables consumed by the debug windows to display
/// material (SurfaceData/BuiltinData), engine (BSDFData), varying, property and GBuffer
/// debug views. Safe to call repeatedly: the work runs once, guarded by isDebugViewMaterialInit.
/// </summary>
void BuildDebugRepresentation()
{
    if (isDebugViewMaterialInit)
        return;

    List<RenderPipelineMaterial> materialList = HDUtils.GetRenderPipelineMaterialList();

    // TODO: Share this code to retrieve deferred material with HDRenderPipeline
    // Find the first material that has a non-zero GBuffer count and use its BSDFData
    // as the deferred material representation.
    Type bsdfDataDeferredType = null;
    foreach (RenderPipelineMaterial material in materialList)
    {
        if (material.GetMaterialGBufferCount() > 0)
        {
            bsdfDataDeferredType = material.GetType().GetNestedType("BSDFData");
            break; // Fix: stop at the *first* match (the loop previously kept the last one, contradicting the intent above).
        }
    }

    // TODO: Handle the case of no GBuffer material
    Debug.Assert(bsdfDataDeferredType != null);

    // Collect, per material, its display prefix and nested SurfaceData/BSDFData types,
    // and tally the total field counts so the arrays below can be sized exactly.
    List<MaterialItem> materialItems = new List<MaterialItem>();
    int numSurfaceDataFields = 0;
    int numBSDFDataFields = 0;
    foreach (RenderPipelineMaterial material in materialList)
    {
        MaterialItem item = new MaterialItem();
        item.className = material.GetType().Name + "/";
        item.surfaceDataType = material.GetType().GetNestedType("SurfaceData");
        numSurfaceDataFields += item.surfaceDataType.GetFields().Length;
        item.bsdfDataType = material.GetType().GetNestedType("BSDFData");
        numBSDFDataFields += item.bsdfDataType.GetFields().Length;
        materialItems.Add(item);
    }

    // Material properties debug. BuiltinData entries are duplicated for each material; +1 for the None case.
    var num = typeof(Builtin.BuiltinData).GetFields().Length * materialList.Count + numSurfaceDataFields + 1;
    debugViewMaterialStrings = new GUIContent[num];
    debugViewMaterialValues = new int[num];
    // Special case for None since it cannot be inferred from SurfaceData/BuiltinData.
    debugViewMaterialStrings[0] = new GUIContent("None");
    debugViewMaterialValues[0] = 0;
    var index = 1; // 0 is a reserved number and should not be used (allows tracking errors)
    foreach (MaterialItem item in materialItems)
    {
        // BuiltinData are duplicated for each material
        FillWithProperties(typeof(Builtin.BuiltinData), debugViewMaterialStrings, debugViewMaterialValues, item.className, ref index);
        FillWithProperties(item.surfaceDataType, debugViewMaterialStrings, debugViewMaterialValues, item.className, ref index);
    }

    // Engine (BSDFData) properties debug; +1 for the None case.
    num = numBSDFDataFields + 1;
    debugViewEngineStrings = new GUIContent[num];
    debugViewEngineValues = new int[num];
    // 0 is a reserved number and should not be used (allows tracking errors)
    debugViewEngineStrings[0] = new GUIContent("None");
    debugViewEngineValues[0] = 0;
    index = 1;
    foreach (MaterialItem item in materialItems)
    {
        FillWithProperties(item.bsdfDataType, debugViewEngineStrings, debugViewEngineValues, item.className, ref index);
    }

    // Attributes debug
    var varyingNames = Enum.GetNames(typeof(Attributes.DebugViewVarying));
    debugViewMaterialVaryingStrings = new GUIContent[varyingNames.Length];
    debugViewMaterialVaryingValues = new int[varyingNames.Length];
    index = 0;
    FillWithPropertiesEnum(typeof(Attributes.DebugViewVarying), debugViewMaterialVaryingStrings, debugViewMaterialVaryingValues, "", ref index);

    // Properties debug
    var propertiesNames = Enum.GetNames(typeof(Attributes.DebugViewProperties));
    debugViewMaterialPropertiesStrings = new GUIContent[propertiesNames.Length];
    debugViewMaterialPropertiesValues = new int[propertiesNames.Length];
    index = 0;
    FillWithPropertiesEnum(typeof(Attributes.DebugViewProperties), debugViewMaterialPropertiesStrings, debugViewMaterialPropertiesValues, "", ref index);

    // GBuffer debug
    var gbufferNames = Enum.GetNames(typeof(Attributes.DebugViewGbuffer));
    debugViewMaterialGBufferStrings = new GUIContent[gbufferNames.Length + bsdfDataDeferredType.GetFields().Length];
    debugViewMaterialGBufferValues = new int[gbufferNames.Length + bsdfDataDeferredType.GetFields().Length];
    index = 0;
    FillWithPropertiesEnum(typeof(Attributes.DebugViewGbuffer), debugViewMaterialGBufferStrings, debugViewMaterialGBufferValues, "", ref index);
    // Fix: the arrays above are sized from bsdfDataDeferredType, so fill from that same type
    // rather than hard-coding Lit.BSDFData (the two only agree while Lit is the deferred material).
    FillWithProperties(bsdfDataDeferredType, debugViewMaterialGBufferStrings, debugViewMaterialGBufferValues, "", ref index);

    isDebugViewMaterialInit = true;
}
/// <summary>
/// Updates the sky environment for the given camera: renders/refreshes the dynamic and static
/// lighting skies, then pushes ambient probe, skybox material and reflection state into
/// Unity's global RenderSettings so both baked lighting and realtime GI see consistent data.
/// </summary>
public void UpdateEnvironment(HDCamera hdCamera, Light sunLight, CommandBuffer cmd)
{
    // WORKAROUND for building the player.
    // When building the player, for some reason we end up in a state where frameCount is not updated but all currently setup shader texture are reset to null
    // resulting in a rendering error (compute shader property not bound) that makes the player building fails...
    // So we just check if the texture is bound here so that we can setup a pink one to avoid the error without breaking half the world.
    if (Shader.GetGlobalTexture(HDShaderIDs._SkyTexture) == null)
    {
        cmd.SetGlobalTexture(HDShaderIDs._SkyTexture, CoreUtils.magentaCubeTexture);
    }

    bool isRegularPreview = HDUtils.IsRegularPreviewCamera(hdCamera.camera);

    SkyAmbientMode ambientMode = VolumeManager.instance.stack.GetComponent<VisualEnvironment>().skyAmbientMode.value;

    // Preview should never use dynamic ambient or they will conflict with main view (async readback of sky texture will update ambient probe for main view one frame later)
    if (isRegularPreview)
    {
        ambientMode = SkyAmbientMode.Static;
    }

    m_CurrentSkyRenderingContext.UpdateEnvironment(m_CurrentSky, hdCamera, sunLight, m_UpdateRequired, ambientMode == SkyAmbientMode.Dynamic, cmd);
    StaticLightingSky staticLightingSky = GetStaticLightingSky();
    // We don't want to update the static sky during preview because it contains custom lights that may change the result.
    // The consequence is that previews will use main scene static lighting but we consider this to be acceptable.
    if (staticLightingSky != null && !isRegularPreview)
    {
        m_StaticLightingSky.skySettings = staticLightingSky.skySettings;
        m_StaticLightingSkyRenderingContext.UpdateEnvironment(m_StaticLightingSky, hdCamera, sunLight, false, true, cmd);
    }

    bool useRealtimeGI = true;
#if UNITY_EDITOR
    useRealtimeGI = UnityEditor.Lightmapping.realtimeGI;
#endif
    // Working around GI current system
    // When using baked lighting, setting up the ambient probe should be sufficient => We only need to update RenderSettings.ambientProbe with either the static or visual sky ambient probe (computed from GPU)
    // When using real time GI. Enlighten will pull sky information from Skybox material. So in order for dynamic GI to work, we update the skybox material texture and then set the ambient mode to SkyBox
    // Problem: We can't check at runtime if realtime GI is enabled so we need to take extra care (see useRealtimeGI usage below)
    RenderSettings.ambientMode = AmbientMode.Custom; // Needed to specify ourselves the ambient probe (this will update internal ambient probe data passed to shaders)
    if (ambientMode == SkyAmbientMode.Static)
    {
        RenderSettings.ambientProbe = GetStaticLightingAmbientProbe();
        m_StandardSkyboxMaterial.SetTexture("_Tex", GetStaticLightingTexture());
    }
    else
    {
        RenderSettings.ambientProbe = m_CurrentSkyRenderingContext.ambientProbe;
        // Workaround in the editor:
        // When in the editor, if we use baked lighting, we need to setup the skybox material with the static lighting texture otherwise when baking, the dynamic texture will be used
        if (useRealtimeGI)
        {
            // Fall back to a black cubemap if the current sky is not valid yet (e.g. first frame).
            m_StandardSkyboxMaterial.SetTexture("_Tex", m_CurrentSky.IsValid() ? (Texture)m_CurrentSkyRenderingContext.cubemapRT : CoreUtils.blackCubeTexture);
        }
        else
        {
            m_StandardSkyboxMaterial.SetTexture("_Tex", GetStaticLightingTexture());
        }
    }

    // This is only needed if we use realtime GI otherwise enlighten won't get the right sky information
    RenderSettings.skybox = m_StandardSkyboxMaterial; // Setup this material as the default to be use in RenderSettings
    RenderSettings.ambientIntensity = 1.0f;
    RenderSettings.ambientMode = AmbientMode.Skybox; // Force skybox for our HDRI
    RenderSettings.reflectionIntensity = 1.0f;
    RenderSettings.customReflection = null;

    m_UpdateRequired = false;

    SetGlobalSkyTexture(cmd);
    // Tell shaders whether a valid sky is available for environment lighting.
    if (IsLightingSkyValid())
    {
        cmd.SetGlobalInt(HDShaderIDs._EnvLightSkyEnabled, 1);
    }
    else
    {
        cmd.SetGlobalInt(HDShaderIDs._EnvLightSkyEnabled, 0);
    }
}
/// <summary>
/// Loads every default render-pipeline resource: materials, shaders and compute shaders
/// used by HDRP (debug views, light loop, volumetrics, SSS, sky, shadow and decal passes).
/// </summary>
public void Init()
{
    // Resolve the package roots once; everything below is loaded relative to them.
    string hdrpPath = HDUtils.GetHDRenderPipelinePath();
    string corePath = HDUtils.GetCorePath();

    // Default materials and shader.
    defaultDiffuseMaterial = Load<Material>(hdrpPath + "RenderPipelineResources/DefaultHDMaterial.mat");
    defaultMirrorMaterial = Load<Material>(hdrpPath + "RenderPipelineResources/DefaultHDMirrorMaterial.mat");
    defaultDecalMaterial = Load<Material>(hdrpPath + "RenderPipelineResources/DefaultHDDecalMaterial.mat");
    defaultShader = Load<Shader>(hdrpPath + "Material/Lit/Lit.shader");

    // Debug display resources.
    debugFontTexture = Load<Texture2D>(hdrpPath + "RenderPipelineResources/DebugFont.tga");
    debugDisplayLatlongShader = Load<Shader>(hdrpPath + "Debug/DebugDisplayLatlong.Shader");
    debugViewMaterialGBufferShader = Load<Shader>(hdrpPath + "Debug/DebugViewMaterialGBuffer.Shader");
    debugViewTilesShader = Load<Shader>(hdrpPath + "Debug/DebugViewTiles.Shader");
    debugFullScreenShader = Load<Shader>(hdrpPath + "Debug/DebugFullScreen.Shader");
    debugColorPickerShader = Load<Shader>(hdrpPath + "Debug/DebugColorPicker.Shader");
    debugLightVolumeShader = Load<Shader>(hdrpPath + "Debug/DebugLightVolume.Shader");

    // Lighting and utility compute shaders.
    deferredShader = Load<Shader>(hdrpPath + "Lighting/Deferred.Shader");
    colorPyramidCS = Load<ComputeShader>(hdrpPath + "RenderPipelineResources/ColorPyramid.compute");
    depthPyramidCS = Load<ComputeShader>(hdrpPath + "RenderPipelineResources/DepthPyramid.compute");
    copyChannelCS = Load<ComputeShader>(corePath + "CoreResources/GPUCopy.compute");
    texturePaddingCS = Load<ComputeShader>(corePath + "CoreResources/TexturePadding.compute");
    applyDistortionCS = Load<ComputeShader>(hdrpPath + "RenderPipelineResources/ApplyDistorsion.compute");
    screenSpaceReflectionsCS = Load<ComputeShader>(hdrpPath + "RenderPipelineResources/ScreenSpaceReflections.compute");

    // Light-loop (tiled/clustered) compute shaders.
    clearDispatchIndirectShader = Load<ComputeShader>(hdrpPath + "Lighting/LightLoop/cleardispatchindirect.compute");
    buildDispatchIndirectShader = Load<ComputeShader>(hdrpPath + "Lighting/LightLoop/builddispatchindirect.compute");
    buildScreenAABBShader = Load<ComputeShader>(hdrpPath + "Lighting/LightLoop/scrbound.compute");
    buildPerTileLightListShader = Load<ComputeShader>(hdrpPath + "Lighting/LightLoop/lightlistbuild.compute");
    buildPerBigTileLightListShader = Load<ComputeShader>(hdrpPath + "Lighting/LightLoop/lightlistbuild-bigtile.compute");
    buildPerVoxelLightListShader = Load<ComputeShader>(hdrpPath + "Lighting/LightLoop/lightlistbuild-clustered.compute");
    buildMaterialFlagsShader = Load<ComputeShader>(hdrpPath + "Lighting/LightLoop/materialflags.compute");
    deferredComputeShader = Load<ComputeShader>(hdrpPath + "Lighting/LightLoop/Deferred.compute");
    screenSpaceShadowComputeShader = Load<ComputeShader>(hdrpPath + "Lighting/ScreenSpaceShadow.compute");

    // Volumetrics and subsurface scattering.
    volumeVoxelizationCS = Load<ComputeShader>(hdrpPath + "Lighting/Volumetrics/VolumeVoxelization.compute");
    volumetricLightingCS = Load<ComputeShader>(hdrpPath + "Lighting/Volumetrics/VolumetricLighting.compute");
    subsurfaceScatteringCS = Load<ComputeShader>(hdrpPath + "Material/SubsurfaceScattering/SubsurfaceScattering.compute");
    subsurfaceScattering = Load<Shader>(hdrpPath + "Material/SubsurfaceScattering/SubsurfaceScattering.shader");
    combineLighting = Load<Shader>(hdrpPath + "Material/SubsurfaceScattering/CombineLighting.shader");

    // General
    cameraMotionVectors = Load<Shader>(hdrpPath + "RenderPipelineResources/CameraMotionVectors.shader");
    copyStencilBuffer = Load<Shader>(hdrpPath + "RenderPipelineResources/CopyStencilBuffer.shader");
    copyDepthBuffer = Load<Shader>(hdrpPath + "RenderPipelineResources/CopyDepthBuffer.shader");
    blit = Load<Shader>(hdrpPath + "RenderPipelineResources/Blit.shader");

    // Sky
    blitCubemap = Load<Shader>(hdrpPath + "Sky/BlitCubemap.shader");
    buildProbabilityTables = Load<ComputeShader>(hdrpPath + "Material/GGXConvolution/BuildProbabilityTables.compute");
    computeGgxIblSampleData = Load<ComputeShader>(hdrpPath + "Material/GGXConvolution/ComputeGgxIblSampleData.compute");
    GGXConvolve = Load<Shader>(hdrpPath + "Material/GGXConvolution/GGXConvolve.shader");
    opaqueAtmosphericScattering = Load<Shader>(hdrpPath + "Lighting/AtmosphericScattering/OpaqueAtmosphericScattering.shader");
    hdriSky = Load<Shader>(hdrpPath + "Sky/HDRISky/HDRISky.shader");
    integrateHdriSky = Load<Shader>(hdrpPath + "Sky/HDRISky/IntegrateHDRISky.shader");
    proceduralSky = Load<Shader>(hdrpPath + "Sky/ProceduralSky/ProceduralSky.shader");
    gradientSky = Load<Shader>(hdrpPath + "Sky/GradientSky/GradientSky.shader");
    // Skybox/Cubemap is a builtin shader, so it must be resolved via Shader.Find.
    // That is fine because we are in the editor.
    skyboxCubemap = Shader.Find("Skybox/Cubemap");

    // Material
    preIntegratedFGD_GGXDisneyDiffuse = Load<Shader>(hdrpPath + "Material/PreIntegratedFGD/PreIntegratedFGD_GGXDisneyDiffuse.shader");
    preIntegratedFGD_CharlieFabricLambert = Load<Shader>(hdrpPath + "Material/PreIntegratedFGD/PreIntegratedFGD_CharlieFabricLambert.shader");

    // Utilities / Core
    encodeBC6HCS = Load<ComputeShader>(corePath + "CoreResources/EncodeBC6H.compute");
    cubeToPanoShader = Load<Shader>(corePath + "CoreResources/CubeToPano.shader");
    blitCubeTextureFace = Load<Shader>(corePath + "CoreResources/BlitCubeTextureFace.shader");

    // Shadow
    shadowClearShader = Load<Shader>(corePath + "Shadow/ShadowClear.shader");
    shadowBlurMoments = Load<ComputeShader>(corePath + "Shadow/ShadowBlurMoments.compute");
    debugShadowMapShader = Load<Shader>(corePath + "Shadow/DebugDisplayShadowMap.shader");

    // Decal
    decalNormalBuffer = Load<Shader>(hdrpPath + "Material/Decal/DecalNormalBuffer.shader");
}
/// <summary>
/// Computes the UV scale and limit for sampling this viewport out of a (possibly larger) buffer.
/// The slice count is fixed for now.
/// </summary>
public Vector4 ComputeUvScaleAndLimit(Vector2Int bufferSize)
{
    var viewport = new Vector2Int(viewportSize.x, viewportSize.y);
    return HDUtils.ComputeUvScaleAndLimit(viewport, bufferSize);
}
/// <summary>
/// Renders ray-traced reflections (tier 2 path): dispatches the reflection ray-generation
/// shader, then optionally runs a TAA + bilateral (horizontal/vertical) denoising filter
/// into <paramref name="outputTexture"/>.
/// </summary>
public void RenderReflectionsT2(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
{
    // First thing to check is: Do we have a valid ray-tracing environment?
    HDRaytracingEnvironment rtEnvironment = m_RaytracingManager.CurrentEnvironment();
    HDRenderPipeline renderPipeline = m_RaytracingManager.GetRenderPipeline();
    BlueNoise blueNoise = m_RaytracingManager.GetBlueNoiseManager();
    ComputeShader reflectionFilter = m_PipelineAsset.renderPipelineRayTracingResources.reflectionBilateralFilterCS;
    RayTracingShader reflectionShader = m_PipelineAsset.renderPipelineRayTracingResources.reflectionRaytracing;
    RenderPipelineSettings.RaytracingTier currentTier = m_PipelineAsset.currentPlatformRenderPipelineSettings.supportedRaytracingTier;

    bool invalidState = rtEnvironment == null || blueNoise == null
        || reflectionFilter == null || reflectionShader == null
        || m_PipelineResources.textures.owenScrambledTex == null || m_PipelineResources.textures.scramblingTex == null;

    // If no acceleration structure available, end it now
    if (invalidState)
    {
        return;
    }

    var settings = VolumeManager.instance.stack.GetComponent<ScreenSpaceReflection>();
    LightCluster lightClusterSettings = VolumeManager.instance.stack.GetComponent<LightCluster>();

    // Grab the acceleration structures and the light cluster to use
    RayTracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironment.reflLayerMask);
    HDRaytracingLightCluster lightCluster = m_RaytracingManager.RequestLightCluster(rtEnvironment.reflLayerMask);

    // Compute the actual resolution that is needed base on the quality
    string targetRayGen = m_RayGenIntegrationName;

    // Define the shader pass to use for the reflection pass
    cmd.SetRayTracingShaderPass(reflectionShader, "IndirectDXR");

    // Set the acceleration structure for the pass
    cmd.SetRayTracingAccelerationStructure(reflectionShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

    // Inject the ray-tracing sampling data
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

    // Global reflection parameters
    cmd.SetRayTracingFloatParams(reflectionShader, HDShaderIDs._RaytracingIntensityClamp, settings.clampValue.value);
    cmd.SetRayTracingFloatParams(reflectionShader, HDShaderIDs._RaytracingReflectionMinSmoothness, settings.minSmoothness.value);
    cmd.SetRayTracingFloatParams(reflectionShader, HDShaderIDs._RaytracingReflectSky, settings.reflectSky.value ? 1 : 0);

    // Inject the ray generation data
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);
    cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, settings.rayLength.value);
    cmd.SetRayTracingIntParams(reflectionShader, HDShaderIDs._RaytracingNumSamples, settings.numSamples.value);
    // Use the TAA frame index when available so the sample pattern matches TAA jitter; otherwise cycle over 8 frames.
    int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;
    cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

    // Set the data for the ray generation
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrLightingTextureRW, m_LightingTexture);
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrHitPointTexture, m_HitPdfTexture);
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());

    // Set ray count tex
    cmd.SetRayTracingIntParam(reflectionShader, HDShaderIDs._RayCountEnabled, m_RaytracingManager.rayCountManager.RayCountIsEnabled());
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._RayCountTexture, m_RaytracingManager.rayCountManager.rayCountTexture);

    // Compute the pixel spread value (half the vertical FOV divided by the smaller screen dimension)
    float pixelSpreadAngle = Mathf.Atan(2.0f * Mathf.Tan(hdCamera.camera.fieldOfView * Mathf.PI / 360.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight));
    cmd.SetRayTracingFloatParam(reflectionShader, HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

    // LightLoop data
    cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
    cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
    cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
    cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
    cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, lightClusterSettings.maxNumLightsPercell.value);
    cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
    cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

    // Note: Just in case, we rebind the directional light data (in case they were not)
    cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, renderPipeline.m_LightLoopLightData.directionalLightData);
    cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, renderPipeline.m_lightList.directionalLights.Count);

    // Evaluate the clear coat mask texture based on the lit shader mode
    RenderTargetIdentifier clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

    // Set the data for the ray miss
    cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

    // Compute the actual resolution that is needed base on the quality
    uint widthResolution = (uint)hdCamera.actualWidth;
    uint heightResolution = (uint)hdCamera.actualHeight;

    // Force to disable specular lighting
    cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, 0);

    // Run the computation
    cmd.DispatchRays(reflectionShader, targetRayGen, widthResolution, heightResolution, 1);

    // Restore the previous state of specular lighting
    cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, hdCamera.frameSettings.IsEnabled(FrameSettingsField.SpecularLighting) ? 1 : 0);

    using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
    {
        if (settings.enableFilter.value)
        {
            // Grab the history buffer (allocate it on first use)
            RTHandleSystem.RTHandle reflectionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

            // Texture dimensions
            int texWidth = hdCamera.actualWidth;
            int texHeight = hdCamera.actualHeight;

            // Evaluate the dispatch parameters (8x8 thread groups, rounded up)
            int areaTileSize = 8;
            int numTilesX = (texWidth + (areaTileSize - 1)) / areaTileSize;
            int numTilesY = (texHeight + (areaTileSize - 1)) / areaTileSize;

            int m_KernelFilter = reflectionFilter.FindKernel("RaytracingReflectionTAA");

            // Compute the combined TAA frame
            var historyScale = new Vector2(hdCamera.actualWidth / (float)reflectionHistory.rt.width, hdCamera.actualHeight / (float)reflectionHistory.rt.height);
            cmd.SetComputeVectorParam(reflectionFilter, HDShaderIDs._RTHandleScaleHistory, historyScale);
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, m_LightingTexture);
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, m_HitPdfTexture);
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._ReflectionHistorybufferRW, reflectionHistory);
            cmd.DispatchCompute(reflectionFilter, m_KernelFilter, numTilesX, numTilesY, 1);

            // Output the new history
            HDUtils.BlitCameraTexture(cmd, m_HitPdfTexture, reflectionHistory);

            m_KernelFilter = reflectionFilter.FindKernel("ReflBilateralFilterH");

            // Horizontal pass of the bilateral filter
            cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._RaytracingDenoiseRadius, settings.filterRadius.value);
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, reflectionHistory);
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, m_HitPdfTexture);
            cmd.DispatchCompute(reflectionFilter, m_KernelFilter, numTilesX, numTilesY, 1);

            m_KernelFilter = reflectionFilter.FindKernel("ReflBilateralFilterV");

            // Vertical pass of the bilateral filter (writes the final denoised result)
            cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._RaytracingDenoiseRadius, settings.filterRadius.value);
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, m_HitPdfTexture);
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
            cmd.SetComputeTextureParam(reflectionFilter, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, outputTexture);
            cmd.DispatchCompute(reflectionFilter, m_KernelFilter, numTilesX, numTilesY, 1);
        }
        else
        {
            // No filtering requested: copy the raw ray-traced lighting straight to the output.
            HDUtils.BlitCameraTexture(cmd, m_LightingTexture, outputTexture);
        }
    }
}
/// <summary>
/// Performs the volumetric lighting integration pass: selects a compute kernel variant
/// based on tiled lighting / reprojection / anisotropy / quality, then integrates the
/// voxelized density buffer into the V-buffer lighting texture.
/// </summary>
public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
{
    if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.Volumetrics))
    {
        return;
    }

    var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
    if (visualEnvironment.fogType.value != FogType.Volumetric)
    {
        return;
    }

    using (new ProfilingSample(cmd, "Volumetric Lighting"))
    {
        // Get the interpolated anisotropy value.
        var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();

        bool tiledLighting = hdCamera.frameSettings.IsEnabled(FrameSettingsField.BigTilePrepass);
        // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
        bool enableReprojection = Application.isPlaying && hdCamera.camera.cameraType == CameraType.Game && hdCamera.frameSettings.IsEnabled(FrameSettingsField.ReprojectionForVolumetrics);
        bool enableAnisotropy = fog.anisotropy != 0;
        bool highQuality = preset == VolumetricLightingPreset.High;

        // The kernel index is a bit mask of the four feature flags: each combination maps
        // to a distinct pre-compiled kernel in the compute shader.
        int kernel = (tiledLighting ? 1 : 0) | (enableReprojection ? 2 : 0) | (enableAnisotropy ? 4 : 0) | (highQuality ? 8 : 0);

        var currFrameParams = hdCamera.vBufferParams[0];
        var cvp = currFrameParams.viewportSize;

        // Pack viewport size and its reciprocal into one vector for the shader.
        Vector4 resolution = new Vector4(cvp.x, cvp.y, 1.0f / cvp.x, 1.0f / cvp.y);
#if UNITY_2019_1_OR_NEWER
        var vFoV = hdCamera.camera.GetGateFittedFieldOfView() * Mathf.Deg2Rad;
        var lensShift = hdCamera.camera.GetGateFittedLensShift();
#else
        var vFoV = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
        var lensShift = Vector2.zero;
#endif
        // Compose the matrix which allows us to compute the world space view direction.
        Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, lensShift, resolution, hdCamera.viewMatrix, false);

        // Compute texel spacing at the depth of 1 meter.
        float unitDepthTexelSpacing = HDUtils.ComputZPlaneTexelSpacing(1.0f, vFoV, resolution.y);

        GetHexagonalClosePackedSpheres7(m_xySeq);

        int sampleIndex = (int)frameIndex % 7;

        // TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
        // Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
        m_xySeqOffset.Set(m_xySeq[sampleIndex].x, m_xySeq[sampleIndex].y, m_zSeq[sampleIndex], frameIndex);

        // TODO: set 'm_VolumetricLightingPreset'.
        // TODO: set the constant buffer data only once.
        cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
        cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._VBufferUnitDepthTexelSpacing, unitDepthTexelSpacing);
        cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.anisotropy));
        cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, m_xySeqOffset);
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle); // Read
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, m_LightingBufferHandle); // Write

        if (enableReprojection)
        {
            // Ping-pong between the per-camera history RTs: read last frame's result, write this frame's.
            var historyRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);
            var feedbackRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);

            cmd.SetComputeIntParam(m_VolumetricLightingCS, HDShaderIDs._VBufferLightingHistoryIsValid, hdCamera.volumetricHistoryIsValid ? 1 : 0);
            cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, historyRT); // Read
            cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, feedbackRT); // Write

            hdCamera.volumetricHistoryIsValid = true; // For the next frame...
        }

        int w = (int)resolution.x;
        int h = (int)resolution.y;

        // The shader defines GROUP_SIZE_1D = 8.
        cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
    }
}
/// <summary>
/// Loads the Autodesk Interactive (masked variant) shader graph asset, resolved
/// relative to the HDRP package root. Editor-only (uses AssetDatabase).
/// </summary>
public override Shader GetAutodeskInteractiveMaskedShader()
{
    string assetPath = HDUtils.GetHDRenderPipelinePath() + "Runtime/RenderPipelineResources/ShaderGraph/AutodeskInteractiveMasked.ShaderGraph";
    return UnityEditor.AssetDatabase.LoadAssetAtPath<Shader>(assetPath);
}
/// <summary>
/// Volumetric lighting integration pass (per-view V-buffer variant): picks a compute
/// kernel based on clustered lighting and reprojection support, then integrates the
/// density buffer into the lighting-integral buffer using a jittered 7-frame sample sequence.
/// </summary>
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
{
    if (preset == VolumetricLightingPreset.Off)
    {
        return;
    }

    var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
    if (visualEnvironment.fogType != FogType.Volumetric)
    {
        return;
    }

    VBuffer vBuffer = FindVBuffer(camera.GetViewID());
    if (vBuffer == null)
    {
        return;
    }

    using (new ProfilingSample(cmd, "Volumetric Lighting"))
    {
        bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;
        // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
        bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

        int kernel;
        if (enableReprojection)
        {
            kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj" : "VolumetricLightingBruteforceReproj");
        }
        else
        {
            kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered" : "VolumetricLightingBruteforce");
        }

        var frameParams = vBuffer.GetParameters(frameIndex);
        Vector4 resolution = frameParams.resolution;
        float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;

        // Compose the matrix which allows us to compute the world space view direction.
        Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);

        Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

        // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
        // Each of them is the centroid of the interval of length 2/14.
        // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
        // That way, the running average position is close to 0.5.
        // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
        // |   |   |   | o |   |   |   |
        // |   | o |   | x |   |   |   |
        // |   | x |   | x |   | o |   |
        // |   | x | o | x |   | x |   |
        // |   | x | x | x | o | x |   |
        // | o | x | x | x | x | x |   |
        // | x | x | x | x | x | x | o |
        // | x | x | x | x | x | x | x |
        float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

        int sampleIndex = (int)frameIndex % 7;

        // TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
        // Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
        Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], frameIndex);

        // Get the interpolated asymmetry value.
        var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();

        // TODO: set 'm_VolumetricLightingPreset'.
        // TODO: set the constant buffer data only once.
        cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
        cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
        cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.asymmetry));
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer()); // Read
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write

        if (enableReprojection)
        {
            // Reprojection: write this frame's feedback, read the history from the previous frame.
            cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer(frameIndex)); // Write
            cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer(frameIndex)); // Read
        }

        int w = (int)resolution.x;
        int h = (int)resolution.y;

        // The shader defines GROUP_SIZE_1D = 8.
        cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
    }
}
// Voxelizes the visible density volumes into the 3D V-Buffer density texture.
// Early-outs when volumetrics are disabled or the active fog is not volumetric.
// NOTE(review): 'frameIndex' and 'densityVolumes' are currently unused here; kept for interface stability.
public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes)
{
    if (!hdCamera.frameSettings.enableVolumetrics)
        return;

    var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
    if (visualEnvironment.fogType.value != FogType.Volumetric)
        return;

    using (new ProfilingSample(cmd, "Volume Voxelization"))
    {
        int volumeCount = m_VisibleVolumeBounds.Count;

        bool useHighQuality = preset == VolumetricLightingPreset.High;
        bool useClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;

        // Pick the compute kernel matching the quality preset and light-list mode.
        string kernelName = useHighQuality
            ? (useClustered ? "VolumeVoxelizationClusteredHQ" : "VolumeVoxelizationBruteforceHQ")
            : (useClustered ? "VolumeVoxelizationClusteredMQ" : "VolumeVoxelizationBruteforceMQ");
        int kernel = m_VolumeVoxelizationCS.FindKernel(kernelName);

        var frameParams = hdCamera.vBufferParams[0];
        Vector4 resolution = frameParams.resolution;
        float vFoV = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;

        // Compose the matrix which allows us to compute the world space view direction.
        Matrix4x4 pixelCoordToViewDirWS = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);

        Texture3D volumeAtlas = DensityVolumeManager.manager.volumeAtlas.volumeAtlas;
        Vector4 volumeAtlasDimensions = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);

        if (volumeAtlas != null)
        {
            volumeAtlasDimensions.x = (float)volumeAtlas.width / volumeAtlas.depth; // 1 / number of textures
            volumeAtlasDimensions.y = volumeAtlas.width;
            volumeAtlasDimensions.z = volumeAtlas.depth;
            volumeAtlasDimensions.w = Mathf.Log(volumeAtlas.width, 2);              // Max LoD
        }
        else
        {
            // Fall back to an all-black volume so the shader always has a valid mask to sample.
            volumeAtlas = CoreUtils.blackVolumeTexture;
        }

        cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);
        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
        cmd.SetComputeBufferParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
        cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeMaskAtlas, volumeAtlas);

        // TODO: set the constant buffer data only once.
        cmd.SetComputeMatrixParam(m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, pixelCoordToViewDirWS);
        cmd.SetComputeIntParam(m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, volumeCount);
        cmd.SetComputeVectorParam(m_VolumeVoxelizationCS, HDShaderIDs._VolumeMaskDimensions, volumeAtlasDimensions);

        // The shader defines GROUP_SIZE_1D = 8; dispatch one thread per V-Buffer texel, rounded up.
        int width = (int)resolution.x;
        int height = (int)resolution.y;
        cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (width + 7) / 8, (height + 7) / 8, 1);
    }
}
// Loads every default shader, compute shader, material and texture resource used by HDRP
// into the 'shaders', 'materials', 'textures' and 'shaderGraphs' resource structs.
// Paths are resolved relative to the HDRP package root; note the mixed ".Shader"/".shader"
// extension casing is preserved from the on-disk asset names — do not normalize it.
public void Init()
{
    // Load default renderPipelineResources / Material / Shader
    string HDRenderPipelinePath = HDUtils.GetHDRenderPipelinePath() + "Runtime/";
    string CorePath = HDUtils.GetHDRenderPipelinePath() + "Runtime/Core/"; // HDUtils.GetCorePath(); // All CoreRP have been move to HDRP currently for out of preview of SRP and LW

    // Shaders
    shaders = new ShaderResources
    {
        // Defaults
        defaultPS = Load<Shader>(HDRenderPipelinePath + "Material/Lit/Lit.shader"),

        // Debug
        debugDisplayLatlongPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugDisplayLatlong.Shader"),
        debugViewMaterialGBufferPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugViewMaterialGBuffer.Shader"),
        debugViewTilesPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugViewTiles.Shader"),
        debugFullScreenPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugFullScreen.Shader"),
        debugColorPickerPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugColorPicker.Shader"),
        debugLightVolumePS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugLightVolumes.Shader"),
        debugLightVolumeCS = Load<ComputeShader>(HDRenderPipelinePath + "Debug/DebugLightVolumes.compute"),

        // Lighting
        deferredPS = Load<Shader>(HDRenderPipelinePath + "Lighting/Deferred.Shader"),
        colorPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/ColorPyramid.compute"),
        depthPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/DepthPyramid.compute"),
        copyChannelCS = Load<ComputeShader>(CorePath + "CoreResources/GPUCopy.compute"),
        applyDistortionCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/Distortion/ApplyDistorsion.compute"),
        screenSpaceReflectionsCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/ScreenSpaceLighting/ScreenSpaceReflections.compute"),

        // Lighting tile pass
        clearDispatchIndirectCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/cleardispatchindirect.compute"),
        buildDispatchIndirectCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/builddispatchindirect.compute"),
        buildScreenAABBCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/scrbound.compute"),
        buildPerTileLightListCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild.compute"),
        buildPerBigTileLightListCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-bigtile.compute"),
        buildPerVoxelLightListCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-clustered.compute"),
        buildMaterialFlagsCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/materialflags.compute"),
        deferredCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/Deferred.compute"),
        screenSpaceShadowCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Shadow/ScreenSpaceShadow.compute"),
        volumeVoxelizationCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/VolumetricLighting/VolumeVoxelization.compute"),
        volumetricLightingCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/VolumetricLighting/VolumetricLighting.compute"),
        subsurfaceScatteringCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.compute"),
        combineLightingPS = Load<Shader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/CombineLighting.shader"),

        // General
        cameraMotionVectorsPS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/MotionVectors/CameraMotionVectors.shader"),
        copyStencilBufferPS = Load<Shader>(HDRenderPipelinePath + "ShaderLibrary/CopyStencilBuffer.shader"),
        copyDepthBufferPS = Load<Shader>(HDRenderPipelinePath + "ShaderLibrary/CopyDepthBuffer.shader"),
        blitPS = Load<Shader>(HDRenderPipelinePath + "ShaderLibrary/Blit.shader"),

        // Sky
        blitCubemapPS = Load<Shader>(HDRenderPipelinePath + "Sky/BlitCubemap.shader"),
        buildProbabilityTablesCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/BuildProbabilityTables.compute"),
        computeGgxIblSampleDataCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/ComputeGgxIblSampleData.compute"),
        GGXConvolvePS = Load<Shader>(HDRenderPipelinePath + "Material/GGXConvolution/GGXConvolve.shader"),
        opaqueAtmosphericScatteringPS = Load<Shader>(HDRenderPipelinePath + "Lighting/AtmosphericScattering/OpaqueAtmosphericScattering.shader"),
        hdriSkyPS = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/HDRISky.shader"),
        integrateHdriSkyPS = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/IntegrateHDRISky.shader"),
        proceduralSkyPS = Load<Shader>(HDRenderPipelinePath + "Sky/ProceduralSky/ProceduralSky.shader"),
        gradientSkyPS = Load<Shader>(HDRenderPipelinePath + "Sky/GradientSky/GradientSky.shader"),
        // Skybox/Cubemap is a builtin shader, must use Shader.Find to access it. It is fine because we are in the editor
        skyboxCubemapPS = Shader.Find("Skybox/Cubemap"),

        // Material
        preIntegratedFGD_GGXDisneyDiffusePS = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD/PreIntegratedFGD_GGXDisneyDiffuse.shader"),
        preIntegratedFGD_CharlieFabricLambertPS = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD/PreIntegratedFGD_CharlieFabricLambert.shader"),
        preIntegratedFGD_CookTorrancePS = Load<Shader>(HDRenderPipelinePath + "Material/AxF/PreIntegratedFGD_CookTorrance.shader"),
        preIntegratedFGD_WardPS = Load<Shader>(HDRenderPipelinePath + "Material/AxF/PreIntegratedFGD_Ward.shader"),

        // Utilities / Core
        encodeBC6HCS = Load<ComputeShader>(CorePath + "CoreResources/EncodeBC6H.compute"),
        cubeToPanoPS = Load<Shader>(CorePath + "CoreResources/CubeToPano.shader"),
        blitCubeTextureFacePS = Load<Shader>(CorePath + "CoreResources/BlitCubeTextureFace.shader"),

        // Shadow
        shadowClearPS = Load<Shader>(HDRenderPipelinePath + "Lighting/Shadow/ShadowClear.shader"),
        shadowBlurMomentsCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Shadow/ShadowBlurMoments.compute"),
        debugShadowMapPS = Load<Shader>(HDRenderPipelinePath + "Lighting/Shadow/DebugDisplayShadowMap.shader"),
        debugHDShadowMapPS = Load<Shader>(HDRenderPipelinePath + "Lighting/Shadow/DebugDisplayHDShadowMap.shader"),

        // Decal
        decalNormalBufferPS = Load<Shader>(HDRenderPipelinePath + "Material/Decal/DecalNormalBuffer.shader"),

        // MSAA
        depthValuesPS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/MSAA/DepthValues.shader"),
        aoResolvePS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/MSAA/AOResolve.shader"),
        colorResolvePS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/MSAA/ColorResolve.shader"),
    };

    // Materials
    materials = new MaterialResources
    {
        // Defaults
        defaultDiffuseMat = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/Material/DefaultHDMaterial.mat"),
        defaultMirrorMat = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/Material/DefaultHDMirrorMaterial.mat"),
        defaultDecalMat = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/Material/DefaultHDDecalMaterial.mat"),
        defaultTerrainMat = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/Material/DefaultHDTerrainMaterial.mat"),
    };

    // Textures
    textures = new TextureResources
    {
        // Debug
        debugFontTex = Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/DebugFont.tga"),
        colorGradient = Load<Texture2D>(HDRenderPipelinePath + "Debug/ColorGradient.png"),
    };

    // ShaderGraphs (none to preload yet; struct kept for symmetry with the other resource groups)
    shaderGraphs = new ShaderGraphResources
    {
    };
}
// Runs the volumetric-lighting compute pass: reads the voxelized density buffer and
// integrates in-scattered lighting into the V-Buffer, optionally with temporal reprojection.
// Early-outs when volumetrics are disabled or the active fog is not volumetric.
public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
{
    if (!hdCamera.frameSettings.enableVolumetrics)
    {
        return;
    }
    var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
    if (visualEnvironment.fogType.value != FogType.Volumetric)
    {
        return;
    }
    using (new ProfilingSample(cmd, "Volumetric Lighting"))
    {
        // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
        bool highQuality = preset == VolumetricLightingPreset.High;
        bool enableClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
        bool enableReprojection = Application.isPlaying && hdCamera.camera.cameraType == CameraType.Game;

        // Select the kernel variant: quality preset x light-list mode x reprojection.
        int kernel;
        if (highQuality)
        {
            if (enableReprojection)
            {
                kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReprojHQ"
                                                                           : "VolumetricLightingBruteforceReprojHQ");
            }
            else
            {
                kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredHQ"
                                                                           : "VolumetricLightingBruteforceHQ");
            }
        }
        else
        {
            if (enableReprojection)
            {
                kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReprojMQ"
                                                                           : "VolumetricLightingBruteforceReprojMQ");
            }
            else
            {
                kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredMQ"
                                                                           : "VolumetricLightingBruteforceMQ");
            }
        }

        var frameParams = hdCamera.vBufferParams[0];
        Vector4 resolution = frameParams.resolution;
        float vFoV = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;

        // Compose the matrix which allows us to compute the world space view direction.
        Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);

        // Per-frame jitter: XY from a hexagonal close packing, Z from the sequence below.
        Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

        // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
        // Each of them is the centroid of the interval of length 2/14.
        // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
        // That way, the running average position is close to 0.5.
        // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
        // |   |   |   | o |   |   |   |
        // |   | o |   | x |   |   |   |
        // |   | x |   | x |   | o |   |
        // |   | x | o | x |   | x |   |
        // |   | x | x | x | o | x |   |
        // | o | x | x | x | x | x |   |
        // | x | x | x | x | x | x | o |
        // | x | x | x | x | x | x | x |
        float[] zSeq = { 7.0f / 14.0f, 3.0f / 14.0f, 11.0f / 14.0f, 5.0f / 14.0f, 9.0f / 14.0f, 1.0f / 14.0f, 13.0f / 14.0f };

        int sampleIndex = (int)frameIndex % 7;

        // TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
        // Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
        Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], frameIndex);

        // Get the interpolated anisotropy value.
        var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();

        // TODO: set 'm_VolumetricLightingPreset'.
        // TODO: set the constant buffer data only once.
        cmd.SetComputeMatrixParam(m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
        cmd.SetComputeVectorParam(m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
        cmd.SetComputeFloatParam(m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.anisotropy));
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, m_DensityBufferHandle);       // Read
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, m_LightingBufferHandle); // Write

        if (enableReprojection)
        {
            var historyRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);
            var feedbackRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting);

            // Detect if the history buffer has been recreated or resized.
            Vector3Int currentResolutionOfHistoryBuffer = new Vector3Int();
            currentResolutionOfHistoryBuffer.x = historyRT.rt.width;
            currentResolutionOfHistoryBuffer.y = historyRT.rt.height;
            currentResolutionOfHistoryBuffer.z = historyRT.rt.volumeDepth;

            // We allow downsizing, as this does not cause a reallocation.
            bool validHistory = (currentResolutionOfHistoryBuffer.x <= m_PreviousResolutionOfHistoryBuffer.x &&
                                 currentResolutionOfHistoryBuffer.y <= m_PreviousResolutionOfHistoryBuffer.y &&
                                 currentResolutionOfHistoryBuffer.z <= m_PreviousResolutionOfHistoryBuffer.z);

            cmd.SetComputeIntParam(m_VolumetricLightingCS, HDShaderIDs._VBufferLightingHistoryIsValid, validHistory ? 1 : 0);
            cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, historyRT);   // Read
            cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, feedbackRT); // Write

            // Record this frame's history resolution so the next frame can detect reallocation.
            m_PreviousResolutionOfHistoryBuffer = currentResolutionOfHistoryBuffer;
        }

        int w = (int)resolution.x;
        int h = (int)resolution.y;

        // The shader defines GROUP_SIZE_1D = 8.
        cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
    }
}
// Pass all the systems that may want to update per-camera data here.
// That way you will never update an HDCamera and forget to update the dependent system.
// Per-frame camera update: (re)allocates history buffers on demand, computes the
// view/projection matrices (with optional TAA jitter and stereo handling), derives
// depth/ortho/frustum parameters, and finally notifies dependent systems.
// NOTE(review): statement order is load-bearing throughout (prev-matrix capture before
// ConfigureStereoMatrices, history Swap before size queries) — do not reorder.
public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys, MSAASamples msaaSamples)
{
    // store a shortcut on HDAdditionalCameraData (done here and not in the constructor as
    // we don't create HDCamera at every frame and user can change the HDAdditionalData later (Like when they create a new scene).
    m_AdditionalCameraData = camera.GetComponent<HDAdditionalCameraData>();
    m_frameSettings = currentFrameSettings;

    // Handle memory allocation.
    {
        bool isColorPyramidHistoryRequired = m_frameSettings.enableSSR; // TODO: TAA as well
        bool isVolumetricHistoryRequired = m_frameSettings.enableVolumetrics && m_frameSettings.enableReprojectionForVolumetrics;
        int numColorPyramidBuffersRequired = isColorPyramidHistoryRequired ? 2 : 1; // TODO: 1 -> 0
        int numVolumetricBuffersRequired = isVolumetricHistoryRequired ? 2 : 0;     // History + feedback
        if ((numColorPyramidBuffersAllocated != numColorPyramidBuffersRequired) ||
            (numVolumetricBuffersAllocated != numVolumetricBuffersRequired))
        {
            // Reinit the system.
            colorPyramidHistoryIsValid = false;
            vlSys.DeinitializePerCameraData(this);

            // The history system only supports the "nuke all" option.
            m_HistoryRTSystem.Dispose();
            m_HistoryRTSystem = new BufferedRTHandleSystem();

            if (numColorPyramidBuffersRequired != 0)
            {
                AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain, HistoryBufferAllocatorFunction, numColorPyramidBuffersRequired);
                colorPyramidHistoryIsValid = false;
            }

            vlSys.InitializePerCameraData(this, numVolumetricBuffersRequired);

            // Mark as init.
            numColorPyramidBuffersAllocated = numColorPyramidBuffersRequired;
            numVolumetricBuffersAllocated = numVolumetricBuffersRequired;
        }
    }

    // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
    // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
    bool taaEnabled = camera.cameraType == CameraType.Game &&
                      HDUtils.IsTemporalAntialiasingActive(postProcessLayer) &&
                      m_frameSettings.enablePostprocess;
    var nonJitteredCameraProj = camera.projectionMatrix;
    var cameraProj = taaEnabled ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera) : nonJitteredCameraProj;

    // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
    // (different Z value ranges etc.)
    var gpuProj = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
    var gpuView = camera.worldToCameraMatrix;
    var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

    // Update viewport sizes.
    m_ViewportSizePrevFrame = new Vector2Int(m_ActualWidth, m_ActualHeight);
    m_ActualWidth = Math.Max(camera.pixelWidth, 1);
    m_ActualHeight = Math.Max(camera.pixelHeight, 1);
    var screenWidth = m_ActualWidth;
    var screenHeight = m_ActualHeight;
    textureWidthScaling = new Vector4(1.0f, 1.0f, 0.0f, 0.0f);
    numEyes = camera.stereoEnabled ? (uint)2 : (uint)1; // TODO VR: Generalize this when support for >2 eyes comes out with XR SDK

    if (camera.stereoEnabled)
    {
        textureWidthScaling = new Vector4(2.0f, 0.5f, 0.0f, 0.0f);
        for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
        {
            // For VR, TAA proj matrices don't need to be jittered
            var currProjStereo = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
            var gpuCurrProjStereo = GL.GetGPUProjectionMatrix(currProjStereo, true);
            var gpuCurrViewStereo = camera.GetStereoViewMatrix((Camera.StereoscopicEye)eyeIndex);
            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuCurrViewStereo.SetColumn(3, new Vector4(0, 0, 0, 1));
            }
            var gpuCurrVPStereo = gpuCurrProjStereo * gpuCurrViewStereo;

            // A camera could be rendered multiple times per frame, only updates the previous view proj & pos if needed
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevViewMatrixStereo[eyeIndex] = gpuCurrViewStereo;
                    prevViewProjMatrixStereo[eyeIndex] = gpuCurrVPStereo;
                }
                else
                {
                    prevViewMatrixStereo[eyeIndex] = viewMatrixStereo[eyeIndex];
                    prevViewProjMatrixStereo[eyeIndex] = GetViewProjMatrixStereo(eyeIndex); // Grabbing this before ConfigureStereoMatrices updates view/proj
                }
                isFirstFrame = false;
            }
        }
        isFirstFrame = true; // So that mono vars can still update when stereo active

        screenWidth = XRGraphics.eyeTextureWidth;
        screenHeight = XRGraphics.eyeTextureHeight;
        var xrDesc = XRGraphics.eyeTextureDesc;
        m_ActualWidth = xrDesc.width;
        m_ActualHeight = xrDesc.height;
    }

    if (ShaderConfig.s_CameraRelativeRendering != 0)
    {
        // Zero out the translation component.
        gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
    }
    var gpuVP = gpuNonJitteredProj * gpuView;

    // A camera could be rendered multiple times per frame, only updates the previous view proj & pos if needed
    // Note: if your first rendered view during the frame is not the Game view, everything breaks.
    if (m_LastFrameActive != Time.frameCount)
    {
        if (isFirstFrame)
        {
            prevWorldSpaceCameraPos = camera.transform.position;
            prevViewProjMatrix = gpuVP;
        }
        else
        {
            prevWorldSpaceCameraPos = worldSpaceCameraPos;
            prevViewProjMatrix = nonJitteredViewProjMatrix;
        }
        isFirstFrame = false;
    }

    // In stereo, this corresponds to the center eye position
    worldSpaceCameraPos = camera.transform.position;
    taaFrameIndex = taaEnabled ? (uint)postProcessLayer.temporalAntialiasing.sampleIndex : 0;
    taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                   Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));
    viewMatrix = gpuView;
    projMatrix = gpuProj;
    nonJitteredProjMatrix = gpuNonJitteredProj;
    ConfigureStereoMatrices();

    if (ShaderConfig.s_CameraRelativeRendering != 0)
    {
        Matrix4x4 cameraDisplacement = Matrix4x4.Translate(worldSpaceCameraPos - prevWorldSpaceCameraPos);
        prevViewProjMatrix *= cameraDisplacement; // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
    }

    float n = camera.nearClipPlane;
    float f = camera.farClipPlane;

    // Analyze the projection matrix.
    // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
    float scale = projMatrix[2, 3] / (f * n) * (f - n);
    bool depth_0_1 = Mathf.Abs(scale) < 1.5f;
    bool reverseZ = scale > 0;
    bool flipProj = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

    // http://www.humus.name/temp/Linearize%20depth.txt
    if (reverseZ)
    {
        zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
    }
    else
    {
        zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
    }

    projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

    float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
    float orthoWidth = orthoHeight * camera.aspect;
    unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

    Frustum.Create(frustum, viewProjMatrix, depth_0_1, reverseZ);

    // Left, right, top, bottom, near, far.
    for (int i = 0; i < 6; i++)
    {
        frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
    }

    m_LastFrameActive = Time.frameCount;

    // TODO: cache this, or make the history system spill the beans...
    Vector2Int prevColorPyramidBufferSize = Vector2Int.zero;
    if (numColorPyramidBuffersAllocated > 0)
    {
        var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;
        prevColorPyramidBufferSize.x = rt.width;
        prevColorPyramidBufferSize.y = rt.height;
    }

    // TODO: cache this, or make the history system spill the beans...
    Vector3Int prevVolumetricBufferSize = Vector3Int.zero;
    if (numVolumetricBuffersAllocated != 0)
    {
        var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;
        prevVolumetricBufferSize.x = rt.width;
        prevVolumetricBufferSize.y = rt.height;
        prevVolumetricBufferSize.z = rt.volumeDepth;
    }

    // Unfortunately sometime (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
    m_msaaSamples = msaaSamples;
    RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_msaaSamples);
    m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_msaaSamples);
    m_HistoryRTSystem.Swap();

    // After the swap, re-query the buffer sizes to detect a reallocation this frame.
    Vector3Int currColorPyramidBufferSize = Vector3Int.zero;
    if (numColorPyramidBuffersAllocated != 0)
    {
        var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;
        currColorPyramidBufferSize.x = rt.width;
        currColorPyramidBufferSize.y = rt.height;
        if ((currColorPyramidBufferSize.x != prevColorPyramidBufferSize.x) ||
            (currColorPyramidBufferSize.y != prevColorPyramidBufferSize.y))
        {
            // A reallocation has happened, so the new texture likely contains garbage.
            colorPyramidHistoryIsValid = false;
        }
    }

    Vector3Int currVolumetricBufferSize = Vector3Int.zero;
    if (numVolumetricBuffersAllocated != 0)
    {
        var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;
        currVolumetricBufferSize.x = rt.width;
        currVolumetricBufferSize.y = rt.height;
        currVolumetricBufferSize.z = rt.volumeDepth;
        if ((currVolumetricBufferSize.x != prevVolumetricBufferSize.x) ||
            (currVolumetricBufferSize.y != prevVolumetricBufferSize.y) ||
            (currVolumetricBufferSize.z != prevVolumetricBufferSize.z))
        {
            // A reallocation has happened, so the new texture likely contains garbage.
            volumetricHistoryIsValid = false;
        }
    }

    // Viewport scales are expressed relative to the (shared) maximum RTHandle size.
    int maxWidth = RTHandles.maxWidth;
    int maxHeight = RTHandles.maxHeight;
    Vector2 rcpTextureSize = Vector2.one / new Vector2(maxWidth, maxHeight);
    m_ViewportScalePreviousFrame = m_ViewportSizePrevFrame * rcpTextureSize;
    m_ViewportScaleCurrentFrame = new Vector2Int(m_ActualWidth, m_ActualHeight) * rcpTextureSize;

    screenSize = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
    screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

    if (vlSys != null)
    {
        vlSys.UpdatePerCameraData(this);
    }
    UpdateVolumeParameters();
}
public void Init()
{
    // Loads every default shader, compute shader, material and texture resource used by HDRP.
    // NOTE(review): Load<T> is declared elsewhere in this file — presumably an editor-side
    // asset load keyed by path; the exact path strings (including casing) must not be changed.
    string HDRenderPipelinePath = HDUtils.GetHDRenderPipelinePath() + "Runtime/";
    // All CoreRP resources currently live inside HDRP (moved for the out-of-preview SRP/LW work),
    // hence this is not HDUtils.GetCorePath().
    string CorePath = HDUtils.GetHDRenderPipelinePath() + "Runtime/Core/";

    // Shaders
    shaders = new ShaderResources
    {
        // Defaults
        defaultPS = Load<Shader>(HDRenderPipelinePath + "Material/Lit/Lit.shader"),

        // Debug
        debugDisplayLatlongPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugDisplayLatlong.Shader"),
        debugViewMaterialGBufferPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugViewMaterialGBuffer.Shader"),
        debugViewTilesPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugViewTiles.Shader"),
        debugFullScreenPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugFullScreen.Shader"),
        debugColorPickerPS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugColorPicker.Shader"),
        debugLightVolumePS = Load<Shader>(HDRenderPipelinePath + "Debug/DebugLightVolumes.Shader"),
        debugLightVolumeCS = Load<ComputeShader>(HDRenderPipelinePath + "Debug/DebugLightVolumes.compute"),

        // Lighting
        deferredPS = Load<Shader>(HDRenderPipelinePath + "Lighting/Deferred.Shader"),
        colorPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/ColorPyramid.compute"),
        colorPyramidPS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/ColorPyramidPS.Shader"),
        depthPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/DepthPyramid.compute"),
        copyChannelCS = Load<ComputeShader>(CorePath + "CoreResources/GPUCopy.compute"),
        // NOTE(review): "ApplyDistorsion" is the on-disk spelling of the asset — do not "fix" it.
        applyDistortionCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/Distortion/ApplyDistorsion.compute"),
        screenSpaceReflectionsCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/ScreenSpaceLighting/ScreenSpaceReflections.compute"),

        // Lighting tile pass
        clearDispatchIndirectCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/cleardispatchindirect.compute"),
        buildDispatchIndirectCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/builddispatchindirect.compute"),
        buildScreenAABBCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/scrbound.compute"),
        buildPerTileLightListCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild.compute"),
        buildPerBigTileLightListCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-bigtile.compute"),
        buildPerVoxelLightListCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-clustered.compute"),
        buildMaterialFlagsCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/materialflags.compute"),
        deferredCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/Deferred.compute"),
        screenSpaceShadowCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Shadow/ScreenSpaceShadow.compute"),
        volumeVoxelizationCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/VolumetricLighting/VolumeVoxelization.compute"),
        volumetricLightingCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/VolumetricLighting/VolumetricLighting.compute"),
        deferredTilePS = Load<Shader>(HDRenderPipelinePath + "Lighting/LightLoop/DeferredTile.shader"),
        subsurfaceScatteringCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.compute"),
        combineLightingPS = Load<Shader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/CombineLighting.shader"),

        // General
        cameraMotionVectorsPS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/MotionVectors/CameraMotionVectors.shader"),
        copyStencilBufferPS = Load<Shader>(HDRenderPipelinePath + "ShaderLibrary/CopyStencilBuffer.shader"),
        copyDepthBufferPS = Load<Shader>(HDRenderPipelinePath + "ShaderLibrary/CopyDepthBuffer.shader"),
        blitPS = Load<Shader>(HDRenderPipelinePath + "ShaderLibrary/Blit.shader"),

        // Sky
        blitCubemapPS = Load<Shader>(HDRenderPipelinePath + "Sky/BlitCubemap.shader"),
        buildProbabilityTablesCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/BuildProbabilityTables.compute"),
        computeGgxIblSampleDataCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/ComputeGgxIblSampleData.compute"),
        GGXConvolvePS = Load<Shader>(HDRenderPipelinePath + "Material/GGXConvolution/GGXConvolve.shader"),
        charlieConvolvePS = Load<Shader>(HDRenderPipelinePath + "Material/Fabric/CharlieConvolve.shader"),
        opaqueAtmosphericScatteringPS = Load<Shader>(HDRenderPipelinePath + "Lighting/AtmosphericScattering/OpaqueAtmosphericScattering.shader"),
        hdriSkyPS = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/HDRISky.shader"),
        integrateHdriSkyPS = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/IntegrateHDRISky.shader"),
        proceduralSkyPS = Load<Shader>(HDRenderPipelinePath + "Sky/ProceduralSky/ProceduralSky.shader"),
        gradientSkyPS = Load<Shader>(HDRenderPipelinePath + "Sky/GradientSky/GradientSky.shader"),
        ambientProbeConvolutionCS = Load<ComputeShader>(HDRenderPipelinePath + "Sky/AmbientProbeConvolution.compute"),
        // Skybox/Cubemap is a builtin shader, so it must be accessed via Shader.Find instead of Load.
        // This is fine because this code runs in the editor.
        skyboxCubemapPS = Shader.Find("Skybox/Cubemap"),

        // Material
        preIntegratedFGD_GGXDisneyDiffusePS = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD/PreIntegratedFGD_GGXDisneyDiffuse.shader"),
        preIntegratedFGD_CharlieFabricLambertPS = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD/PreIntegratedFGD_CharlieFabricLambert.shader"),
        preIntegratedFGD_CookTorrancePS = Load<Shader>(HDRenderPipelinePath + "Material/AxF/PreIntegratedFGD_CookTorrance.shader"),
        preIntegratedFGD_WardPS = Load<Shader>(HDRenderPipelinePath + "Material/AxF/PreIntegratedFGD_Ward.shader"),

        // Utilities / Core
        encodeBC6HCS = Load<ComputeShader>(CorePath + "CoreResources/EncodeBC6H.compute"),
        cubeToPanoPS = Load<Shader>(CorePath + "CoreResources/CubeToPano.shader"),
        blitCubeTextureFacePS = Load<Shader>(CorePath + "CoreResources/BlitCubeTextureFace.shader"),
        filterAreaLightCookiesPS = Load<Shader>(CorePath + "CoreResources/FilterAreaLightCookies.shader"),

        // Shadow
        shadowClearPS = Load<Shader>(HDRenderPipelinePath + "Lighting/Shadow/ShadowClear.shader"),
        evsmBlurCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Shadow/EVSMBlur.compute"),
        debugHDShadowMapPS = Load<Shader>(HDRenderPipelinePath + "Lighting/Shadow/DebugDisplayHDShadowMap.shader"),
        momentShadowsCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Shadow/MomentShadows.compute"),

        // Decal
        decalNormalBufferPS = Load<Shader>(HDRenderPipelinePath + "Material/Decal/DecalNormalBuffer.shader"),

        // Ambient occlusion
        aoDownsample1CS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/ScreenSpaceLighting/AmbientOcclusionDownsample1.compute"),
        aoDownsample2CS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/ScreenSpaceLighting/AmbientOcclusionDownsample2.compute"),
        aoRenderCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/ScreenSpaceLighting/AmbientOcclusionRender.compute"),
        aoUpsampleCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/ScreenSpaceLighting/AmbientOcclusionUpsample.compute"),

        // MSAA
        depthValuesPS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/MSAA/DepthValues.shader"),
        colorResolvePS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/MSAA/ColorResolve.shader"),
        aoResolvePS = Load<Shader>(HDRenderPipelinePath + "RenderPipeline/RenderPass/MSAA/AmbientOcclusionResolve.shader"),

        // Post-processing
        nanKillerCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/NaNKiller.compute"),
        exposureCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/Exposure.compute"),
        uberPostCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/UberPost.compute"),
        lutBuilder3DCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/LutBuilder3D.compute"),
        temporalAntialiasingCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/TemporalAntialiasing.compute"),
        depthOfFieldKernelCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldKernel.compute"),
        depthOfFieldCoCCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldCoC.compute"),
        depthOfFieldCoCReprojectCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldCoCReproject.compute"),
        depthOfFieldDilateCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldCoCDilate.compute"),
        depthOfFieldMipCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldMip.compute"),
        depthOfFieldMipSafeCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldMipSafe.compute"),
        depthOfFieldPrefilterCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldPrefilter.compute"),
        depthOfFieldTileMaxCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldTileMax.compute"),
        depthOfFieldGatherCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldGather.compute"),
        depthOfFieldCombineCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/DepthOfFieldCombine.compute"),
        motionBlurTileGenCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/MotionBlurTilePass.compute"),
        motionBlurCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/MotionBlur.compute"),
        motionBlurVelocityPrepCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/MotionBlurVelocityPrep.compute"),
        paniniProjectionCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/PaniniProjection.compute"),
        bloomPrefilterCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/BloomPrefilter.compute"),
        bloomBlurCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/BloomBlur.compute"),
        bloomUpsampleCS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/BloomUpsample.compute"),
        FXAACS = Load<ComputeShader>(HDRenderPipelinePath + "PostProcessing/Shaders/FXAA.compute"),
        finalPassPS = Load<Shader>(HDRenderPipelinePath + "PostProcessing/Shaders/FinalPass.shader"),

#if ENABLE_RAYTRACING
        aoRaytracing = Load<RaytracingShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/RaytracingAmbientOcclusion.raytrace"),
        reflectionRaytracing = Load<RaytracingShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/RaytracingReflections.raytrace"),
        shadowsRaytracing = Load<RaytracingShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/RaytracingAreaShadows.raytrace"),
        areaBillateralFilterCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/AreaBilateralShadow.compute"),
        jointBilateralFilterCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/JointBilateralFilter.compute"),
        reflectionBilateralFilterCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/RaytracingReflectionFilter.compute"),
        lightClusterBuildCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/RaytracingLightCluster.compute"),
        lightClusterDebugCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/DebugLightCluster.compute"),
        countTracedRays = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipeline/Raytracing/Shaders/CountTracedRays.compute"),
#endif
    };

    // Materials (none loaded by default)
    materials = new MaterialResources { };

    // Textures
    textures = new TextureResources
    {
        // Debug
        debugFontTex = Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/DebugFont.tga"),
        colorGradient = Load<Texture2D>(HDRenderPipelinePath + "Debug/ColorGradient.png"),

        filmGrainTex = new[]
        {
            // These need to stay in this specific order!
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Thin01.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Thin02.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Medium01.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Medium02.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Medium03.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Medium04.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Medium05.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Medium06.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Large01.png"),
            Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/FilmGrain/Large02.png")
        },

        // 32 slices each, filled by the loop below.
        blueNoise16LTex = new Texture2D[32],
        blueNoise16RGBTex = new Texture2D[32],
    };

    // ShaderGraphs (none loaded by default)
    shaderGraphs = new ShaderGraphResources { };

    // Fill-in blue noise textures
    for (int i = 0; i < 32; i++)
    {
        textures.blueNoise16LTex[i] = Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/BlueNoise16/L/LDR_LLL1_" + i + ".png");
        textures.blueNoise16RGBTex[i] = Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/BlueNoise16/RGB/LDR_RGB1_" + i + ".png");
    }

    // Coherent noise textures
    textures.owenScrambledTex = Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/CoherentNoise/OwenScrambledNoise.png");
    textures.scramblingTex = Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/Texture/CoherentNoise/ScrambleNoise.png");
}
// Debug visualization of light/probe bounding volumes: draws each visible light's and
// reflection probe's influence shape into an accumulation buffer, resolves it with a
// compute shader (colored by category or by light-count gradient), then blits the
// result over the camera target.
//   cmd                - command buffer all GPU work is recorded into
//   hdCamera           - camera whose render targets are bound for the clears
//   cullResults        - provides the visible lights / reflection probes to draw
//   lightDebugSettings - selects the resolve mode and the max-light-count clamp
public void RenderLightVolumes(CommandBuffer cmd, HDCamera hdCamera, CullingResults cullResults, LightingDebugSettings lightDebugSettings)
{
    // Clear the accumulation/count/output buffers.
    HDUtils.SetRenderTarget(cmd, hdCamera, m_ColorAccumulationBuffer, ClearFlag.Color, Color.black);
    HDUtils.SetRenderTarget(cmd, hdCamera, m_LightCountBuffer, ClearFlag.Color, Color.black);
    HDUtils.SetRenderTarget(cmd, hdCamera, m_DebugLightVolumesTexture, ClearFlag.Color, Color.black);

    // Bind the render target array (accumulation + count) with the camera depth buffer.
    cmd.SetRenderTarget(m_RTIDs, m_DepthBuffer);

    // First, the regions for the light sources (we only support Punctual and Area).
    int numLights = cullResults.visibleLights.Length;
    for (int lightIdx = 0; lightIdx < numLights; ++lightIdx)
    {
        Light currentLegacyLight = cullResults.visibleLights[lightIdx].light;
        if (currentLegacyLight == null)
        {
            continue;
        }

        // Only lights carrying HDRP additional data are drawn.
        HDAdditionalLightData currentHDRLight = currentLegacyLight.GetComponent<HDAdditionalLightData>();
        if (currentHDRLight == null)
        {
            continue;
        }

        Matrix4x4 positionMat = Matrix4x4.Translate(currentLegacyLight.transform.position);

        if (currentLegacyLight.type == LightType.Point || currentLegacyLight.type == LightType.Area)
        {
            // Point/area volumes are spheres scaled to the light's range.
            m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentLegacyLight.range, currentLegacyLight.range, currentLegacyLight.range));
            switch (currentHDRLight.lightTypeExtent)
            {
                case LightTypeExtent.Punctual:
                {
                    m_MaterialProperty.SetColor(_ColorShaderID, new Color(0.0f, 0.5f, 0.0f, 1.0f));
                    m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                    cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                }
                break;
                case LightTypeExtent.Rectangle:
                {
                    m_MaterialProperty.SetColor(_ColorShaderID, new Color(0.0f, 1.0f, 1.0f, 1.0f));
                    m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                    cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                }
                break;
                case LightTypeExtent.Tube:
                {
                    m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.0f, 0.5f, 1.0f));
                    m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                    cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                }
                break;
                default:
                    break;
            }
        }
        else if (currentLegacyLight.type == LightType.Spot)
        {
            if (currentHDRLight.spotLightShape == SpotLightShape.Cone)
            {
                // Cone base radius from the half spot angle (spotAngle is the full angle in degrees).
                float bottomRadius = Mathf.Tan(currentLegacyLight.spotAngle * Mathf.PI / 360.0f) * currentLegacyLight.range;
                m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(bottomRadius, bottomRadius, currentLegacyLight.range));
                m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                cmd.DrawMesh(DebugShapes.instance.RequestConeMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
            }
            else if (currentHDRLight.spotLightShape == SpotLightShape.Box)
            {
                m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDRLight.shapeWidth, currentHDRLight.shapeHeight, currentLegacyLight.range));
                // Box is centered on the light, so push it half a range forward.
                m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, currentLegacyLight.range / 2.0f));
                cmd.DrawMesh(DebugShapes.instance.RequestBoxMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
            }
            else if (currentHDRLight.spotLightShape == SpotLightShape.Pyramid)
            {
                float bottomWidth = Mathf.Tan(currentLegacyLight.spotAngle * Mathf.PI / 360.0f) * currentLegacyLight.range;
                m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDRLight.aspectRatio * bottomWidth * 2, bottomWidth * 2, currentLegacyLight.range));
                m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                cmd.DrawMesh(DebugShapes.instance.RequestPyramidMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
            }
        }
    }

    // Now let's do the same but for reflection probes.
    int numProbes = cullResults.visibleReflectionProbes.Length;
    for (int probeIdx = 0; probeIdx < numProbes; ++probeIdx)
    {
        ReflectionProbe currentLegacyProbe = cullResults.visibleReflectionProbes[probeIdx].reflectionProbe;
        HDAdditionalReflectionData currentHDProbe = currentLegacyProbe.GetComponent<HDAdditionalReflectionData>();
        if (!currentHDProbe)
        {
            continue;
        }

        // Per-probe property block (was previously a local named m_MaterialProperty,
        // shadowing the field of the same name used elsewhere in this method).
        MaterialPropertyBlock probeMaterialProperty = new MaterialPropertyBlock();
        Mesh targetMesh = null;
        if (currentHDProbe.influenceVolume.shape == InfluenceShape.Sphere)
        {
            probeMaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDProbe.influenceVolume.sphereRadius, currentHDProbe.influenceVolume.sphereRadius, currentHDProbe.influenceVolume.sphereRadius));
            targetMesh = DebugShapes.instance.RequestSphereMesh();
        }
        else
        {
            probeMaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDProbe.influenceVolume.boxSize.x, currentHDProbe.influenceVolume.boxSize.y, currentHDProbe.influenceVolume.boxSize.z));
            targetMesh = DebugShapes.instance.RequestBoxMesh();
        }

        probeMaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 1.0f, 0.0f, 1.0f));
        probeMaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
        Matrix4x4 positionMat = Matrix4x4.Translate(currentLegacyProbe.transform.position);
        cmd.DrawMesh(targetMesh, positionMat, m_DebugLightVolumeMaterial, 0, 0, probeMaterialProperty);
    }

    // Define which kernel to use based on the lightloop options.
    int targetKernel = lightDebugSettings.lightVolumeDebugByCategory == LightLoop.LightVolumeDebug.ColorAndEdge ? m_DebugLightVolumeColorsKernel : m_DebugLightVolumeGradientKernel;

    // Set the input params for the compute.
    cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugLightCountBufferShaderID, m_LightCountBuffer);
    cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugColorAccumulationBufferShaderID, m_ColorAccumulationBuffer);
    cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugLightVolumesTextureShaderID, m_DebugLightVolumesTexture);
    cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _ColorGradientTextureShaderID, m_ColorGradientTexture);
    cmd.SetComputeIntParam(m_DebugLightVolumeCompute, _MaxDebugLightCountShaderID, (int)lightDebugSettings.maxDebugLightCount);

    // Texture dimensions.
    int texWidth = m_ColorAccumulationBuffer.rt.width;
    // BUGFIX: was reading .width for the height, which dispatched the wrong number of
    // tile rows on non-square render targets.
    int texHeight = m_ColorAccumulationBuffer.rt.height;

    // Dispatch the compute (shader tile size is 8x8).
    int lightVolumesTileSize = 8;
    int numTilesX = (texWidth + (lightVolumesTileSize - 1)) / lightVolumesTileSize;
    int numTilesY = (texHeight + (lightVolumesTileSize - 1)) / lightVolumesTileSize;
    cmd.DispatchCompute(m_DebugLightVolumeCompute, targetKernel, numTilesX, numTilesY, 1);

    // Blit the resolved debug texture into the camera target (fullscreen triangle).
    cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget);
    m_MaterialProperty.SetTexture(HDShaderIDs._BlitTexture, m_DebugLightVolumesTexture);
    cmd.DrawProcedural(Matrix4x4.identity, m_DebugLightVolumeMaterial, 1, MeshTopology.Triangles, 3, 1, m_MaterialProperty);
}
// Init a FrameSettings from render pipeline settings, source frame settings and debug settings (if any).
// Aggregates the various options into `aggregate`: a feature is enabled only if both the camera's
// frame settings request it AND the pipeline asset supports it.
// NOTE: assignment order matters — several flags (e.g. enableVolumetrics, enableObjectMotionVectors,
// the runXxxAsync flags, enableMSAA) read earlier aggregate values.
public static void InitializeFrameSettings(Camera camera, RenderPipelineSettings renderPipelineSettings, FrameSettings srcFrameSettings, ref FrameSettings aggregate)
{
    if (aggregate == null)
    {
        aggregate = new FrameSettings();
    }

    // When rendering reflection probe we disable specular as it is view dependent
    if (camera.cameraType == CameraType.Reflection)
    {
        aggregate.diffuseGlobalDimmer = 1.0f;
        aggregate.specularGlobalDimmer = 0.0f;
    }
    else
    {
        aggregate.diffuseGlobalDimmer = 1.0f;
        aggregate.specularGlobalDimmer = 1.0f;
    }

    aggregate.enableShadow = srcFrameSettings.enableShadow;
    aggregate.enableContactShadows = srcFrameSettings.enableContactShadows;
    aggregate.enableShadowMask = srcFrameSettings.enableShadowMask && renderPipelineSettings.supportShadowMask;
    // SSR is disabled for reflection cameras: no recursive reflections.
    aggregate.enableSSR = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSSR && renderPipelineSettings.supportSSR;
    aggregate.enableSSAO = srcFrameSettings.enableSSAO && renderPipelineSettings.supportSSAO;
    aggregate.enableSubsurfaceScattering = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSubsurfaceScattering && renderPipelineSettings.supportSubsurfaceScattering;
    aggregate.enableTransmission = srcFrameSettings.enableTransmission;
    aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
    // We must take care of the scene view fog flags in the editor.
    if (!CoreUtils.IsSceneViewFogEnabled(camera))
    {
        aggregate.enableAtmosphericScattering = false;
    }
    // Volumetrics are disabled if there is no atmospheric scattering (reads the value computed just above).
    aggregate.enableVolumetrics = srcFrameSettings.enableVolumetrics && renderPipelineSettings.supportVolumetrics && aggregate.enableAtmosphericScattering;
    aggregate.enableReprojectionForVolumetrics = srcFrameSettings.enableReprojectionForVolumetrics;
    aggregate.enableLightLayers = srcFrameSettings.enableLightLayers && renderPipelineSettings.supportLightLayers;

    // We have to fall back to forward-only rendering when scene view is using wireframe rendering mode
    // as rendering everything in wireframe + deferred do not play well together.
    if (GL.wireframe) //force forward mode for wireframe
    {
        aggregate.shaderLitMode = LitShaderMode.Forward;
    }
    else
    {
        switch (renderPipelineSettings.supportedLitShaderMode)
        {
            case RenderPipelineSettings.SupportedLitShaderMode.ForwardOnly:
                aggregate.shaderLitMode = LitShaderMode.Forward;
                break;
            case RenderPipelineSettings.SupportedLitShaderMode.DeferredOnly:
                aggregate.shaderLitMode = LitShaderMode.Deferred;
                break;
            case RenderPipelineSettings.SupportedLitShaderMode.Both:
                // Only when the asset supports both does the camera's own choice apply.
                aggregate.shaderLitMode = srcFrameSettings.shaderLitMode;
                break;
        }
    }

    aggregate.enableDepthPrepassWithDeferredRendering = srcFrameSettings.enableDepthPrepassWithDeferredRendering;

    aggregate.enableTransparentPrepass = srcFrameSettings.enableTransparentPrepass && renderPipelineSettings.supportTransparentDepthPrepass;
    aggregate.enableMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportMotionVectors;
    // Object motion vectors are disabled if motion vectors are disabled (reads the value computed just above).
    aggregate.enableObjectMotionVectors = srcFrameSettings.enableObjectMotionVectors && aggregate.enableMotionVectors;
    aggregate.enableDecals = srcFrameSettings.enableDecals && renderPipelineSettings.supportDecals;
    aggregate.enableRoughRefraction = srcFrameSettings.enableRoughRefraction;
    aggregate.enableTransparentPostpass = srcFrameSettings.enableTransparentPostpass && renderPipelineSettings.supportTransparentDepthPostpass;
    aggregate.enableDistortion = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableDistortion && renderPipelineSettings.supportDistortion;

    // Planar and real time cubemap doesn't need post process and render in FP16.
    aggregate.enablePostprocess = camera.cameraType != CameraType.Reflection && srcFrameSettings.enablePostprocess;

    aggregate.enableAsyncCompute = srcFrameSettings.enableAsyncCompute && SystemInfo.supportsAsyncCompute;
    // The individual async passes are only honored when async compute is available at all.
    aggregate.runLightListAsync = aggregate.enableAsyncCompute && srcFrameSettings.runLightListAsync;
    aggregate.runSSRAsync = aggregate.enableAsyncCompute && srcFrameSettings.runSSRAsync;
    aggregate.runSSAOAsync = aggregate.enableAsyncCompute && srcFrameSettings.runSSAOAsync;
    aggregate.runContactShadowsAsync = aggregate.enableAsyncCompute && srcFrameSettings.runContactShadowsAsync;
    aggregate.runVolumeVoxelizationAsync = aggregate.enableAsyncCompute && srcFrameSettings.runVolumeVoxelizationAsync;

    aggregate.enableOpaqueObjects = srcFrameSettings.enableOpaqueObjects;
    aggregate.enableTransparentObjects = srcFrameSettings.enableTransparentObjects;
    aggregate.enableRealtimePlanarReflection = srcFrameSettings.enableRealtimePlanarReflection;

    // MSAA is only supported in forward (reads shaderLitMode computed above).
    aggregate.enableMSAA = srcFrameSettings.enableMSAA && renderPipelineSettings.supportMSAA && aggregate.shaderLitMode == LitShaderMode.Forward;

    aggregate.ConfigureMSAADependentSettings();
    aggregate.ConfigureStereoDependentSettings(camera);

    // Disable various options for the preview, except if we are a Camera Editor preview.
    if (HDUtils.IsRegularPreviewCamera(camera))
    {
        aggregate.enableShadow = false;
        aggregate.enableContactShadows = false;
        aggregate.enableShadowMask = false;
        aggregate.enableSSR = false;
        aggregate.enableSSAO = false;
        aggregate.enableAtmosphericScattering = false;
        aggregate.enableVolumetrics = false;
        aggregate.enableReprojectionForVolumetrics = false;
        aggregate.enableLightLayers = false;
        aggregate.enableTransparentPrepass = false;
        aggregate.enableMotionVectors = false;
        aggregate.enableObjectMotionVectors = false;
        aggregate.enableDecals = false;
        aggregate.enableTransparentPostpass = false;
        aggregate.enableDistortion = false;
        aggregate.enablePostprocess = false;
    }

    LightLoopSettings.InitializeLightLoopSettings(camera, aggregate, renderPipelineSettings, srcFrameSettings, ref aggregate.lightLoopSettings);

    aggregate.m_LitShaderModeEnumIndex = srcFrameSettings.m_LitShaderModeEnumIndex;
}
// Builds the moment shadow data for every shadow request in the atlas:
// computes the moments from the shadow map, then runs a two-pass (horizontal + vertical)
// summed-area-table pass, and publishes the final table as a global texture.
//   parameters                    - shadow rendering parameters; supplies the compute shader and the requests
//   atlas                         - source shadow map atlas
//   atlasMoment                   - destination for the computed moments
//   intermediateSummedAreaTexture - scratch target between the horizontal and vertical SAT passes
//   summedAreaTexture             - final summed-area-table output
//   cmd                           - command buffer the work is recorded into
static void IMBlurMoment(RenderShadowsParameters parameters, RTHandle atlas, RTHandle atlasMoment, RTHandle intermediateSummedAreaTexture, RTHandle summedAreaTexture, CommandBuffer cmd)
{
    // Early out if the compute shader (and thus the kernels) is not available.
    ComputeShader momentCS = parameters.imShadowBlurMomentsCS;
    if (momentCS == null)
    {
        return;
    }

    using (new ProfilingSample(cmd, "Render Moment Shadows", CustomSamplerId.RenderShadowMaps.GetSampler()))
    {
        int computeMomentKernel = momentCS.FindKernel("ComputeMomentShadows");
        int summedAreaHorizontalKernel = momentCS.FindKernel("MomentSummedAreaTableHorizontal");
        int summedAreaVerticalKernel = momentCS.FindKernel("MomentSummedAreaTableVertical");

        // First of all let's clear the moment shadow map and the SAT targets.
        HDUtils.SetRenderTarget(cmd, atlasMoment, ClearFlag.Color, Color.black);
        HDUtils.SetRenderTarget(cmd, intermediateSummedAreaTexture, ClearFlag.Color, Color.black);
        HDUtils.SetRenderTarget(cmd, summedAreaTexture, ClearFlag.Color, Color.black);

        // For every sub-shadow map of the atlas, we need to generate the moment shadow map.
        foreach (var shadowRequest in parameters.shadowRequests)
        {
            // Bind the resources for this request; the viewport ST vector selects the sub-rect of the atlas.
            cmd.SetComputeTextureParam(momentCS, computeMomentKernel, HDShaderIDs._ShadowmapAtlas, atlas);
            cmd.SetComputeTextureParam(momentCS, computeMomentKernel, HDShaderIDs._MomentShadowAtlas, atlasMoment);
            cmd.SetComputeVectorParam(momentCS, HDShaderIDs._MomentShadowmapSlotST, new Vector4(shadowRequest.atlasViewport.width, shadowRequest.atlasViewport.height, shadowRequest.atlasViewport.min.x, shadowRequest.atlasViewport.min.y));

            // First of all we need to compute the moments (8x8 thread groups, at least one).
            int numTilesX = Math.Max((int)shadowRequest.atlasViewport.width / 8, 1);
            int numTilesY = Math.Max((int)shadowRequest.atlasViewport.height / 8, 1);
            cmd.DispatchCompute(momentCS, computeMomentKernel, numTilesX, numTilesY, 1);

            // Do the horizontal pass of the summed area table.
            cmd.SetComputeTextureParam(momentCS, summedAreaHorizontalKernel, HDShaderIDs._SummedAreaTableInputFloat, atlasMoment);
            cmd.SetComputeTextureParam(momentCS, summedAreaHorizontalKernel, HDShaderIDs._SummedAreaTableOutputInt, intermediateSummedAreaTexture);
            cmd.SetComputeFloatParam(momentCS, HDShaderIDs._IMSKernelSize, shadowRequest.kernelSize);
            cmd.SetComputeVectorParam(momentCS, HDShaderIDs._MomentShadowmapSize, new Vector2((float)atlasMoment.referenceSize.x, (float)atlasMoment.referenceSize.y));
            int numLines = Math.Max((int)shadowRequest.atlasViewport.width / 64, 1);
            cmd.DispatchCompute(momentCS, summedAreaHorizontalKernel, numLines, 1, 1);

            // Do the vertical pass of the summed area table.
            // (Comment fixed: this was mislabelled as a second "horizontal" pass.)
            cmd.SetComputeTextureParam(momentCS, summedAreaVerticalKernel, HDShaderIDs._SummedAreaTableInputInt, intermediateSummedAreaTexture);
            cmd.SetComputeTextureParam(momentCS, summedAreaVerticalKernel, HDShaderIDs._SummedAreaTableOutputInt, summedAreaTexture);
            cmd.SetComputeVectorParam(momentCS, HDShaderIDs._MomentShadowmapSize, new Vector2((float)atlasMoment.referenceSize.x, (float)atlasMoment.referenceSize.y));
            cmd.SetComputeFloatParam(momentCS, HDShaderIDs._IMSKernelSize, shadowRequest.kernelSize);
            int numColumns = Math.Max((int)shadowRequest.atlasViewport.height / 64, 1);
            cmd.DispatchCompute(momentCS, summedAreaVerticalKernel, numColumns, 1, 1);

            // Push the final summed area table as a global texture.
            cmd.SetGlobalTexture(HDShaderIDs._SummedAreaTableInputInt, summedAreaTexture);
        }
    }
}
// Generates the gaussian pyramid of source into destination.
// We can't do it in place as the color pyramid has to be read while writing to the color
// buffer in some cases (e.g. refraction, distortion).
// Returns the number of mips generated (srcMipLevel + 1 after the loop exits).
//
// Two code paths exist:
//  - fragment (pixel-shader) path, selected on UNITY_SWITCH;
//  - compute path, used everywhere else.
// Both iterate mip by mip: downsample by 2, then apply a separable blur
// (horizontal into a half-resolution temp target, then vertical back into 'destination').
public int RenderColorGaussianPyramid(CommandBuffer cmd, Vector2Int size, Texture source, RenderTexture destination)
{
    // Select between Tex2D and Tex2DArray versions of the kernels.
    int kernelIndex = (source.dimension == TextureDimension.Tex2DArray) ? kKernelTex2DArray : kKernelTex2D;

    // Sanity check: array sources require an array destination and the full kernel set.
    if (kernelIndex == kKernelTex2DArray)
    {
        Debug.Assert(source.dimension == destination.dimension, "MipGenerator source texture does not match dimension of destination!");
        Debug.Assert(m_ColorGaussianKernel.Length == kernelCount);
    }

    // Only create the temporary target on-demand in case the game doesn't actually need it.
    // Allocated at half the reference size (Vector2.one * 0.5f) since it only ever holds
    // the intermediate (downsampled) result of the horizontal blur.
    if (m_TempColorTargets[kernelIndex] == null)
    {
        m_TempColorTargets[kernelIndex] = RTHandles.Alloc(
            Vector2.one * 0.5f,
            filterMode: FilterMode.Bilinear,
            colorFormat: GraphicsFormat.R16G16B16A16_SFloat,
            enableRandomWrite: true,
            useMipMap: false,
            enableMSAA: false,
            xrInstancing: kernelIndex == kKernelTex2DArray,
            useDynamicScale: true,
            name: "Temp Gaussian Pyramid Target"
        );
    }

#if UNITY_SWITCH
    bool preferFragment = true;
#else
    bool preferFragment = false;
#endif

    int srcMipLevel = 0;
    int srcMipWidth = size.x;
    int srcMipHeight = size.y;
    int slices = destination.volumeDepth;

    if (preferFragment)
    {
        // Fragment path cannot address texture-array slices per draw.
        Debug.Assert(!TextureXR.useTexArray, "Fragment version of mip generator is not compatible with texture array!");

        // Fixed dimensions of the half-resolution temp target, used below to build
        // scale/bias and UV clamp values for the vertical blur reads.
        int tempTargetWidth = srcMipWidth >> 1;
        int tempTargetHeight = srcMipHeight >> 1;

        // Copies src mip0 to dst mip0.
        m_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
        m_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, new Vector4(1f, 1f, 0f, 0f));
        m_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, 0f);
        cmd.SetRenderTarget(destination, 0);
        cmd.DrawProcedural(Matrix4x4.identity, HDUtils.GetBlitMaterial(source.dimension), 0, MeshTopology.Triangles, 3, 1, m_PropertyBlock);

        // Note: smaller mips are excluded as we don't need them and the gaussian compute works
        // on 8x8 blocks.
        // TODO: Could be further optimized by merging the smaller mips to reduce the amount of dispatches.
        // Specifically, levels 2x2 and 1x1 (or their variations, depending on the aspect ratio) should not be used.
        while (srcMipWidth >= 8 || srcMipHeight >= 8)
        {
            int dstMipWidth = Mathf.Max(1, srcMipWidth >> 1);
            int dstMipHeight = Mathf.Max(1, srcMipHeight >> 1);

            // Downsample: read mip N of destination, write mip N+1.
            // Note: this code is not valid on D3D11 because destination is used both as an input and target.
            m_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, destination);
            m_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, new Vector4(1f, 1f, 0f, 0f));
            m_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, srcMipLevel);
            cmd.SetRenderTarget(destination, srcMipLevel + 1);
            cmd.DrawProcedural(Matrix4x4.identity, HDUtils.GetBlitMaterial(source.dimension), 1, MeshTopology.Triangles, 3, 1, m_PropertyBlock);

            // Blur horizontal: destination mip N+1 -> temp target mip 0.
            // _SrcUvLimits.z = 1/dstMipWidth encodes the horizontal texel step for the blur taps.
            m_PropertyBlock.SetTexture(HDShaderIDs._Source, destination);
            m_PropertyBlock.SetVector(HDShaderIDs._SrcScaleBias, new Vector4(1f, 1f, 0f, 0f));
            m_PropertyBlock.SetVector(HDShaderIDs._SrcUvLimits, new Vector4(1f, 1f, 1f / dstMipWidth, 0f));
            m_PropertyBlock.SetFloat(HDShaderIDs._SourceMip, srcMipLevel + 1);
            cmd.SetRenderTarget(m_TempColorTargets[kernelIndex], 0);
            cmd.SetViewport(new Rect(0, 0, dstMipWidth, dstMipHeight));
            cmd.DrawProcedural(Matrix4x4.identity, m_ColorPyramidPSMat, 0, MeshTopology.Triangles, 3, 1, m_PropertyBlock);

            // Blur vertical: temp target -> destination mip N+1.
            // Scale/bias remaps UVs into the sub-rect of the temp target actually written above;
            // the 0.5-texel inset in _SrcUvLimits clamps reads so the blur doesn't sample stale
            // texels outside the current mip's region. _SrcUvLimits.w = 1/tempTargetHeight is the
            // vertical texel step.
            m_PropertyBlock.SetTexture(HDShaderIDs._Source, m_TempColorTargets[kernelIndex]);
            m_PropertyBlock.SetVector(HDShaderIDs._SrcScaleBias, new Vector4((float)dstMipWidth / tempTargetWidth, (float)dstMipHeight / tempTargetHeight, 0f, 0f));
            m_PropertyBlock.SetVector(HDShaderIDs._SrcUvLimits, new Vector4((dstMipWidth - 0.5f) / tempTargetWidth, (dstMipHeight - 0.5f) / tempTargetHeight, 0f, 1f / tempTargetHeight));
            m_PropertyBlock.SetFloat(HDShaderIDs._SourceMip, 0);
            cmd.SetRenderTarget(destination, srcMipLevel + 1);
            cmd.DrawProcedural(Matrix4x4.identity, m_ColorPyramidPSMat, 0, MeshTopology.Triangles, 3, 1, m_PropertyBlock);

            srcMipLevel++;
            srcMipWidth = srcMipWidth >> 1;
            srcMipHeight = srcMipHeight >> 1;
        }
    }
    else
    {
        // Compute path: one downsample dispatch + one gaussian dispatch per mip,
        // each on 8x8 thread groups, with 'slices' dispatched in Z for texture arrays.
        var cs = m_ColorPyramidCS;
        int downsampleKernel = m_ColorDownsampleKernel[kernelIndex];
        int downsampleKernelMip0 = m_ColorDownsampleKernelCopyMip0[kernelIndex];
        int gaussianKernel = m_ColorGaussianKernel[kernelIndex];

        while (srcMipWidth >= 8 || srcMipHeight >= 8)
        {
            int dstMipWidth = Mathf.Max(1, srcMipWidth >> 1);
            int dstMipHeight = Mathf.Max(1, srcMipHeight >> 1);

            // _Size carries the *source* mip dimensions for the downsample kernel.
            cmd.SetComputeVectorParam(cs, HDShaderIDs._Size, new Vector4(srcMipWidth, srcMipHeight, 0f, 0f));

            // First dispatch also copies src to dst mip0 (via the CopyMip0 kernel variant).
            if (srcMipLevel == 0)
            {
                cmd.SetComputeTextureParam(cs, downsampleKernelMip0, HDShaderIDs._Source, source, 0);
                cmd.SetComputeTextureParam(cs, downsampleKernelMip0, HDShaderIDs._Mip0, destination, 0);
                cmd.SetComputeTextureParam(cs, downsampleKernelMip0, HDShaderIDs._Destination, m_TempColorTargets[kernelIndex]);
                cmd.DispatchCompute(cs, downsampleKernelMip0, (dstMipWidth + 7) / 8, (dstMipHeight + 7) / 8, slices);
            }
            else
            {
                cmd.SetComputeTextureParam(cs, downsampleKernel, HDShaderIDs._Source, destination, srcMipLevel);
                cmd.SetComputeTextureParam(cs, downsampleKernel, HDShaderIDs._Destination, m_TempColorTargets[kernelIndex]);
                cmd.DispatchCompute(cs, downsampleKernel, (dstMipWidth + 7) / 8, (dstMipHeight + 7) / 8, slices);
            }

            // _Size now carries the *destination* mip dimensions for the gaussian kernel,
            // which blurs the temp target into destination mip N+1.
            cmd.SetComputeVectorParam(cs, HDShaderIDs._Size, new Vector4(dstMipWidth, dstMipHeight, 0f, 0f));
            cmd.SetComputeTextureParam(cs, gaussianKernel, HDShaderIDs._Source, m_TempColorTargets[kernelIndex]);
            cmd.SetComputeTextureParam(cs, gaussianKernel, HDShaderIDs._Destination, destination, srcMipLevel + 1);
            cmd.DispatchCompute(cs, gaussianKernel, (dstMipWidth + 7) / 8, (dstMipHeight + 7) / 8, slices);

            srcMipLevel++;
            srcMipWidth = srcMipWidth >> 1;
            srcMipHeight = srcMipHeight >> 1;
        }
    }

    // +1 because srcMipLevel counts generated levels below mip 0; the pyramid
    // includes mip 0 itself.
    return(srcMipLevel + 1);
}