Example #1
0
        // Records the DXR path-tracing ray-generation pass for this camera into `cmd`,
        // then accumulates the traced radiance into `outputTexture`.
        // Early-outs when the ray-tracing shader is missing, the PathTracing volume
        // override is disabled, or XR single-pass rendering is active (viewCount > 1).
        void RenderPathTracing(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputTexture)
        {
            RayTracingShader pathTracingShader = m_Asset.renderPipelineRayTracingResources.pathTracing;

            // Fetch the PathTracing settings active on this camera's volume stack.
            m_PathTracingSettings = hdCamera.volumeStack.GetComponent <PathTracing>();

            // Check the validity of the state before moving on with the computation
            if (!pathTracingShader || !m_PathTracingSettings.enable.value)
            {
                return;
            }

            // Path tracing only supports a single view per camera.
            if (hdCamera.viewCount > 1)
            {
                Debug.LogError("Path Tracing is not supported when using XR single-pass rendering.");
                return;
            }

            // Restart the accumulation if any tracked state (resolution, sky, fog,
            // camera matrix, materials, lights, geometry) changed since last frame.
            CheckDirtiness(hdCamera);

            // Inject the ray-tracing sampling data
            BlueNoise blueNoiseManager = GetBlueNoiseManager();

            blueNoiseManager.BindDitheredRNGData256SPP(cmd);

            // Grab the acceleration structure and the list of HD lights for the target camera
            RayTracingAccelerationStructure accelerationStructure = RequestAccelerationStructure();
            HDRaytracingLightCluster        lightCluster          = RequestLightCluster();
            // NOTE(review): lightClusterSettings appears unused in this method — confirm whether it can be removed.
            LightCluster lightClusterSettings = hdCamera.volumeStack.GetComponent <LightCluster>();

            if (!m_SubFrameManager.isRecording)
            {
                // If we are recording, the max iteration is set/overridden by the subframe manager, otherwise we read it from the path tracing volume
                m_SubFrameManager.subFrameCount = (uint)m_PathTracingSettings.maximumSamples.value;
            }

#if UNITY_HDRP_DXR_TESTS_DEFINE
            // Graphics tests force a single sample so results stay fast and deterministic.
            if (Application.isPlaying)
            {
                m_SubFrameManager.subFrameCount = 1;
            }
#endif

            // Only dispatch new rays while the accumulation has not yet converged;
            // once currentIteration reaches subFrameCount we skip straight to RenderAccumulation.
            CameraData camData = m_SubFrameManager.GetCameraData(hdCamera.camera.GetInstanceID());
            if (camData.currentIteration < m_SubFrameManager.subFrameCount)
            {
                // Define the shader pass to use for the path tracing pass
                cmd.SetRayTracingShaderPass(pathTracingShader, "PathTracingDXR");

                // Set the acceleration structure for the pass
                cmd.SetRayTracingAccelerationStructure(pathTracingShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

                // Inject the ray-tracing sampling data
                cmd.SetGlobalTexture(HDShaderIDs._OwenScrambledTexture, m_Asset.renderPipelineResources.textures.owenScrambled256Tex);
                cmd.SetGlobalTexture(HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);

                // Update the global constant buffer
                m_ShaderVariablesRayTracingCB._RaytracingNumSamples     = (int)m_SubFrameManager.subFrameCount;
                m_ShaderVariablesRayTracingCB._RaytracingMinRecursion   = m_PathTracingSettings.minimumDepth.value;
                m_ShaderVariablesRayTracingCB._RaytracingMaxRecursion   = m_PathTracingSettings.maximumDepth.value;
                m_ShaderVariablesRayTracingCB._RaytracingIntensityClamp = m_PathTracingSettings.maximumIntensity.value;
                m_ShaderVariablesRayTracingCB._RaytracingSampleIndex    = (int)camData.currentIteration;
                ConstantBuffer.PushGlobal(cmd, m_ShaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                // LightLoop data
                cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
                cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());

                // Set the data for the ray miss
                cmd.SetRayTracingIntParam(pathTracingShader, HDShaderIDs._RaytracingCameraSkyEnabled, camData.skyEnabled ? 1 : 0);
                cmd.SetRayTracingVectorParam(pathTracingShader, HDShaderIDs._RaytracingCameraClearColor, hdCamera.backgroundColorHDR);
                cmd.SetRayTracingTextureParam(pathTracingShader, HDShaderIDs._SkyTexture, m_SkyManager.GetSkyReflection(hdCamera));

                // Additional data for path tracing
                cmd.SetRayTracingTextureParam(pathTracingShader, HDShaderIDs._RadianceTexture, m_RadianceTexture);
                cmd.SetRayTracingMatrixParam(pathTracingShader, HDShaderIDs._PixelCoordToViewDirWS, hdCamera.mainViewConstants.pixelCoordToViewDirWS);
                cmd.SetRayTracingVectorParam(pathTracingShader, HDShaderIDs._PathTracedDoFConstants, ComputeDoFConstants(hdCamera, m_PathTracingSettings));

                // Run the computation
                cmd.DispatchRays(pathTracingShader, "RayGen", (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
            }
            // Always run accumulation (even when converged) so the output texture is refreshed.
            RenderAccumulation(hdCamera, cmd, m_RadianceTexture, outputTexture, true);
        }
Example #2
0
        // Detects per-camera state changes and restarts the path-tracing accumulation
        // whenever one is found. No-op while the sub-frame manager is recording, since
        // recording drives the iteration count itself.
        private void CheckDirtiness(HDCamera hdCamera)
        {
            if (m_SubFrameManager.isRecording)
            {
                return;
            }

            // Cached state for this camera.
            int        cameraId = hdCamera.camera.GetInstanceID();
            CameraData data     = m_SubFrameManager.GetCameraData(cameraId);

            // Restarts accumulation for this camera and persists its cached state.
            void RestartAndPersist()
            {
                data.ResetIteration();
                m_SubFrameManager.SetCameraData(cameraId, data);
            }

            // Resolution change?
            if (hdCamera.actualWidth != data.width || hdCamera.actualHeight != data.height)
            {
                data.width  = (uint)hdCamera.actualWidth;
                data.height = (uint)hdCamera.actualHeight;
                RestartAndPersist();
                return;
            }

            // Sky visibility change?
            bool flag = hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky;
            if (flag != data.skyEnabled)
            {
                data.skyEnabled = flag;
                RestartAndPersist();
                return;
            }

            // Fog toggle change?
            flag = Fog.IsFogEnabled(hdCamera);
            if (flag != data.fogEnabled)
            {
                data.fogEnabled = flag;
                RestartAndPersist();
                return;
            }

            // Camera movement, detected by comparing against the previous frame's matrix?
            if (hdCamera.mainViewConstants.nonJitteredViewProjMatrix != hdCamera.mainViewConstants.prevViewProjMatrix)
            {
                RestartAndPersist();
                return;
            }

            // Material change: reset globally and clear the flag.
            if (m_MaterialsDirty)
            {
                m_MaterialsDirty = false;
                ResetPathTracing();
                return;
            }

            // Light or geometry transform change (deliberately falls through to the
            // checks below, matching the original control flow).
            if (m_TransformDirty)
            {
                m_TransformDirty = false;
                ResetPathTracing();
            }

            // Light count change?
            if (m_CacheLightCount != m_RayTracingLights.lightCount)
            {
                m_CacheLightCount = (uint)m_RayTracingLights.lightCount;
                ResetPathTracing();
                return;
            }

            // Geometry change, approximated by the acceleration-structure size.
            ulong rasSize = m_CurrentRAS.GetSize();
            if (rasSize != m_CacheAccelSize)
            {
                m_CacheAccelSize = rasSize;
                ResetPathTracing();
            }
        }
Example #3
0
 /// <summary>Stores (adds or overwrites) the cached per-camera data for the given camera instance id.</summary>
 internal void SetCameraData(int camID, CameraData camData) => m_CameraCache[camID] = camData;
Example #4
0
 /// <summary>
 /// Restarts path-tracing accumulation for the given camera: resets its iteration
 /// counter, persists the updated camera data, and flags the sky for re-render.
 /// </summary>
 internal void ResetPathTracing(int camID, CameraData camData)
 {
     camData.ResetIteration();
     m_SubFrameManager.SetCameraData(camID, camData);
     m_RenderSky = true;
 }
Example #5
0
        // Detects per-camera state changes and restarts the path-tracing accumulation
        // when one is found. Returns the (possibly updated) camera data so the caller
        // can keep working with fresh cached state.
        private CameraData CheckDirtiness(HDCamera hdCamera, int camID, CameraData camData)
        {
            // Resolution change?
            if (hdCamera.actualWidth != camData.width || hdCamera.actualHeight != camData.height)
            {
                camData.width  = (uint)hdCamera.actualWidth;
                camData.height = (uint)hdCamera.actualHeight;
                return ResetPathTracing(camID, camData);
            }

            // Sky visibility change?
            bool skyVisible = hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky;
            if (skyVisible != camData.skyEnabled)
            {
                camData.skyEnabled = skyVisible;
                return ResetPathTracing(camID, camData);
            }

            // Fog toggle change?
            bool fogOn = Fog.IsFogEnabled(hdCamera);
            if (fogOn != camData.fogEnabled)
            {
                camData.fogEnabled = fogOn;
                return ResetPathTracing(camID, camData);
            }

            // Camera movement, detected by comparing against the previous frame's matrix?
            if (hdCamera.mainViewConstants.nonJitteredViewProjMatrix != hdCamera.mainViewConstants.prevViewProjMatrix)
            {
                return ResetPathTracing(camID, camData);
            }

            // Material change: reset globally and clear the flag.
            if (m_MaterialsDirty)
            {
                m_MaterialsDirty = false;
                ResetPathTracing();
                return camData;
            }

            // Light or geometry transform change: reset globally and clear the flag.
            if (m_TransformDirty)
            {
                m_TransformDirty = false;
                ResetPathTracing();
                return camData;
            }

            // Light count change?
            if (m_CacheLightCount != m_RayTracingLights.lightCount)
            {
                m_CacheLightCount = (uint)m_RayTracingLights.lightCount;
                ResetPathTracing();
                return camData;
            }

            // Geometry change, approximated by the acceleration-structure size.
            ulong rasSize = m_CurrentRAS.GetSize();
            if (rasSize != m_CacheAccelSize)
            {
                m_CacheAccelSize = rasSize;
                ResetPathTracing();
            }

            // A different camera became active: its sky texture must be rendered again.
            if (camID != m_CameraID)
            {
                m_RenderSky = true;
                m_CameraID  = camID;
            }

            return camData;
        }
        // Adds the RenderGraph pass that accumulates the current path-traced frame
        // (inputTexture) into the per-camera PathTracing history buffer and writes the
        // running average to outputTexture. `needExposure` is forwarded to the compute
        // shader so it can apply exposure during the blit.
        void RenderAccumulation(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle inputTexture, TextureHandle outputTexture, bool needExposure)
        {
            using (var builder = renderGraph.AddRenderPass <RenderAccumulationPassData>("Render Accumulation", out var passData))
            {
                // Grab the history buffer; allocate it lazily on first use for this camera.
                TextureHandle history = renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.PathTracing)
                                                                  ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.PathTracing, PathTracingHistoryBufferAllocatorFunction, 1));

                // When input and output differ, the shader reads the frame from a dedicated texture.
                bool inputFromRadianceTexture = !inputTexture.Equals(outputTexture);
                passData.accumulationCS                = m_Asset.renderPipelineResources.shaders.accumulationCS;
                passData.accumulationKernel            = passData.accumulationCS.FindKernel("KMain");
                passData.subFrameManager               = m_SubFrameManager;
                passData.needExposure                  = needExposure;
                passData.hdCamera                      = hdCamera;
                // Clear then set keywords on the shared compute shader asset at record time,
                // so the state is deterministic for this pass.
                passData.accumulationCS.shaderKeywords = null;
                if (inputFromRadianceTexture)
                {
                    passData.accumulationCS.EnableKeyword("INPUT_FROM_FRAME_TEXTURE");
                }
                passData.input   = builder.ReadTexture(inputTexture);
                passData.output  = builder.WriteTexture(outputTexture);
                passData.history = builder.WriteTexture(history);

                // Deferred execution: this lambda runs later, when the graph is executed.
                builder.SetRenderFunc(
                    (RenderAccumulationPassData data, RenderGraphContext ctx) =>
                {
                    ComputeShader accumulationShader = data.accumulationCS;

                    // Check the validity of the state before moving on with the computation
                    if (!accumulationShader)
                    {
                        return;
                    }

                    // Get the per-camera data
                    int camID            = data.hdCamera.camera.GetInstanceID();
                    Vector4 frameWeights = data.subFrameManager.ComputeFrameWeights(camID);
                    CameraData camData   = data.subFrameManager.GetCameraData(camID);

                    RTHandle input  = data.input;
                    RTHandle output = data.output;

                    // Accumulate the path tracing results
                    ctx.cmd.SetComputeIntParam(accumulationShader, HDShaderIDs._AccumulationFrameIndex, (int)camData.currentIteration);
                    ctx.cmd.SetComputeIntParam(accumulationShader, HDShaderIDs._AccumulationNumSamples, (int)data.subFrameManager.subFrameCount);
                    ctx.cmd.SetComputeTextureParam(accumulationShader, data.accumulationKernel, HDShaderIDs._AccumulatedFrameTexture, data.history);
                    ctx.cmd.SetComputeTextureParam(accumulationShader, data.accumulationKernel, HDShaderIDs._CameraColorTextureRW, output);
                    // Bind the frame texture only when input and output are distinct targets.
                    if (!input.Equals(output))
                    {
                        ctx.cmd.SetComputeTextureParam(accumulationShader, data.accumulationKernel, HDShaderIDs._FrameTexture, input);
                    }
                    ctx.cmd.SetComputeVectorParam(accumulationShader, HDShaderIDs._AccumulationWeights, frameWeights);
                    ctx.cmd.SetComputeIntParam(accumulationShader, HDShaderIDs._AccumulationNeedsExposure, data.needExposure ? 1 : 0);
                    // 8x8 thread groups: round the dispatch up to cover the full resolution.
                    ctx.cmd.DispatchCompute(accumulationShader, data.accumulationKernel, (data.hdCamera.actualWidth + 7) / 8, (data.hdCamera.actualHeight + 7) / 8, data.hdCamera.viewCount);

                    // Increment the iteration counter, if we haven't converged yet
                    if (camData.currentIteration < data.subFrameManager.subFrameCount)
                    {
                        camData.currentIteration++;
                        data.subFrameManager.SetCameraData(camID, camData);
                    }
                });
            }
        }
Example #7
0
        // Adds the RenderGraph pass that denoises the converged path-traced accumulation
        // (via the Unity denoising plugin) and blits the denoised history into
        // outputTexture with exposure applied. Compiles to a no-op when the
        // ENABLE_UNITY_DENOISING_PLUGIN define is absent.
        void RenderDenoisePass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle outputTexture)
        {
#if ENABLE_UNITY_DENOISING_PLUGIN
            using (var builder = renderGraph.AddRenderPass <RenderDenoisePassData>("Denoise Pass", out var passData))
            {
                passData.blitAndExposeCS     = m_Asset.renderPipelineResources.shaders.blitAndExposeCS;
                passData.blitAndExposeKernel = passData.blitAndExposeCS.FindKernel("KMain");
                passData.subFrameManager     = m_SubFrameManager;
                // Note: for now we enable AOVs when temporal is enabled, because this seems to work better with Optix.
                passData.useAOV   = m_PathTracingSettings.useAOVs.value || m_PathTracingSettings.temporal.value;
                // Temporal denoising only in Game view; async completion only in Scene view.
                passData.temporal = m_PathTracingSettings.temporal.value && hdCamera.camera.cameraType == CameraType.Game;
                passData.async    = m_PathTracingSettings.asyncDenoising.value && hdCamera.camera.cameraType == CameraType.SceneView;

                // copy camera params
                passData.camID  = hdCamera.camera.GetInstanceID();
                passData.width  = hdCamera.actualWidth;
                passData.height = hdCamera.actualHeight;
                passData.slices = hdCamera.viewCount;

                // Grab the history buffer
                TextureHandle ptAccumulation = renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.PathTracing));
                TextureHandle denoiseHistory = renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.DenoiseHistory)
                                                                         ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.DenoiseHistory, PathTracingHistoryBufferAllocatorFunction, 1));

                passData.color          = builder.ReadWriteTexture(ptAccumulation);
                passData.outputTexture  = builder.WriteTexture(outputTexture);
                passData.denoiseHistory = builder.ReadTexture(denoiseHistory);

                // Optional guide AOVs (albedo/normal) for higher-quality denoising.
                if (passData.useAOV)
                {
                    TextureHandle albedoHistory = renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.AlbedoAOV));

                    TextureHandle normalHistory = renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.NormalAOV));

                    passData.albedoAOV = builder.ReadTexture(albedoHistory);
                    passData.normalAOV = builder.ReadTexture(normalHistory);
                }

                // Motion vectors ("flow") are only needed for temporal denoising.
                if (passData.temporal)
                {
                    TextureHandle motionVectorHistory = renderGraph.ImportTexture(hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.MotionVectorAOV));
                    passData.motionVectorAOV = builder.ReadTexture(motionVectorHistory);
                }

                // Deferred execution: runs when the graph executes.
                // NOTE(review): this lambda reads m_PathTracingSettings and m_SubFrameManager
                // directly instead of going through passData — confirm this is intentional,
                // as it captures live pipeline state at execution time.
                builder.SetRenderFunc(
                    (RenderDenoisePassData data, RenderGraphContext ctx) =>
                {
                    CameraData camData = data.subFrameManager.GetCameraData(data.camID);

                    camData.denoiser.Init((DenoiserType)m_PathTracingSettings.denoising.value, data.width, data.height);

                    // Only denoise once the accumulation has converged and a denoiser is selected.
                    if (camData.currentIteration >= (data.subFrameManager.subFrameCount) && camData.denoiser.type != DenoiserType.None)
                    {
                        if (!camData.validDenoiseHistory)
                        {
                            // Submit a new denoise request if none is in flight.
                            if (!camData.activeDenoiseRequest)
                            {
                                camData.denoiser.DenoiseRequest(ctx.cmd, "color", data.color);

                                if (data.useAOV)
                                {
                                    camData.denoiser.DenoiseRequest(ctx.cmd, "albedo", data.albedoAOV);
                                    camData.denoiser.DenoiseRequest(ctx.cmd, "normal", data.normalAOV);
                                }

                                if (data.temporal)
                                {
                                    camData.denoiser.DenoiseRequest(ctx.cmd, "flow", data.motionVectorAOV);
                                }
                                camData.activeDenoiseRequest  = true;
                                camData.discardDenoiseRequest = false;
                            }

                            if (!data.async)
                            {
                                // Synchronous path: block until the denoiser finishes this frame.
                                camData.denoiser.WaitForCompletion(ctx.renderContext, ctx.cmd);

                                Denoiser.State ret           = camData.denoiser.GetResults(ctx.cmd, data.denoiseHistory);
                                camData.validDenoiseHistory  = (ret == Denoiser.State.Success);
                                camData.activeDenoiseRequest = false;
                            }
                            else
                            {
                                // Async path: poll; collect results only once the request is no longer executing,
                                // and discard them if the request was invalidated in the meantime.
                                if (camData.activeDenoiseRequest && camData.denoiser.QueryCompletion() != Denoiser.State.Executing)
                                {
                                    Denoiser.State ret           = camData.denoiser.GetResults(ctx.cmd, data.denoiseHistory);
                                    camData.validDenoiseHistory  = (ret == Denoiser.State.Success) && (camData.discardDenoiseRequest == false);
                                    camData.activeDenoiseRequest = false;
                                }
                            }

                            m_SubFrameManager.SetCameraData(data.camID, camData);
                        }

                        if (camData.validDenoiseHistory)
                        {
                            // Blit the denoised image from the history buffer and apply exposure.
                            ctx.cmd.SetComputeTextureParam(data.blitAndExposeCS, data.blitAndExposeKernel, HDShaderIDs._InputTexture, data.denoiseHistory);
                            ctx.cmd.SetComputeTextureParam(data.blitAndExposeCS, data.blitAndExposeKernel, HDShaderIDs._OutputTexture, data.outputTexture);
                            ctx.cmd.DispatchCompute(data.blitAndExposeCS, data.blitAndExposeKernel, (data.width + 7) / 8, (data.height + 7) / 8, data.slices);
                        }
                    }
                });
            }
#endif
        }