void RenderReflectionsPerformance(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount, bool transparent)
        {
            // Renders ray-traced reflections in "performance" mode:
            //   1) a compute pass writes reflection ray directions (full or half resolution),
            //   2) a deferred ray-traced lighting pass shades those rays,
            //   3) an upscale/integration filter composites the result into outputTexture,
            //   4) optionally (opaque only, when denoising is enabled) a temporal denoiser runs.
            // renderContext and frameCount are unused here; they are kept for signature
            // compatibility with the other reflection rendering entry points.

            // Fetch the required resources.
            // NOTE(review): the previously fetched reflectionRaytracingRT shader and the
            // LightCluster/RayTracingSettings volume components were never used in this
            // method and have been removed.
            BlueNoise     blueNoise           = GetBlueNoiseManager();
            ComputeShader reflectionShaderCS  = m_Asset.renderPipelineRayTracingResources.reflectionRaytracingCS;
            ComputeShader reflectionFilter    = m_Asset.renderPipelineRayTracingResources.reflectionBilateralFilterCS;
            RTHandle      intermediateBuffer0 = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA0);
            RTHandle      intermediateBuffer1 = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA1);

            // Clear the direction buffer before the compute pass writes into it.
            CoreUtils.SetRenderTarget(cmd, intermediateBuffer1, m_SharedRTManager.GetDepthStencilBuffer(), ClearFlag.Color, clearColor: Color.black);

            // Fetch the reflection settings for this camera.
            var settings = hdCamera.volumeStack.GetComponent<ScreenSpaceReflection>();

            // Texture dimensions
            int texWidth  = hdCamera.actualWidth;
            int texHeight = hdCamera.actualHeight;

            // Evaluate the dispatch parameters (8x8 thread groups).
            int areaTileSize = 8;
            int numTilesXHR = 0, numTilesYHR = 0;
            int currentKernel = 0;
            RenderTargetIdentifier clearCoatMaskTexture;

            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingIntegrateReflection)))
            {
                // Pick the direction-generation kernel matching resolution and surface type.
                if (settings.fullResolution.value)
                {
                    currentKernel = transparent ? m_RaytracingReflectionsTransparentFullResKernel : m_RaytracingReflectionsFullResKernel;
                }
                else
                {
                    currentKernel = transparent ? m_RaytracingReflectionsTransparentHalfResKernel : m_RaytracingReflectionsHalfResKernel;
                }

                // Inject the ray-tracing sampling data (8 samples per pixel dithered RNG).
                blueNoise.BindDitheredRNGData8SPP(cmd);

                // Bind all the required textures.
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                // Clear coat mask is only available from the GBuffer in deferred mode; fall back to black otherwise.
                clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);
                cmd.SetComputeIntParam(reflectionShaderCS, HDShaderIDs._SsrStencilBit, (int)StencilUsage.TraceReflectionRay);
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._StencilTexture, m_SharedRTManager.GetDepthStencilBuffer(), 0, RenderTextureSubElement.Stencil);

                // Bind all the required scalars through the ray tracing constant buffer.
                m_ShaderVariablesRayTracingCB._RaytracingIntensityClamp = settings.clampValue.value;
                m_ShaderVariablesRayTracingCB._RaytracingIncludeSky     = settings.reflectSky.value ? 1 : 0;
                ConstantBuffer.PushGlobal(cmd, m_ShaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                // Bind the output buffer that receives the generated ray directions.
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._RaytracingDirectionBuffer, intermediateBuffer1);

                if (settings.fullResolution.value)
                {
                    // Full-resolution dispatch.
                    numTilesXHR = (texWidth + (areaTileSize - 1)) / areaTileSize;
                    numTilesYHR = (texHeight + (areaTileSize - 1)) / areaTileSize;
                }
                else
                {
                    // Half-resolution dispatch.
                    numTilesXHR = (texWidth / 2 + (areaTileSize - 1)) / areaTileSize;
                    numTilesYHR = (texHeight / 2 + (areaTileSize - 1)) / areaTileSize;
                }

                // Compute the ray directions.
                cmd.DispatchCompute(reflectionShaderCS, currentKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);

                // Prepare the components for the deferred lighting: directions come from
                // intermediateBuffer1, lighting results are written to intermediateBuffer0.
                DeferredLightingRTParameters deferredParamters = PrepareReflectionDeferredLightingRTParameters(hdCamera);
                DeferredLightingRTResources  deferredResources = PrepareDeferredLightingRTResources(hdCamera, intermediateBuffer1, intermediateBuffer0);

                // Evaluate the deferred lighting for the traced rays.
                RenderRaytracingDeferredLighting(cmd, deferredParamters, deferredResources);

                // Fetch the right upscale/integration filter kernel to use.
                if (settings.fullResolution.value)
                {
                    currentKernel = reflectionFilter.FindKernel("ReflectionIntegrationUpscaleFullRes");
                }
                else
                {
                    currentKernel = reflectionFilter.FindKernel("ReflectionIntegrationUpscaleHalfRes");
                }

                // Inject all the parameters for the filter compute pass.
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrLightingTextureRW, intermediateBuffer0);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrHitPointTexture, intermediateBuffer1);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._BlueNoiseTexture, blueNoise.textureArray16RGB);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_RaytracingReflectionTexture", outputTexture);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);
                cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._SpatialFilterRadius, settings.upscaleRadius.value);
                cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._RaytracingDenoiseRadius, settings.denoise.value ? settings.denoiserRadius.value : 0);

                // The filter always runs at full resolution.
                numTilesXHR = (texWidth + (areaTileSize - 1)) / areaTileSize;
                numTilesYHR = (texHeight + (areaTileSize - 1)) / areaTileSize;

                // Bind the right texture for clear coat support.
                clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

                // Compute the filtered texture.
                cmd.DispatchCompute(reflectionFilter, currentKernel, numTilesXHR, numTilesYHR, hdCamera.viewCount);
            }

            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingFilterReflection)))
            {
                // Temporal denoising is only applied for opaque reflections.
                if (settings.denoise.value && !transparent)
                {
                    // Grab (or lazily allocate) the reflection history buffer.
                    RTHandle reflectionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                                                 ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

                    float historyValidity = 1.0f;
#if UNITY_HDRP_DXR_TESTS_DEFINE
                    // Inside graphics tests the history is always considered invalid so
                    // results are deterministic frame to frame.
                    if (Application.isPlaying)
                    {
                        historyValidity = 0.0f;
                    }
                    else
#endif
                    // We need to check if something invalidated the history buffers.
                    historyValidity *= ValidRayTracingHistory(hdCamera) ? 1.0f : 0.0f;

                    HDReflectionDenoiser reflectionDenoiser = GetReflectionDenoiser();
                    reflectionDenoiser.DenoiseBuffer(cmd, hdCamera, settings.denoiserRadius.value, outputTexture, reflectionHistory, intermediateBuffer0, historyValidity: historyValidity);
                    HDUtils.BlitCameraTexture(cmd, intermediateBuffer0, outputTexture);
                }
            }
        }
Exemple #2
0
 // Per-camera end-of-render callback: the render context is unused, we simply
 // forward the camera to the post-render hook.
 private void CameraEndRendering(ScriptableRenderContext context, Camera camera) => CameraPostRender(camera);
Exemple #3
0
 // End-of-frame callback for all cameras. Intentionally empty: this pipeline has
 // no per-frame teardown work, but the hook is kept so it can be (un)subscribed
 // symmetrically with the other camera callbacks.
 private void CameraEndFrameRendering(ScriptableRenderContext ctx, Camera[] cameras)
 {
 }
        /// <summary>
        /// Renders ray-traced reflections for the given camera using the legacy
        /// HDRaytracingEnvironment path: a DXR dispatch generates the reflection
        /// lighting, then (in quarter-res mode) a bilateral spatial filter plus a
        /// temporal accumulation filter produce the final texture.
        /// Silently returns if the ray-tracing environment, shaders, acceleration
        /// structure or light cluster are unavailable.
        /// </summary>
        public void RenderReflections(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, uint frameCount)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironement = m_RaytracingManager.CurrentEnvironment();
            BlueNoise        blueNoise             = m_RaytracingManager.GetBlueNoiseManager();
            ComputeShader    bilateralFilter       = m_PipelineAsset.renderPipelineResources.shaders.reflectionBilateralFilterCS;
            RaytracingShader reflectionShader      = m_PipelineAsset.renderPipelineResources.shaders.reflectionRaytracing;
            bool             missingResources      = rtEnvironement == null || blueNoise == null || bilateralFilter == null || reflectionShader == null ||
                                                     m_PipelineResources.textures.owenScrambledTex == null || m_PipelineResources.textures.scramblingTex == null;

            // Try to grab the acceleration structure and the list of HD lights for the target camera
            RaytracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(hdCamera);
            HDRaytracingLightCluster        lightCluster          = m_RaytracingManager.RequestLightCluster(hdCamera);

            // If no acceleration structure available, end it now
            if (accelerationStructure == null || lightCluster == null || missingResources)
            {
                return;
            }

            // Select the ray generation shader based on the quality mode.
            string targetRayGen = "";

            switch (rtEnvironement.reflQualityMode)
            {
            case HDRaytracingEnvironment.ReflectionsQuality.QuarterRes:
                targetRayGen = m_RayGenHalfResName;
                break;

            case HDRaytracingEnvironment.ReflectionsQuality.Integration:
                targetRayGen = m_RayGenIntegrationName;
                break;
            }

            // Define the shader pass to use for the reflection pass
            cmd.SetRaytracingShaderPass(reflectionShader, "ReflectionDXR");

            // Set the acceleration structure for the pass
            cmd.SetRaytracingAccelerationStructure(reflectionShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
            cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

            // Global reflection parameters
            cmd.SetRaytracingFloatParams(reflectionShader, HDShaderIDs._RaytracingIntensityClamp, rtEnvironement.reflClampValue);
            cmd.SetRaytracingFloatParams(reflectionShader, HDShaderIDs._RaytracingReflectionMinSmoothness, rtEnvironement.reflMinSmoothness);
            cmd.SetRaytracingFloatParams(reflectionShader, HDShaderIDs._RaytracingReflectionMaxDistance, rtEnvironement.reflBlendDistance);

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironement.rayBias);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, rtEnvironement.reflRayLength);
            cmd.SetRaytracingIntParams(reflectionShader, HDShaderIDs._RaytracingNumSamples, rtEnvironement.reflNumMaxSamples);
            // Use the TAA frame index when available so sampling stays aligned with TAA jitter.
            int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;

            cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

            // Set the data for the ray generation
            cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._SsrLightingTextureRW, m_LightingTexture);
            cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._SsrHitPointTexture, m_HitPdfTexture);
            cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());

            // Set ray count tex
            cmd.SetRaytracingIntParam(reflectionShader, HDShaderIDs._RayCountEnabled, m_RaytracingManager.rayCountManager.rayCountEnabled);
            cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._RayCountTexture, m_RaytracingManager.rayCountManager.rayCountTex);

            // Compute the pixel spread value (angle subtended by one pixel at the camera FOV).
            float pixelSpreadAngle = Mathf.Atan(2.0f * Mathf.Tan(hdCamera.camera.fieldOfView * Mathf.PI / 360.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight));

            cmd.SetRaytracingFloatParam(reflectionShader, HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

            // LightLoop data
            cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
            cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
            cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
            cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
            cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, rtEnvironement.maxNumLightsPercell);
            cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
            cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

            // Evaluate the clear coat mask texture based on the lit shader mode
            RenderTargetIdentifier clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : Texture2D.blackTexture;

            cmd.SetRaytracingTextureParam(reflectionShader, targetRayGen, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

            // Set the data for the ray miss
            cmd.SetRaytracingTextureParam(reflectionShader, m_MissShaderName, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

            // Compute the actual resolution that is needed based on the quality
            uint widthResolution = 1, heightResolution = 1;

            switch (rtEnvironement.reflQualityMode)
            {
            case HDRaytracingEnvironment.ReflectionsQuality.QuarterRes:
                widthResolution  = (uint)hdCamera.actualWidth / 2;
                heightResolution = (uint)hdCamera.actualHeight / 2;
                break;

            case HDRaytracingEnvironment.ReflectionsQuality.Integration:
                widthResolution  = (uint)hdCamera.actualWidth;
                heightResolution = (uint)hdCamera.actualHeight;
                break;
            }

            // Run the ray tracing dispatch
            cmd.DispatchRays(reflectionShader, targetRayGen, widthResolution, heightResolution, 1);

            using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
            {
                switch (rtEnvironement.reflQualityMode)
                {
                case HDRaytracingEnvironment.ReflectionsQuality.QuarterRes:
                {
                    // Fetch the right filter to use
                    int currentKernel = bilateralFilter.FindKernel("RaytracingReflectionFilter");

                    // Inject all the parameters for the compute
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._SsrLightingTextureRW, m_LightingTexture);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._SsrHitPointTexture, m_HitPdfTexture);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_NoiseTexture", blueNoise.textureArray16RGB);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_VarianceTexture", m_VarianceBuffer);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_MinColorRangeTexture", m_MinBoundBuffer);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_MaxColorRangeTexture", m_MaxBoundBuffer);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_RaytracingReflectionTexture", outputTexture);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);
                    cmd.SetComputeIntParam(bilateralFilter, HDShaderIDs._SpatialFilterRadius, rtEnvironement.reflSpatialFilterRadius);

                    // Texture dimensions.
                    // FIX(review): texHeight previously read outputTexture.rt.width, which
                    // produced wrong dispatch dimensions for non-square targets.
                    int texWidth  = outputTexture.rt.width;
                    int texHeight = outputTexture.rt.height;

                    // Evaluate the dispatch parameters (spatial filter runs at half resolution)
                    int areaTileSize = 8;
                    int numTilesXHR  = (texWidth / 2 + (areaTileSize - 1)) / areaTileSize;
                    int numTilesYHR  = (texHeight / 2 + (areaTileSize - 1)) / areaTileSize;

                    // Bind the right texture for clear coat support
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

                    // Compute the spatially filtered texture
                    cmd.DispatchCompute(bilateralFilter, currentKernel, numTilesXHR, numTilesYHR, 1);

                    // Temporal accumulation runs at full resolution.
                    int numTilesXFR = (texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesYFR = (texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Grab (or lazily allocate) the reflection history buffer.
                    RTHandleSystem.RTHandle history = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                                                      ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

                    // Fetch the right filter to use
                    currentKernel = bilateralFilter.FindKernel("TemporalAccumulationFilter");
                    cmd.SetComputeFloatParam(bilateralFilter, HDShaderIDs._TemporalAccumuationWeight, rtEnvironement.reflTemporalAccumulationWeight);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._AccumulatedFrameTexture, history);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, HDShaderIDs._CurrentFrameTexture, outputTexture);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_MinColorRangeTexture", m_MinBoundBuffer);
                    cmd.SetComputeTextureParam(bilateralFilter, currentKernel, "_MaxColorRangeTexture", m_MaxBoundBuffer);
                    cmd.DispatchCompute(bilateralFilter, currentKernel, numTilesXFR, numTilesYFR, 1);
                }
                break;

                case HDRaytracingEnvironment.ReflectionsQuality.Integration:
                {
                    // Integration mode needs no filtering: copy the lighting straight to the output.
                    HDUtils.BlitCameraTexture(cmd, hdCamera, m_LightingTexture, outputTexture);
                }
                break;
                }
            }
        }
Exemple #5
0
    // Renders a single camera for this custom render pipeline: applies per-camera
    // settings, culls, sets up lighting/shadows and post FX, draws geometry, and
    // submits the context. Relies on instance state (context, camera, buffer,
    // lighting, postFXStack, bufferSize, ...) owned by this class.
    public void Render(ScriptableRenderContext IN_context, 
        Camera IN_camera, CameraBufferSettings cameraBufferSettings, bool useDynameicBatching, 
        bool useGPUInstancing, bool useLightPerObject,
        ShadowSettings shadowSetting, PostFXSettings postFXSettings, int colorLUTResolution)
    {
        this.context = IN_context;
        this.camera = IN_camera;

        // Per-camera overrides (blend mode, post FX settings) come from an optional
        // CustomRenderPipelineCamera component; fall back to the pipeline defaults.
        var crpCamera = camera.GetComponent<CustomRenderPipelineCamera>();
        CameraSettings cameraSettings = crpCamera ? crpCamera.Settings : defaultCameraSettings;

        // Decide whether to copy depth/color so shaders can access the current buffers;
        // reflection cameras use their own dedicated toggles.
        if (camera.cameraType == CameraType.Reflection)
        {
            useDepthTexture = cameraBufferSettings.copyDepthReflection;
            useColorTexture = cameraBufferSettings.copyColorReflection;
        }
        else 
        {
            useDepthTexture = cameraBufferSettings.copyDepth && cameraSettings.copyDepth;
            useColorTexture = cameraBufferSettings.copyColor && cameraSettings.copyColor;
        }

        if (cameraSettings.overridePostFX)
        {
            // This camera overrides the pipeline-wide post FX settings.
            postFXSettings = cameraSettings.postFXSettings;
        }

        // Render scale only takes effect outside a small dead zone around 1,
        // so near-identity scales don't trigger pointless rescaling.
        float renderScale = cameraSettings.GetRenderScale(cameraBufferSettings.renderScale);
        useScaledRendering = renderScale < 0.99f || renderScale > 1.01f;

        // Change the command buffer name to the camera name (profiling/debugging aid).
        PrepareBuffer();

        // Emit UI (world geometry) into the scene camera, so UI is visible in the editor view.
        PrepareForSceneView();
        if (!Cull(shadowSetting.maxDistance))
        {
            return;
        }

        this.useHDR = cameraBufferSettings.allowHDR && camera.allowHDR;

        // Calculate and store the buffer size, applying the clamped render scale if active.
        if (useScaledRendering)
        {
            renderScale = Mathf.Clamp(renderScale, renderScaleMin, renderScaleMax);
            bufferSize.x = (int)(camera.pixelWidth * renderScale);
            bufferSize.y = (int)(camera.pixelHeight * renderScale);
        }
        else 
        {
            bufferSize.x = camera.pixelWidth;
            bufferSize.y = camera.pixelHeight;
        }

        buffer.BeginSample(SampleName);//Include lights and shadow rendering in the main camera profile scope

        // Pass the buffer size to the GPU so that sampling the color and depth textures
        // refers to the correct buffer size (xy = texel size, zw = pixel dimensions).
        buffer.SetGlobalVector(bufferSizeId, new Vector4(
            (float)1/bufferSize.x, (float)1 /bufferSize.y, bufferSize.x, bufferSize.y
        ));


        ExecuteBuffer();
        // Transfer directional light data to the GPU and
        // set up shadow render targets and shadow rendering.
        lighting.Setup(context, cullingResults, shadowSetting, useLightPerObject,
            cameraSettings.maskLights ? cameraSettings.RenderingLayerMask : -1);

        // FXAA is enabled per camera.
        cameraBufferSettings.fxaa.enabled = cameraSettings.allowFXAA;
        // Set up the post FX stack for this camera.
        postFXStack.Setup(context, camera, 
            bufferSize, postFXSettings, cameraSettings.keepAlpha, useHDR, 
            colorLUTResolution, cameraSettings.finalBlendMode,
            cameraBufferSettings.bicubicResampling,
            cameraBufferSettings.fxaa);

        buffer.EndSample(SampleName);

        // Set up the render target for normal object rendering.
        Setup();
        DrawVisibleGeometry(useDynameicBatching, useGPUInstancing, useLightPerObject, cameraSettings.RenderingLayerMask);

        // Legacy (unsupported) shaders are drawn on top of the transparent objects;
        // this looks wrong, but unsupported shaders are rendered for visibility only.
        DrawUnsupportedShaders();

        DrawGizmosBeforeFX();

        if (postFXStack.IsActive)
        {
            postFXStack.Render(colorAttachmentId);
        }
        else if (useIntermediateBuffer)
        {
            // Post FX is inactive, so we must copy the intermediate buffer to the final
            // target ourselves, otherwise nothing reaches the camera target.
            //Draw(colorAttachmentId, BuiltinRenderTextureType.CameraTarget);
            DrawFinal(cameraSettings.finalBlendMode);
            ExecuteBuffer();
        }

        DrawGizmosAfterFX();

        Cleanup();

        // All commands are buffered; rendering only begins after Submit.
        Submit();
    }
        /// <summary>
        /// Enqueues all render passes needed to draw this camera: shadow casters,
        /// optional depth prepass, opaques, skybox, depth/color copies, transparents,
        /// post-processing, capture and the final blit. The order of EnqueuePass calls
        /// defines the frame layout.
        /// </summary>
        public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            Camera camera = renderingData.cameraData.camera;
            RenderTextureDescriptor cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;

            // Special path for depth only offscreen cameras. Only write opaques + transparents.
            bool isOffscreenDepthTexture = camera.targetTexture != null && camera.targetTexture.format == RenderTextureFormat.Depth;

            if (isOffscreenDepthTexture)
            {
                ConfigureCameraTarget(BuiltinRenderTextureType.CameraTarget, BuiltinRenderTextureType.CameraTarget);

                // Renderer features still get a chance to inject passes on this path.
                for (int i = 0; i < rendererFeatures.Count; ++i)
                {
                    rendererFeatures[i].AddRenderPasses(this, ref renderingData);
                }

                EnqueuePass(m_RenderOpaqueForwardPass);
                EnqueuePass(m_DrawSkyboxPass);
                EnqueuePass(m_RenderTransparentForwardPass);
                return;
            }

            bool mainLightShadows            = m_MainLightShadowCasterPass.Setup(ref renderingData);
            bool additionalLightShadows      = m_AdditionalLightsShadowCasterPass.Setup(ref renderingData);
            bool resolveShadowsInScreenSpace = mainLightShadows && renderingData.shadowData.requiresScreenSpaceShadowResolve;

            // Depth prepass is generated in the following cases:
            // - We resolve shadows in screen space
            // - Scene view camera always requires a depth texture. We do a depth pre-pass to simplify it and it shouldn't matter much for editor.
            // - If game or offscreen camera requires it we check if we can copy the depth from the rendering opaques pass and use that instead.
            bool requiresDepthPrepass = renderingData.cameraData.isSceneViewCamera ||
                                        (renderingData.cameraData.requiresDepthTexture && (!CanCopyDepth(ref renderingData.cameraData)));

            requiresDepthPrepass |= resolveShadowsInScreenSpace;

            // TODO: There's an issue in multiview and depth copy pass. Atm forcing a depth prepass on XR until
            // we have a proper fix.
            if (renderingData.cameraData.isStereoEnabled && renderingData.cameraData.requiresDepthTexture)
            {
                requiresDepthPrepass = true;
            }

            // Any renderer feature forces an intermediate color texture so features can read/write it.
            bool createColorTexture = RequiresIntermediateColorTexture(ref renderingData, cameraTargetDescriptor) ||
                                      rendererFeatures.Count != 0;

            // If camera requires depth and there's no depth pre-pass we create a depth texture that can be read
            // later by effect requiring it.
            bool createDepthTexture   = renderingData.cameraData.requiresDepthTexture && !requiresDepthPrepass;
            bool postProcessEnabled   = renderingData.cameraData.postProcessEnabled;
            bool hasOpaquePostProcess = postProcessEnabled &&
                                        renderingData.cameraData.postProcessLayer.HasOpaqueOnlyEffects(RenderingUtils.postProcessRenderContext);

            m_ActiveCameraColorAttachment = (createColorTexture) ? m_CameraColorAttachment : RenderTargetHandle.CameraTarget;
            m_ActiveCameraDepthAttachment = (createDepthTexture) ? m_CameraDepthAttachment : RenderTargetHandle.CameraTarget;
            if (createColorTexture || createDepthTexture)
            {
                CreateCameraRenderTarget(context, ref renderingData.cameraData);
            }
            ConfigureCameraTarget(m_ActiveCameraColorAttachment.Identifier(), m_ActiveCameraDepthAttachment.Identifier());

            // Let renderer features add their passes before we sanitize the queue.
            for (int i = 0; i < rendererFeatures.Count; ++i)
            {
                rendererFeatures[i].AddRenderPasses(this, ref renderingData);
            }

            int count = activeRenderPassQueue.Count;

            // Remove null passes a feature may have enqueued (iterate backwards while removing).
            for (int i = count - 1; i >= 0; i--)
            {
                if (activeRenderPassQueue[i] == null)
                {
                    activeRenderPassQueue.RemoveAt(i);
                }
            }
            bool hasAfterRendering = activeRenderPassQueue.Find(x => x.renderPassEvent == RenderPassEvent.AfterRendering) != null;

            if (mainLightShadows)
            {
                EnqueuePass(m_MainLightShadowCasterPass);
            }

            if (additionalLightShadows)
            {
                EnqueuePass(m_AdditionalLightsShadowCasterPass);
            }

            if (requiresDepthPrepass)
            {
                m_DepthPrepass.Setup(cameraTargetDescriptor, m_DepthTexture);
                EnqueuePass(m_DepthPrepass);
            }

            if (resolveShadowsInScreenSpace)
            {
                m_ScreenSpaceShadowResolvePass.Setup(cameraTargetDescriptor);
                EnqueuePass(m_ScreenSpaceShadowResolvePass);
            }

            EnqueuePass(m_RenderOpaqueForwardPass);

            if (hasOpaquePostProcess)
            {
                m_OpaquePostProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, m_ActiveCameraColorAttachment);
            }

            if (camera.clearFlags == CameraClearFlags.Skybox && RenderSettings.skybox != null)
            {
                EnqueuePass(m_DrawSkyboxPass);
            }

            // If a depth texture was created we necessarily need to copy it, otherwise we could have render it to a renderbuffer
            if (createDepthTexture)
            {
                m_CopyDepthPass.Setup(m_ActiveCameraDepthAttachment, m_DepthTexture);
                EnqueuePass(m_CopyDepthPass);
            }

            if (renderingData.cameraData.requiresOpaqueTexture)
            {
                // TODO: Downsampling method should be stored in the renderer instead of in the asset.
                // We need to migrate this data to renderer. For now, we query the method in the active asset.
                Downsampling downsamplingMethod = LightweightRenderPipeline.asset.opaqueDownsampling;
                m_CopyColorPass.Setup(m_ActiveCameraColorAttachment.Identifier(), m_OpaqueColor, downsamplingMethod);
                EnqueuePass(m_CopyColorPass);
            }

            EnqueuePass(m_RenderTransparentForwardPass);

            bool afterRenderExists = renderingData.cameraData.captureActions != null ||
                                     hasAfterRendering;

            // If we have additional after-rendering filters or capture actions,
            // we need to stay in an intermediate render texture.
            if (afterRenderExists)
            {
                // Perform post-processing with source and destination being the same target.
                if (postProcessEnabled)
                {
                    m_PostProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, m_ActiveCameraColorAttachment);
                    EnqueuePass(m_PostProcessPass);
                }

                // Now blit into the final target (capture first, if requested).
                if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
                {
                    if (renderingData.cameraData.captureActions != null)
                    {
                        m_CapturePass.Setup(m_ActiveCameraColorAttachment);
                        EnqueuePass(m_CapturePass);
                    }

                    m_FinalBlitPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment);
                    EnqueuePass(m_FinalBlitPass);
                }
            }
            else
            {
                // No after-rendering work: post-processing (if any) can write straight to
                // the camera target; otherwise a final blit resolves the intermediate texture.
                if (postProcessEnabled)
                {
                    m_PostProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, RenderTargetHandle.CameraTarget);
                    EnqueuePass(m_PostProcessPass);
                }
                else if (m_ActiveCameraColorAttachment != RenderTargetHandle.CameraTarget)
                {
                    m_FinalBlitPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment);
                    EnqueuePass(m_FinalBlitPass);
                }
            }

#if UNITY_EDITOR
            // Scene view needs the depth texture copied so editor tools can read it.
            if (renderingData.cameraData.isSceneViewCamera)
            {
                m_SceneViewDepthCopyPass.Setup(m_DepthTexture);
                EnqueuePass(m_SceneViewDepthCopyPass);
            }
#endif
        }
        /// <summary>
        /// Ray-traced reflections, "T2" (integration) path: a single full-resolution ray dispatch
        /// followed by an optional denoise before the result lands in <paramref name="outputTexture"/>.
        /// Early-outs when no ray-tracing environment is available.
        /// </summary>
        /// <param name="hdCamera">Camera being rendered; supplies dimensions and frame settings.</param>
        /// <param name="cmd">Command buffer the dispatch/filter commands are recorded into.</param>
        /// <param name="outputTexture">Destination for the (optionally denoised) reflection signal.</param>
        /// <param name="renderContext">SRP render context (not used directly by this method).</param>
        /// <param name="frameCount">Frame index (not used directly by this method).</param>
        public void RenderReflectionsT2(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironment = m_RayTracingManager.CurrentEnvironment();

            // If no acceleration structure available, end it now
            if (rtEnvironment == null)
            {
                return;
            }

            // Fetch the shader that we will be using.
            // (FIX: removed the unused `reflectionFilter` ComputeShader local — it was fetched but never referenced.)
            RayTracingShader reflectionShader = m_Asset.renderPipelineRayTracingResources.reflectionRaytracingRT;

            var          settings             = VolumeManager.instance.stack.GetComponent <ScreenSpaceReflection>();
            LightCluster lightClusterSettings = VolumeManager.instance.stack.GetComponent <LightCluster>();

            // Bind all the required data for ray tracing
            BindRayTracedReflectionData(cmd, hdCamera, rtEnvironment, reflectionShader, settings, lightClusterSettings);

            // Force to disable specular lighting while integrating reflections so the rays
            // do not pick up the specular term recursively.
            cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, 0);

            // Run the computation: one ray per pixel at full camera resolution.
            cmd.DispatchRays(reflectionShader, m_RayGenIntegrationName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);

            // Restore the previous state of specular lighting
            cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, hdCamera.frameSettings.IsEnabled(FrameSettingsField.SpecularLighting) ? 1 : 0);

            using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
            {
                if (settings.enableFilter.value)
                {
                    // Grab the history buffer (allocated lazily on first use)
                    RTHandleSystem.RTHandle reflectionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                                                                ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

                    HDSimpleDenoiser simpleDenoiser = m_RayTracingManager.GetSimpleDenoiser();
                    simpleDenoiser.DenoiseBuffer(cmd, hdCamera, m_ReflIntermediateTexture0, reflectionHistory, outputTexture, settings.filterRadius.value, singleChannel: false);
                }
                else
                {
                    // No filtering requested: copy the raw ray-traced signal straight to the output.
                    HDUtils.BlitCameraTexture(cmd, m_ReflIntermediateTexture0, outputTexture);
                }
            }
        }
Exemple #8
0
    /// <summary>
    /// Dual-filter bloom: a bright pass, <c>passes</c> downsample steps, then upsample steps,
    /// composited into a global "_BloomResult" texture. Records into the static
    /// <c>bloomBuffer</c> command buffer, executes it on <paramref name="context"/>, then clears it.
    /// </summary>
    /// <param name="context">Render context the command buffer is executed on.</param>
    /// <param name="screenWidth">Full-resolution width of the source frame.</param>
    /// <param name="screenHeight">Full-resolution height of the source frame.</param>
    /// <param name="pipeLineAsset">Pipeline asset supplying the material, pass count and blur offsets.</param>
    public static void BloomPost(ScriptableRenderContext context, int screenWidth, int screenHeight, MobileBaseSRPAsset pipeLineAsset)
    {
        Material dualFilterMat  = pipeLineAsset.DualFiltering;
        int      blurOffsetDown = Shader.PropertyToID("_BlurOffsetDown");
        int      blurOffsetUp   = Shader.PropertyToID("_BlurOffsetUp");
        int      brightId       = Shader.PropertyToID("_BrightID");
        int      bloomResult    = Shader.PropertyToID("_BloomResult");
        int      passes         = pipeLineAsset.BloomPasses;

        int[] downId = new int[passes];
        int[] upId   = new int[passes];

        //Set Offset to the shader
        bloomBuffer.SetGlobalFloat(blurOffsetDown, pipeLineAsset.BlurOffsetDown);
        bloomBuffer.SetGlobalFloat(blurOffsetUp, pipeLineAsset.BlurOffsetUp);

        //IDs Loop
        for (int i = 0; i < passes; i++)
        {
            downId[i] = Shader.PropertyToID("_DownID" + i);
            upId[i]   = Shader.PropertyToID("_UpID" + i);
        }

        //Bright Pass (material pass 2) — extracts the bright areas of the frame buffer
        bloomBuffer.GetTemporaryRT(brightId, screenWidth, screenHeight, 0, FilterMode.Bilinear, RenderTextureFormat.ARGBHalf);
        bloomBuffer.SetGlobalTexture("_BrightPassTex", MobileBaseSRPCommonValues.RenderTexture.FrameBufferId);
        bloomBuffer.Blit(null, brightId, dualFilterMat, 2);

        //DownScale Pass (material pass 0) — each level halves the resolution (width/height >> i)
        for (int i = 0; i < passes; i++)
        {
            bloomBuffer.GetTemporaryRT(downId[i], screenWidth >> i, screenHeight >> i, 0, FilterMode.Bilinear, RenderTextureFormat.ARGBHalf);
            if (i == 0)
            {
                bloomBuffer.SetGlobalTexture("_DownScalePassTex", brightId);
                bloomBuffer.Blit(null, downId[i], dualFilterMat, 0);
            }
            else
            {
                bloomBuffer.SetGlobalTexture("_DownScalePassTex", downId[i - 1]);
                bloomBuffer.Blit(null, downId[i], dualFilterMat, 0);
            }
        }

        //UpScale Pass (material pass 1) — walks back up the chain; level 0 is produced by the final result blit below
        for (int i = passes - 1; i > 0; i--)
        {
            bloomBuffer.GetTemporaryRT(upId[i], screenWidth >> i, screenHeight >> i, 0, FilterMode.Bilinear, RenderTextureFormat.ARGBHalf);
            if (i == passes - 1)
            {
                bloomBuffer.SetGlobalTexture("_UpscalePassTex", downId[i]);
                bloomBuffer.SetGlobalTexture("_DownPass", downId[i]);
                bloomBuffer.Blit(null, upId[i], dualFilterMat, 1);
            }
            else
            {
                bloomBuffer.SetGlobalTexture("_UpscalePassTex", upId[i + 1]);
                bloomBuffer.SetGlobalTexture("_DownPass", downId[i]);
                bloomBuffer.Blit(null, upId[i], dualFilterMat, 1);
            }
        }

        // FIX: the original unconditionally read upId[1], which is out of range (and never
        // allocated) when passes < 2. Fall back to the deepest valid source in that case.
        int resultSource = passes > 1 ? upId[1] : (passes == 1 ? downId[0] : brightId);
        bloomBuffer.GetTemporaryRT(bloomResult, screenWidth * 2, screenHeight * 2, 0, FilterMode.Bilinear, RenderTextureFormat.ARGBHalf);
        bloomBuffer.Blit(resultSource, bloomResult, dualFilterMat, 1);
        bloomBuffer.SetGlobalTexture("_BloomResult", bloomResult);

        //CleanUp Chain
        // FIX: also release the bright-pass RT — it was allocated above but never released (leak).
        // NOTE(review): bloomResult is deliberately NOT released here; presumably it is consumed
        // later via the "_BloomResult" global — confirm against the composite pass.
        bloomBuffer.ReleaseTemporaryRT(brightId);
        for (int i = 0; i < passes; i++)
        {
            bloomBuffer.ReleaseTemporaryRT(downId[i]);
        }
        for (int i = passes - 1; i > 0; i--)
        {
            bloomBuffer.ReleaseTemporaryRT(upId[i]);
        }

        context.ExecuteCommandBuffer(bloomBuffer);
        bloomBuffer.Clear();
    }
 /// <summary>
 /// SRP per-camera pre-render hook: simply runs <c>Process()</c>.
 /// The context and camera parameters are not used here; they exist so
 /// overrides can inspect them.
 /// </summary>
 /// <param name="context">The SRP render context (unused).</param>
 /// <param name="givenCamera">The camera about to render (unused).</param>
 protected virtual void OnSrpCameraPreRender(ScriptableRenderContext context, Camera givenCamera)
 {
     Process();
 }
Exemple #10
0
    /// <summary>
    /// Deferred render path for one camera: cull, allocate G-buffer + depth + frame buffer RTs,
    /// fill the G-buffers, run lighting/skybox/post-processing stages (each bracketed by
    /// camera-event triggers), blit the frame buffer to the camera target, then release the RTs.
    /// The order of the recorded commands is load-bearing — do not reorder.
    /// </summary>
    /// <param name="renderContext">SRP context commands are executed on.</param>
    /// <param name="camera">Camera to render.</param>
    /// <param name="vxgi">VXGI instance forwarded to the camera-event triggers.</param>
    public void RenderDeferred(ScriptableRenderContext renderContext, Camera camera, VXGI vxgi)
    {
        // NOTE(review): passing `camera` as the first argument looks like it targets a
        // non-standard/extension overload — the usual SRP call is TryGetCullingParameters(out ...).
        // Confirm which overload resolves here.
        if (!camera.TryGetCullingParameters(camera, out var cullingParams))
        {
            return;
        }
        _cullResults = renderContext.Cull(ref cullingParams);

        renderContext.SetupCameraProperties(camera);

        int width  = camera.pixelWidth;
        int height = camera.pixelHeight;

        _command.BeginSample(_command.name);

        // Toggle the projection-params keyword depending on whether we render the scene view.
        if (camera.cameraType != CameraType.SceneView)
        {
            _command.EnableShaderKeyword("PROJECTION_PARAMS_X");
        }
        else
        {
            _command.DisableShaderKeyword("PROJECTION_PARAMS_X");
        }

        // Allocate the depth buffer, the four G-buffer targets and the HDR frame buffer.
        _command.GetTemporaryRT(ShaderIDs._CameraDepthTexture, width, height, 24, FilterMode.Point, RenderTextureFormat.Depth, RenderTextureReadWrite.Linear);
        _command.GetTemporaryRT(ShaderIDs._CameraGBufferTexture0, width, height, 0, FilterMode.Point, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
        _command.GetTemporaryRT(ShaderIDs._CameraGBufferTexture1, width, height, 0, FilterMode.Point, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
        _command.GetTemporaryRT(ShaderIDs._CameraGBufferTexture2, width, height, 0, FilterMode.Point, RenderTextureFormat.ARGB2101010, RenderTextureReadWrite.Linear);
        _command.GetTemporaryRT(ShaderIDs._CameraGBufferTexture3, width, height, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
        _command.GetTemporaryRT(ShaderIDs.FrameBuffer, width, height, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
        _command.SetRenderTarget(_gBufferBinding);
        _command.ClearRenderTarget(true, true, Color.clear);
        renderContext.ExecuteCommandBuffer(_command);
        _command.Clear();

        TriggerCameraEvent(renderContext, camera, CameraEvent.BeforeGBuffer, vxgi);
        RenderGBuffers(renderContext, camera);
        TriggerCameraEvent(renderContext, camera, CameraEvent.AfterGBuffer, vxgi);

        CopyCameraTargetToFrameBuffer(renderContext, camera);

        // Only build the depth-normals texture if the camera actually asked for it.
        bool depthNormalsNeeded = (camera.depthTextureMode & DepthTextureMode.DepthNormals) != DepthTextureMode.None;

        if (depthNormalsNeeded)
        {
            TriggerCameraEvent(renderContext, camera, CameraEvent.BeforeDepthNormalsTexture, vxgi);
            RenderCameraDepthNormalsTexture(renderContext, camera);
            TriggerCameraEvent(renderContext, camera, CameraEvent.AfterDepthNormalsTexture, vxgi);
        }

        TriggerCameraEvent(renderContext, camera, CameraEvent.BeforeLighting, vxgi);
        RenderLighting(renderContext, camera, vxgi);
        TriggerCameraEvent(renderContext, camera, CameraEvent.AfterLighting, vxgi);

        if (camera.clearFlags == CameraClearFlags.Skybox)
        {
            TriggerCameraEvent(renderContext, camera, CameraEvent.BeforeSkybox, vxgi);
            RenderSkyBox(renderContext, camera);
            TriggerCameraEvent(renderContext, camera, CameraEvent.AfterSkybox, vxgi);
        }

        UpdatePostProcessingLayer(renderContext, camera, vxgi);

        TriggerCameraEvent(renderContext, camera, CameraEvent.BeforeImageEffectsOpaque, vxgi);
        RenderPostProcessingOpaqueOnly(renderContext, camera);
        TriggerCameraEvent(renderContext, camera, CameraEvent.AfterImageEffectsOpaque, vxgi);

        TriggerCameraEvent(renderContext, camera, CameraEvent.BeforeForwardAlpha, vxgi);
        RenderTransparent(renderContext, camera);
        TriggerCameraEvent(renderContext, camera, CameraEvent.AfterForwardAlpha, vxgi);

        TriggerCameraEvent(renderContext, camera, CameraEvent.BeforeImageEffects, vxgi);
        RenderPostProcessing(renderContext, camera);
        // Resolve the accumulated frame buffer to the actual camera target.
        _command.Blit(ShaderIDs.FrameBuffer, BuiltinRenderTextureType.CameraTarget);
        renderContext.ExecuteCommandBuffer(_command);
        _command.Clear();
        TriggerCameraEvent(renderContext, camera, CameraEvent.AfterImageEffects, vxgi);

        TriggerCameraEvent(renderContext, camera, CameraEvent.AfterEverything, vxgi);

        // NOTE(review): _CameraDepthNormalsTexture is released here but allocated inside
        // RenderCameraDepthNormalsTexture (not visible in this file) — confirm pairing.
        if (depthNormalsNeeded)
        {
            _command.ReleaseTemporaryRT(ShaderIDs._CameraDepthNormalsTexture);
        }

        _command.ReleaseTemporaryRT(ShaderIDs._CameraDepthTexture);
        _command.ReleaseTemporaryRT(ShaderIDs._CameraGBufferTexture0);
        _command.ReleaseTemporaryRT(ShaderIDs._CameraGBufferTexture1);
        _command.ReleaseTemporaryRT(ShaderIDs._CameraGBufferTexture2);
        _command.ReleaseTemporaryRT(ShaderIDs._CameraGBufferTexture3);
        _command.ReleaseTemporaryRT(ShaderIDs.FrameBuffer);
        _command.EndSample(_command.name);
        renderContext.ExecuteCommandBuffer(_command);
        _command.Clear();
    }
Exemple #11
0
        /// <summary>
        /// The camera has stopped rendering.
        /// </summary>
        /// <param name="context">The context of the SRP.</param>
        /// <param name="camera">The camera that stopped rendering.</param>
#if UNIVERSAL_RENDER_PIPELINE
        private void EndCameraRendering(ScriptableRenderContext context, Camera camera)
 /// <summary>
 /// SRP begin-camera callback: remembers which camera is currently rendering
 /// in the static <c>_currentCamera</c> field.
 /// </summary>
 /// <param name="context">The SRP render context (unused).</param>
 /// <param name="camera">The camera that is about to render.</param>
 private static void BeginCameraRendering(ScriptableRenderContext context, Camera camera)
 {
     _currentCamera = camera;
 }
        /// <summary>
        /// Dispatches ray-traced reflections to either the Performance or the Quality
        /// implementation. When the asset supports both modes, the per-volume setting
        /// decides; otherwise the asset's supported mode wins.
        /// </summary>
        void RenderRayTracedReflections(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount, bool transparent = false)
        {
            ScreenSpaceReflection reflectionSettings = hdCamera.volumeStack.GetComponent<ScreenSpaceReflection>();

            var supportedMode = m_Asset.currentPlatformRenderPipelineSettings.supportedRayTracingMode;

            if (supportedMode == RenderPipelineSettings.SupportedRayTracingMode.Both)
            {
                // Asset supports both: honor the volume's choice. An unrecognized enum
                // value renders nothing, matching a switch with no default case.
                if (reflectionSettings.mode.value == RayTracingMode.Performance)
                {
                    RenderReflectionsPerformance(hdCamera, cmd, outputTexture, renderContext, frameCount, transparent);
                }
                else if (reflectionSettings.mode.value == RayTracingMode.Quality)
                {
                    RenderReflectionsQuality(hdCamera, cmd, outputTexture, renderContext, frameCount, transparent);
                }
            }
            else if (supportedMode == RenderPipelineSettings.SupportedRayTracingMode.Quality)
            {
                RenderReflectionsQuality(hdCamera, cmd, outputTexture, renderContext, frameCount, transparent);
            }
            else
            {
                // Anything else (i.e. Performance-only support) takes the performance path.
                RenderReflectionsPerformance(hdCamera, cmd, outputTexture, renderContext, frameCount, transparent);
            }
        }
        /// <summary>
        /// Quality-mode ray-traced reflections: a full-resolution ray dispatch (with optional
        /// multi-bounce) followed by an optional temporal denoise into <paramref name="outputTexture"/>.
        /// </summary>
        /// <param name="hdCamera">Camera being rendered.</param>
        /// <param name="cmd">Command buffer commands are recorded into.</param>
        /// <param name="outputTexture">Destination for the final reflection signal.</param>
        /// <param name="renderContext">SRP context (not used directly here).</param>
        /// <param name="frameCount">Frame index (not used directly here).</param>
        /// <param name="transparent">True when rendering the transparent pre-pass variant; disables denoising.</param>
        void RenderReflectionsQuality(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount, bool transparent)
        {
            // Fetch the shader that we will be using.
            // (FIX: removed the unused `reflectionFilter` ComputeShader local — it was fetched but never referenced.)
            RayTracingShader reflectionShader = m_Asset.renderPipelineRayTracingResources.reflectionRaytracingRT;

            // Request the buffers we shall be using
            RTHandle intermediateBuffer0 = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA0);
            RTHandle intermediateBuffer1 = GetRayTracingBuffer(InternalRayTracingBuffers.RGBA1);

            var          settings             = hdCamera.volumeStack.GetComponent <ScreenSpaceReflection>();
            LightCluster lightClusterSettings = hdCamera.volumeStack.GetComponent <LightCluster>();

            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingIntegrateReflection)))
            {
                // Bind all the required data for ray tracing
                BindRayTracedReflectionData(cmd, hdCamera, reflectionShader, settings, lightClusterSettings, intermediateBuffer0, intermediateBuffer1);

                // Only use the shader variant that has multi bounce if the bounce count > 1
                CoreUtils.SetKeyword(cmd, "MULTI_BOUNCE_INDIRECT", settings.bounceCount.value > 1);

                // We are not in the diffuse only case
                m_ShaderVariablesRayTracingCB._RayTracingDiffuseLightingOnly = 0;
                ConstantBuffer.PushGlobal(cmd, m_ShaderVariablesRayTracingCB, HDShaderIDs._ShaderVariablesRaytracing);

                // Run the computation: one ray per pixel, per XR view.
                cmd.DispatchRays(reflectionShader, transparent ? m_RayGenIntegrationTransparentName : m_RayGenIntegrationName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, (uint)hdCamera.viewCount);

                // Disable multi-bounce so the keyword does not leak into subsequent passes.
                CoreUtils.SetKeyword(cmd, "MULTI_BOUNCE_INDIRECT", false);
            }

            using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.RaytracingFilterReflection)))
            {
                if (settings.denoise.value && !transparent)
                {
                    // Grab the history buffer (allocated lazily on first use)
                    RTHandle reflectionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                                                 ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

                    float historyValidity = 1.0f;
#if UNITY_HDRP_DXR_TESTS_DEFINE
                    if (Application.isPlaying)
                    {
                        historyValidity = 0.0f;
                    }
                    else
#endif
                    // We need to check if something invalidated the history buffers.
                    // (Careful: under the test define, this statement is the body of the `else` above.)
                    historyValidity *= ValidRayTracingHistory(hdCamera) ? 1.0f : 0.0f;

                    HDReflectionDenoiser reflectionDenoiser = GetReflectionDenoiser();
                    reflectionDenoiser.DenoiseBuffer(cmd, hdCamera, settings.denoiserRadius.value, intermediateBuffer0, reflectionHistory, outputTexture, historyValidity: historyValidity);
                }
                else
                {
                    // No denoising: copy the raw signal straight to the output.
                    HDUtils.BlitCameraTexture(cmd, intermediateBuffer0, outputTexture);
                }
            }
        }
Exemple #15
0
 public void RenderShadows(ScriptableRenderContext renderContext, CommandBuffer cmd, in ShaderVariablesGlobal globalCB, CullingResults cullResults, HDCamera hdCamera)
Exemple #16
0
    /// <summary>
    /// Renders a single camera with a minimal custom SRP: cull, set camera properties,
    /// clear per the camera's clear flags, draw opaques, draw the default pipeline fallback,
    /// draw the skybox, draw transparents, then submit the context.
    /// Uses the class-level <c>buffer</c> CommandBuffer and <c>cullingResults</c> field
    /// (declared elsewhere in this class).
    /// </summary>
    /// <param name="context">The SRP render context commands are recorded into and submitted on.</param>
    /// <param name="camera">The camera to render.</param>
    private void Render(ScriptableRenderContext context, Camera camera)
    {
        /* Attempt to cull the current camera.
         * If it fails to cull, don't render anything to this camera.
         */
        ScriptableCullingParameters cullingParameters;

        if (!camera.TryGetCullingParameters(out cullingParameters))
        {
            return;
        }

#if UNITY_EDITOR
        // For rendering UI in the scene view
        if (camera.cameraType == CameraType.SceneView)
        {
            ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
        }
#endif

        cullingResults = context.Cull(ref cullingParameters);


        // Setup properties for a specific camera.
        context.SetupCameraProperties(camera);


        /* Creates a graphics command buffer.
         * This particular one is told to clear the depth buffer, not the colour, and to set the clear colour to transparent.
         * It finally releases the memory it has, as the commands have been sent to the internal buffer of the rendering context.
         */
        //var buffer = new CommandBuffer();
        //buffer.ClearRenderTarget(true, false, Color.clear);
        //context.ExecuteCommandBuffer(buffer);
        //buffer.Release();

        /* Clear the render target according to the camera's clear flags
         * (depth and/or colour), then open a nested sample group so the
         * whole camera shows as one entry in the Frame Debugger.
         */
        CameraClearFlags clearFlags = camera.clearFlags;
        buffer.ClearRenderTarget((clearFlags & CameraClearFlags.Depth) != 0, (clearFlags & CameraClearFlags.Color) != 0, camera.backgroundColor);
        buffer.BeginSample("Render Camera");    // Start nested grouping for command buffer (For Frame Debugger)
        context.ExecuteCommandBuffer(buffer);
        //buffer.Release();
        buffer.Clear();


        // Once culling and clearing have been completed we can draw the appropriate renderers.
        var drawSettings   = new DrawingSettings(new ShaderTagId("SRPDefaultUnlit"), new SortingSettings(camera));
        var filterSettings = FilteringSettings.defaultValue;
        // Only render the opaque objects first. To avoid transparent objects being drawn behind the skybox.
        filterSettings.renderQueueRange = RenderQueueRange.opaque;
        context.DrawRenderers(cullingResults, ref drawSettings, ref filterSettings);


        // Check for any problems drawing objects with custom pipeline.
        DrawDefaultPipeline(context, camera);


        // Draws a skybox to the specified camera.
        context.DrawSkybox(camera);


        // After all the opaque objects have been drawn, draw the transparent objects.
        filterSettings.renderQueueRange = RenderQueueRange.transparent;
        context.DrawRenderers(cullingResults, ref drawSettings, ref filterSettings);

        // End nested grouping for command buffer (For Frame Debugger)
        buffer.EndSample("Render Camera");
        context.ExecuteCommandBuffer(buffer);
        buffer.Clear();


        // Commands sent to the rendering context are just buffered. They are only excuted once we submit them.
        context.Submit();
    }
 /// <summary>
 /// Forwards light setup to the renderer's <c>m_ForwardLights</c> helper.
 /// </summary>
 /// <param name="context">The SRP render context.</param>
 /// <param name="renderingData">Per-frame rendering data passed through by reference.</param>
 public override void SetupLights(ScriptableRenderContext context, ref RenderingData renderingData)
 {
     m_ForwardLights.Setup(context, ref renderingData);
 }
Exemple #18
0
    /// <summary>
    /// Minimal SRP entry point: for each camera — emit scene-view UI geometry (editor only),
    /// cull, set camera properties, clear per the camera's clear flags, draw the skybox,
    /// then opaque and transparent renderers, draw editor gizmos, and submit.
    /// </summary>
    /// <param name="context">The SRP render context.</param>
    /// <param name="cameras">All cameras to render this frame.</param>
    protected override void Render(ScriptableRenderContext context, Camera[] cameras)
    {
        BeginFrameRendering(cameras);

        foreach (Camera camera in cameras)
        {
            #if UNITY_EDITOR
            bool isSceneViewCam = camera.cameraType == CameraType.SceneView;
            if (isSceneViewCam)
            {
                ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);                //This makes the UI Canvas geometry appear on scene view
            }
            #endif

            BeginCameraRendering(camera);

            //Culling
            ScriptableCullingParameters cullingParams;
            if (!camera.TryGetCullingParameters(out cullingParams))
            {
                continue;
            }
            CullingResults cull = context.Cull(ref cullingParams);

            //Camera setup some builtin variables e.g. camera projection matrices etc
            context.SetupCameraProperties(camera);

            //Get the setting from camera component.
            //(FIX: replaced redundant `cond ? true : false` ternaries with the boolean expressions themselves.)
            bool drawSkyBox = camera.clearFlags == CameraClearFlags.Skybox;
            bool clearDepth = camera.clearFlags != CameraClearFlags.Nothing;
            bool clearColor = camera.clearFlags == CameraClearFlags.Color;

            //Setup DrawSettings and FilterSettings
            var               sortingSettings = new SortingSettings(camera);
            DrawingSettings   drawSettings    = new DrawingSettings(m_PassName, sortingSettings);
            FilteringSettings filterSettings  = new FilteringSettings(RenderQueueRange.all);

            //Camera clear flag: record the clear into a short-lived command buffer
            CommandBuffer cmd = new CommandBuffer();
            cmd.name = "Cam:" + camera.name + " ClearFlag";
            cmd.ClearRenderTarget(clearDepth, clearColor, camera.backgroundColor);
            context.ExecuteCommandBuffer(cmd);
            cmd.Release();

            //Skybox
            if (drawSkyBox)
            {
                context.DrawSkybox(camera);
            }

            //Opaque objects
            sortingSettings.criteria        = SortingCriteria.CommonOpaque;
            drawSettings.sortingSettings    = sortingSettings;
            filterSettings.renderQueueRange = RenderQueueRange.opaque;
            context.DrawRenderers(cull, ref drawSettings, ref filterSettings);

            //Transparent objects
            sortingSettings.criteria        = SortingCriteria.CommonTransparent;
            drawSettings.sortingSettings    = sortingSettings;
            filterSettings.renderQueueRange = RenderQueueRange.transparent;
            context.DrawRenderers(cull, ref drawSettings, ref filterSettings);

            //SceneView fix, so that it draws the gizmos on scene view
            #if UNITY_EDITOR
            if (isSceneViewCam)
            {
                context.DrawGizmos(camera, GizmoSubset.PostImageEffects);
            }
            #endif

            context.Submit();
        }
    }
        /// <summary>
        /// Ray-traced reflections, "T1" path. Two generation strategies:
        /// deferred mode computes ray directions in a compute shader and resolves them via
        /// deferred ray-traced lighting; otherwise a ray-gen shader is dispatched directly
        /// (full or half resolution). Both are followed by an upscale/filter compute pass and
        /// an optional temporal denoise into <paramref name="outputTexture"/>.
        /// Early-outs when no ray-tracing environment is available.
        /// </summary>
        /// <param name="hdCamera">Camera being rendered.</param>
        /// <param name="cmd">Command buffer commands are recorded into.</param>
        /// <param name="outputTexture">Destination for the final reflection signal.</param>
        /// <param name="renderContext">SRP context (not used directly here).</param>
        /// <param name="frameCount">Frame index (not used directly here).</param>
        public void RenderReflectionsT1(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironment = m_RayTracingManager.CurrentEnvironment();

            // If no ray tracing environment, then we do not want to evaluate this effect
            if (rtEnvironment == null)
            {
                return;
            }

            // Fetch the required resources
            BlueNoise        blueNoise          = m_RayTracingManager.GetBlueNoiseManager();
            RayTracingShader reflectionShaderRT = m_Asset.renderPipelineRayTracingResources.reflectionRaytracingRT;
            ComputeShader    reflectionShaderCS = m_Asset.renderPipelineRayTracingResources.reflectionRaytracingCS;
            ComputeShader    reflectionFilter   = m_Asset.renderPipelineRayTracingResources.reflectionBilateralFilterCS;

            // Fetch all the settings
            var          settings             = VolumeManager.instance.stack.GetComponent <ScreenSpaceReflection>();
            LightCluster lightClusterSettings = VolumeManager.instance.stack.GetComponent <LightCluster>();

            if (settings.deferredMode.value)
            {
                // Deferred mode: a compute kernel generates ray directions, then deferred
                // ray-traced lighting resolves them.
                // Fetch the new sample kernel
                int currentKernel = reflectionShaderCS.FindKernel(settings.fullResolution.value ? "RaytracingReflectionsFullRes" : "RaytracingReflectionsHalfRes");

                // Bind all the required textures
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._OwenScrambledTexture, m_Asset.renderPipelineResources.textures.owenScrambledTex);
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                // Clear coat mask only exists in deferred lit mode; otherwise bind black.
                RenderTargetIdentifier clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

                // Bind all the required scalars
                cmd.SetComputeFloatParam(reflectionShaderCS, HDShaderIDs._RaytracingIntensityClamp, settings.clampValue.value);
                cmd.SetComputeFloatParam(reflectionShaderCS, HDShaderIDs._RaytracingReflectionMinSmoothness, settings.minSmoothness.value);
                cmd.SetComputeIntParam(reflectionShaderCS, HDShaderIDs._RaytracingReflectSky, settings.reflectSky.value ? 1 : 0);

                // Bind the sampling data: use the TAA frame index when TAA is on, otherwise cycle 0..7.
                int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)m_FrameCount % 8;
                cmd.SetComputeIntParam(reflectionShaderCS, HDShaderIDs._RaytracingFrameIndex, frameIndex);

                // Bind the output buffers
                cmd.SetComputeTextureParam(reflectionShaderCS, currentKernel, HDShaderIDs._RaytracingDirectionBuffer, m_ReflIntermediateTexture1);

                // Texture dimensions
                int texWidth  = hdCamera.actualWidth;
                int texHeight = hdCamera.actualHeight;

                if (settings.fullResolution.value)
                {
                    // Evaluate the dispatch parameters (8x8 tiles, rounded up)
                    int areaTileSize = 8;
                    int numTilesXHR  = (texWidth + (areaTileSize - 1)) / areaTileSize;
                    int numTilesYHR  = (texHeight + (areaTileSize - 1)) / areaTileSize;

                    // Compute the directions
                    cmd.DispatchCompute(reflectionShaderCS, currentKernel, numTilesXHR, numTilesYHR, 1);
                }
                else
                {
                    // Evaluate the dispatch parameters (half resolution, 8x8 tiles, rounded up)
                    int areaTileSize = 8;
                    int numTilesXHR  = (texWidth / 2 + (areaTileSize - 1)) / areaTileSize;
                    int numTilesYHR  = (texHeight / 2 + (areaTileSize - 1)) / areaTileSize;

                    // Compute the directions
                    cmd.DispatchCompute(reflectionShaderCS, currentKernel, numTilesXHR, numTilesYHR, 1);
                }
                // Evaluate the deferred lighting: directions in m_ReflIntermediateTexture1,
                // lit result in m_ReflIntermediateTexture0.
                cmd.SetGlobalInt(HDShaderIDs._RaytracingReflectSky, settings.reflectSky.value ? 1 : 0);
                RenderRaytracingDeferredLighting(cmd, hdCamera, rtEnvironment, m_ReflIntermediateTexture1, settings.rayBinning.value, rtEnvironment.reflLayerMask, settings.rayLength.value, m_ReflIntermediateTexture0, disableSpecularLighting: true, halfResolution: !settings.fullResolution.value);
            }
            else
            {
                // Non-deferred mode: dispatch the ray-gen shader directly.
                // Bind all the required data for ray tracing
                BindRayTracedReflectionData(cmd, hdCamera, rtEnvironment, reflectionShaderRT, settings, lightClusterSettings);

                // Set the data for the ray miss
                cmd.SetRayTracingTextureParam(reflectionShaderRT, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

                // Force to disable specular lighting so the rays do not pick up the specular term.
                cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, 0);

                // Run the computation
                if (settings.fullResolution.value)
                {
                    cmd.DispatchRays(reflectionShaderRT, m_RayGenReflectionFullResName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
                }
                else
                {
                    // Run the computation at half resolution
                    cmd.DispatchRays(reflectionShaderRT, m_RayGenReflectionHalfResName, (uint)(hdCamera.actualWidth / 2), (uint)(hdCamera.actualHeight / 2), 1);
                }

                // Restore the previous state of specular lighting
                cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, hdCamera.frameSettings.IsEnabled(FrameSettingsField.SpecularLighting) ? 1 : 0);
            }

            using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
            {
                // Fetch the right filter to use (matching the resolution the rays were traced at)
                int currentKernel = 0;
                if (settings.fullResolution.value)
                {
                    currentKernel = reflectionFilter.FindKernel("ReflectionIntegrationUpscaleFullRes");
                }
                else
                {
                    currentKernel = reflectionFilter.FindKernel("ReflectionIntegrationUpscaleHalfRes");
                }

                // Inject all the parameters for the compute
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrLightingTextureRW, m_ReflIntermediateTexture0);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrHitPointTexture, m_ReflIntermediateTexture1);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_NoiseTexture", blueNoise.textureArray16RGB);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_RaytracingReflectionTexture", outputTexture);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._ScramblingTexture, m_Asset.renderPipelineResources.textures.scramblingTex);
                cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._SpatialFilterRadius, settings.spatialFilterRadius.value);
                cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._RaytracingDenoiseRadius, settings.enableFilter.value ? settings.filterRadius.value : 0);
                cmd.SetComputeFloatParam(reflectionFilter, HDShaderIDs._RaytracingReflectionMinSmoothness, settings.minSmoothness.value);

                // Texture dimensions
                int texWidth  = hdCamera.actualWidth;
                int texHeight = hdCamera.actualHeight;

                // Evaluate the dispatch parameters (8x8 tiles, rounded up)
                int areaTileSize = 8;
                int numTilesXHR  = (texWidth + (areaTileSize - 1)) / areaTileSize;
                int numTilesYHR  = (texHeight + (areaTileSize - 1)) / areaTileSize;

                // Bind the right texture for clear coat support
                RenderTargetIdentifier clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

                // Compute the texture: upscale/filter into outputTexture (bound as _RaytracingReflectionTexture)
                cmd.DispatchCompute(reflectionFilter, currentKernel, numTilesXHR, numTilesYHR, 1);

                // NOTE(review): this nested scope duplicates the enclosing "Filter Reflection" sample
                // name — likely a copy/paste artifact; confirm against profiler expectations.
                using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
                {
                    if (settings.enableFilter.value)
                    {
                        // Grab the history buffer (allocated lazily on first use)
                        RTHandleSystem.RTHandle reflectionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                                                                    ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

                        // Denoise outputTexture -> intermediate0, then blit the denoised
                        // result back into outputTexture (ping-pong to avoid read/write aliasing).
                        HDSimpleDenoiser simpleDenoiser = m_RayTracingManager.GetSimpleDenoiser();
                        simpleDenoiser.DenoiseBuffer(cmd, hdCamera, outputTexture, reflectionHistory, m_ReflIntermediateTexture0, settings.filterRadius.value, singleChannel: false);
                        HDUtils.BlitCameraTexture(cmd, m_ReflIntermediateTexture0, outputTexture);
                    }
                }
            }
        }
Exemple #20
0
        /// <summary>
        /// Records the raymarched-cloud rendering for the current camera: uploads all cloud
        /// material parameters (color, distance, wind, area, sun shadows, stepping), then draws
        /// the clouds either directly at full resolution, via frame-interleaved accumulation,
        /// or via a multi-RT blend chain, finally compositing into the camera color target.
        /// NOTE(review): relies on class state (settings, solidCloudMaterial, frame/frameMode,
        /// rtSize, mulRTBlend, enableFrame, frameRTS, screenWidth/screenHeight, mainLightIndex,
        /// windSpeedAcum) initialized elsewhere — confirm setup order against the caller.
        /// </summary>
        /// <param name="context">Render context used to execute the accumulated command buffer.</param>
        /// <param name="cmd">Command buffer the state changes and draws are recorded into.</param>
        /// <param name="renderingData">Per-camera rendering data (used for the projection y-flip check).</param>
        private void RaymarchCloud(ScriptableRenderContext context, CommandBuffer cmd, ref RenderingData renderingData)
        {
            //raymarch cloud
            //###############################################

            //CloudColor----------------
            solidCloudMaterial.SetColor(CloudColor_ID, settings.cloudAlbedoColor.value);

            CoreUtils.SetKeyword(solidCloudMaterial, k_CLOUD_USE_XY_PLANE, settings.useXYPlane.value);


            // Noise scale factor: a larger noiseScale setting yields a smaller shader-space scale.
            float scale = 0.01f / settings.noiseScale.value;

            //CloudDistance_ID----------------
            float distance         = settings.distance.value;
            float distanceFallOff  = settings.distanceFallOff.value;
            float maxLength        = settings.maxLength.value;
            float maxLengthFallOff = settings.maxLengthFallOff.value * maxLength + 1.0f;

            CoreUtils.SetKeyword(solidCloudMaterial, k_CLOUD_DISTANCE_ON, distance > 0);

            solidCloudMaterial.SetVector(CloudDistance_ID,
                                         new Vector4(scale * scale * distance * distance, (distanceFallOff * distanceFallOff + 0.1f),
                                                     maxLength, maxLengthFallOff)); //, _distance * (1.0f - _distanceFallOff)));


            //CloudWindDir_ID-----------------
            // Accumulate the wind offset over time; when it grows very large, shrink it to
            // limit floating-point precision loss in the shader.
            windSpeedAcum += Time.deltaTime * settings.windDirection.value.normalized
                             * settings.windSpeed.value / scale;
            if (windSpeedAcum.sqrMagnitude > 1000000)             //1000*1000
            {
                windSpeedAcum = windSpeedAcum.normalized * windSpeedAcum.magnitude / 1000;
            }

            solidCloudMaterial.SetVector(CloudWindDir_ID, windSpeedAcum);


            //CloudData_ID---------------------
            Vector3 cloudAreaPosition = settings.cloudAreaPosition.value;

            solidCloudMaterial.SetVector(CloudData_ID
                                         , new Vector4(0, settings.height.value, 1.0f / settings.density.value, scale));

            //CloudAreaPosition_ID---------------
            // solidCloudMaterial.SetVector(CloudAreaPosition_ID,
            //  new Vector4(cloudAreaPosition.x, 0, cloudAreaPosition.z, 0));

            solidCloudMaterial.SetVector(CloudAreaPosition_ID, cloudAreaPosition);

            //CloudAreaData_ID-----------
            float cloudAreaRadius  = settings.cloudAreaRadius.value;
            float cloudAreaHeight  = settings.cloudAreaHeight.value;
            float cloudAreaDepth   = settings.cloudAreaDepth.value;
            float cloudAreaFallOff = settings.cloudAreaFallOff.value;

            Vector4 areaData = new Vector4(1.0f / (1.0f + cloudAreaRadius), 1.0f / (1.0f + cloudAreaHeight),
                                           1.0f / (1.0f + cloudAreaDepth), cloudAreaFallOff);

            // Box-shaped area when both height and depth are set; otherwise fall back to a
            // sphere area (keyword on) and repack areaData for the spherical test.
            if (cloudAreaHeight > 0 && cloudAreaDepth > 0)
            {
                solidCloudMaterial.DisableKeyword(k_CLOUD_AREA_SPHERE);
            }
            else
            {
                solidCloudMaterial.EnableKeyword(k_CLOUD_AREA_SPHERE);

                areaData.y  = cloudAreaRadius * cloudAreaRadius;
                areaData.x /= scale;
                areaData.z /= scale;
            }

            solidCloudMaterial.SetVector(CloudAreaData_ID, areaData);


            //SunShadowsData_ID------------
            float shadowStrength = settings.sunShadowsStrength.value;

            // Sun shadows need both a positive strength and a valid main light index;
            // otherwise the keyword is disabled.
            if (shadowStrength <= 0)
            {
                solidCloudMaterial.DisableKeyword(k_CLOUD_SUN_SHADOWS_ON);
            }
            else
            {
                if (mainLightIndex >= 0)
                {
                    solidCloudMaterial.EnableKeyword(k_CLOUD_SUN_SHADOWS_ON);
                    solidCloudMaterial.SetVector(SunShadowsData_ID,
                                                 new Vector4(shadowStrength
                                                             , settings.sunShadowsJitterStrength.value, settings.sunShadowsCancellation.value, 0));
                }
                else
                {
                    solidCloudMaterial.DisableKeyword(k_CLOUD_SUN_SHADOWS_ON);
                }
            }


            //CloudStepping_ID------------
            float stepping     = settings.stepping.value;
            float steppingNear = settings.steppingNear.value;
            float dithering    = settings.ditherStrength.value;

            Vector4 fogStepping = new Vector4(1.0f / (stepping + 1.0f), 1 / (1 + steppingNear), 0,
                                              dithering * 0.1f);

            solidCloudMaterial.SetVector(CloudStepping_ID, fogStepping);

            // After URP 7.5.3, cameraData.GetGPUProjectionMatrix() applies a y-flip, so when the
            // projection is flipped we rebuild the inverse view-projection from the unflipped
            // GPU projection matrix ourselves and upload it as a global.
            var cameraData = renderingData.cameraData;

            if (cameraData.IsCameraProjectionMatrixFlipped())
            {
                Matrix4x4 viewAndProjectionMatrix =
                    GL.GetGPUProjectionMatrix(cameraData.GetProjectionMatrix(), false) * cameraData.GetViewMatrix();
                Matrix4x4 inverseViewProjection = Matrix4x4.Inverse(viewAndProjectionMatrix);
                cmd.SetGlobalMatrix(inverseViewAndProjectionMatrix, inverseViewProjection);
            }


            // Frame-interleaved rendering: advance the frame counter (period 3 for modes 2/3,
            // period 4 otherwise) and select the matching frame-mode shader keyword.
            if (enableFrame)
            {
                CoreUtils.SetKeyword(solidCloudMaterial, k_CLOUD_FRAME_ON, true);
                if (frameMode == 2 || frameMode == 3)
                {
                    frame = (frame + 1) % 3;
                }
                else
                {
                    frame = (frame + 1) % 4;
                }

                solidCloudMaterial.SetInt(Frame_ID, frame);
                for (int i = 0; i < k_FRAME_MODE.Length; i++)
                {
                    // Mobile driver bug: rapidly enabling/disabling a keyword in a short time on a
                    // complex shader can hang the device, so modes 0 and 1 both keep keyword [0] on.
                    if ((i == 0 && frameMode == 1) || (i == 1 && frameMode == 0))
                    {
                        CoreUtils.SetKeyword(solidCloudMaterial, k_FRAME_MODE[0], true);
                    }
                    else
                    {
                        CoreUtils.SetKeyword(solidCloudMaterial, k_FRAME_MODE[i], i == frameMode);
                    }
                }
            }
            else
            {
                CoreUtils.SetKeyword(solidCloudMaterial, k_CLOUD_FRAME_ON, false);
                frame = -1;
            }

            CoreUtils.SetKeyword(solidCloudMaterial, k_CLOUD_MUL_RT_ON, mulRTBlend);

            // Downsampled render size: each rtSize step above 1 halves the resolution again.
            int div    = rtSize - 1;
            int width  = screenWidth >> div;
            int height = screenHeight >> div;

            if (settings.enableBlur.value)
            {
                if (div != 0)
                {
                    // Blur texel step depends on which accumulation paths (multi-RT blend and/or
                    // frame interleaving) are active.
                    if (mulRTBlend && !enableFrame)
                    {
                        solidCloudMaterial.SetVector(Step_TexelSize_ID,
                                                     new Vector4(1.5f / (div * width), 1.5f / (div * height)));
                    }
                    else if (!mulRTBlend && enableFrame)
                    {
                        solidCloudMaterial.SetVector(Step_TexelSize_ID,
                                                     new Vector4(1f / (div * width), 1f / (div * height)));
                    }
                    else if (mulRTBlend && enableFrame)
                    {
                        solidCloudMaterial.SetVector(Step_TexelSize_ID,
                                                     new Vector4(2f / (div * width), 2f / (div * height)));
                    }
                    else
                    {
                        solidCloudMaterial.SetVector(Step_TexelSize_ID,
                                                     new Vector4(1.5f / (div * width), 1.5f / (div * height)));
                    }
                }
                else
                {
                    solidCloudMaterial.SetVector(Step_TexelSize_ID,
                                                 Vector4.zero);
                }
            }

            // Multi-RT blend: accumulate the two lower-resolution levels (1/2, 1/4, ... of the
            // base size) with the shortest ray step, then composite them together.
            if (mulRTBlend)
            {
                cmd.SetGlobalInt(DstBlend_ID, (int)BlendMode.Zero);

                //blend
                for (int i = 1; i < 3; i++)
                {
                    if (enableFrame)
                    {
                        if ((frameMode == 2 || frameMode == 3) && frame == 2)
                        {
                            break;
                        }

                        if (frameMode == 4)
                        {
                            cmd.SetGlobalVector(MulRTScale_ID
                                                , new Vector4(frameRTS[0].width, frameRTS[0].height, i - 1, i * 2));
                        }

                        // If DontCare produces visual corruption on some devices, change it to Load.
                        cmd.SetRenderTarget(frameRTS[i], RenderBufferLoadAction.DontCare,
                                            RenderBufferStoreAction.Store,
                                            RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
                        // Frame accumulation: clearing here would wipe previous frames' contribution,
                        // so the target is deliberately left uncleared.
                        // CoreUtils.ClearRenderTarget(cmd, ClearFlag.None, new Color(0, 0, 0, 0));
                        CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 0);
                        cmd.SetGlobalTexture(TempBlendTex_ID[i], frameRTS[i]);
                    }
                    else
                    {
                        // With a high enough FPS, cmd.GetTemporaryRT would also work; persistent RTs
                        // are used elsewhere as the safer option.
                        cmd.GetTemporaryRT(TempBlendTex_ID[i], width / (2 * i), height / (2 * i), 0,
                                           FilterMode.Bilinear, RenderTextureFormat.ARGB32);
                        cmd.SetRenderTarget(TempBlendTex_RTI[i], RenderBufferLoadAction.DontCare,
                                            RenderBufferStoreAction.Store,
                                            RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
                        // Accumulation target: intentionally not cleared (clearing would prevent
                        // the overlay from accumulating).
                        // CoreUtils.ClearRenderTarget(cmd, ClearFlag.Color, new Color(0, 0, 0, 0));
                        CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 0);
                        cmd.SetGlobalTexture(TempBlendTex_ID[i], TempBlendTex_RTI[i]);
                    }
                }

                context.ExecuteCommandBuffer(cmd);
                cmd.Clear();

                if (!enableFrame && !settings.enableBlur.value)
                {
                    // No frame interleaving and no blur: composite straight into the camera target.
                    cmd.SetGlobalInt(DstBlend_ID,
                                     (int)(settings.enableBlend.value ? BlendMode.OneMinusSrcAlpha : BlendMode.Zero));
                    cmd.SetRenderTarget(CameraColorTexture_RTI);
                    CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 4);
                }
                else
                {
                    if (enableFrame)
                    {
                        if (!((frameMode == 2 || frameMode == 3) && frame == 2))
                        {
                            // If DontCare produces visual corruption on some devices, change it to Load.
                            cmd.SetRenderTarget(frameRTS[0], RenderBufferLoadAction.DontCare,
                                                RenderBufferStoreAction.Store,
                                                RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
                            CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 4);
                            cmd.SetGlobalTexture(BlendTex_ID, frameRTS[0]);

                            context.ExecuteCommandBuffer(cmd);
                            cmd.Clear();
                        }
                    }
                    else
                    {
                        cmd.GetTemporaryRT(BlendTex_ID, width, height, 0, FilterMode.Bilinear,
                                           RenderTextureFormat.ARGB32);
                        cmd.SetRenderTarget(BlendTex_RTI, RenderBufferLoadAction.DontCare,
                                            RenderBufferStoreAction.Store,
                                            RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
                        CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 4);
                        cmd.SetGlobalTexture(BlendTex_ID, BlendTex_RTI);

                        context.ExecuteCommandBuffer(cmd);
                        cmd.Clear();

                        for (int i = 1; i < 3; i++)
                        {
                            cmd.ReleaseTemporaryRT(TempBlendTex_ID[i]);
                        }
                    }

                    CoreUtils.SetKeyword(solidCloudMaterial, k_CLOUD_BLUR_ON, settings.enableBlur.value);
                    cmd.SetGlobalInt(DstBlend_ID,
                                     (int)(settings.enableBlend.value ? BlendMode.OneMinusSrcAlpha : BlendMode.Zero));

                    cmd.SetRenderTarget(CameraColorTexture_RTI);
                    CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 3);
                }

                cmd.ReleaseTemporaryRT(BlendTex_ID);
            }
            else
            {
                if (rtSize == 1 && enableFrame == false)
                {
                    // Full-resolution direct path: draw straight into the camera color target.
                    CoreUtils.SetKeyword(solidCloudMaterial, k_CLOUD_BLUR_ON, false);
                    cmd.SetGlobalInt(DstBlend_ID,
                                     (int)(settings.enableBlend.value ? BlendMode.OneMinusSrcAlpha : BlendMode.Zero));
                    cmd.SetRenderTarget(CameraColorTexture_RTI);
                    CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 0);
                }
                else
                {
                    cmd.SetGlobalInt(DstBlend_ID, (int)BlendMode.Zero);

                    //blend
                    if (enableFrame)
                    {
                        if (!((frameMode == 2 || frameMode == 3) && frame == 2))
                        {
                            cmd.SetGlobalInt(DstBlend_ID, (int)BlendMode.Zero);

                            // If DontCare produces visual corruption on some devices, change it to Load.
                            cmd.SetRenderTarget(frameRTS[0], RenderBufferLoadAction.DontCare,
                                                RenderBufferStoreAction.Store);
                            CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 0);
                            cmd.SetGlobalTexture(BlendTex_ID, frameRTS[0]);
                        }
                    }
                    else
                    {
                        cmd.GetTemporaryRT(BlendTex_ID, width, height, 0, FilterMode.Bilinear,
                                           RenderTextureFormat.ARGB32);
                        cmd.SetRenderTarget(BlendTex_RTI, RenderBufferLoadAction.DontCare,
                                            RenderBufferStoreAction.Store,
                                            RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
                        CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 0);
                        cmd.SetGlobalTexture(BlendTex_ID, BlendTex_RTI);
                    }

                    context.ExecuteCommandBuffer(cmd);
                    cmd.Clear();

                    CoreUtils.SetKeyword(solidCloudMaterial, k_CLOUD_BLUR_ON, settings.enableBlur.value);
                    cmd.SetGlobalInt(DstBlend_ID,
                                     (int)(settings.enableBlend.value ? BlendMode.OneMinusSrcAlpha : BlendMode.Zero));
                    cmd.SetRenderTarget(CameraColorTexture_RTI);
                    CoreUtils.DrawFullScreen(cmd, solidCloudMaterial, null, 3);

                    cmd.ReleaseTemporaryRT(BlendTex_ID);
                }
            }

            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();
        }
        /// <summary>
        /// Dispatches ray traced reflection rendering to the implementation matching the
        /// ray tracing tier supported by the current platform pipeline settings.
        /// Tiers other than Tier1/Tier2 render nothing.
        /// </summary>
        /// <param name="hdCamera">Camera the reflections are rendered for.</param>
        /// <param name="cmd">Command buffer used to record the work.</param>
        /// <param name="outputTexture">Destination reflection texture.</param>
        /// <param name="renderContext">Render context forwarded to the tier implementation.</param>
        /// <param name="frameCount">Current frame index forwarded to the tier implementation.</param>
        public void RenderRayTracedReflections(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
        {
            var tier = m_Asset.currentPlatformRenderPipelineSettings.supportedRaytracingTier;
            if (tier == RenderPipelineSettings.RaytracingTier.Tier1)
            {
                RenderReflectionsT1(hdCamera, cmd, outputTexture, renderContext, frameCount);
            }
            else if (tier == RenderPipelineSettings.RaytracingTier.Tier2)
            {
                RenderReflectionsT2(hdCamera, cmd, outputTexture, renderContext, frameCount);
            }
        }
 /// <summary>
 /// Pass execution hook. Implement the rendering logic here: use
 /// <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
 /// (https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html).
 /// There is no need to call ScriptableRenderContext.Submit — the render pipeline
 /// calls it at specific points in the pipeline.
 /// </summary>
 public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
 {
     // Intentionally empty: this pass records no commands of its own.
 }
        /// <summary>
        /// Executes <paramref name="renderPass"/> inside a native RenderPass scope: builds the
        /// attachment descriptor array, begins the render pass / subpass on the first mergeable
        /// pass, merges compatible subsequent passes into subpasses, runs the pass, and ends the
        /// render pass after the last mergeable pass. Finally resets the cached attachment
        /// descriptors for the next pass.
        /// </summary>
        /// <param name="context">Render context the native render pass is recorded into.</param>
        /// <param name="renderPass">The pass to execute.</param>
        /// <param name="cameraData">Camera data used to derive depth usage and the pass descriptor.</param>
        /// <param name="renderingData">Rendering data forwarded to the pass's Execute.</param>
        internal void ExecuteNativeRenderPass(ScriptableRenderContext context, ScriptableRenderPass renderPass, CameraData cameraData, ref RenderingData renderingData)
        {
            using (new ProfilingScope(null, Profiling.execute))
            {
                // Look up the group of passes that were merged with this one.
                int     currentPassIndex       = renderPass.renderPassQueueIndex;
                Hash128 currentPassHash        = m_PassIndexToPassHash[currentPassIndex];
                int[]   currentMergeablePasses = m_MergeableRenderPassesMap[currentPassHash];

                int validColorBuffersCount = m_RenderPassesAttachmentCount[currentPassHash];

                bool isLastPass = renderPass.isLastPass;
                // TODO: review the lastPassToBB logic to make it work with merged passes
                // keep track if this is the current camera's last pass and the RT is the backbuffer (BuiltinRenderTextureType.CameraTarget)
                bool isLastPassToBB = isLastPass && (m_ActiveColorAttachmentDescriptors[0].loadStoreTarget ==
                                                     BuiltinRenderTextureType.CameraTarget);
                // NOTE(review): '&&' binds tighter than '||', so useDepth reads as
                // depthOnly || (overrideTarget-condition && not-last-pass-condition) — confirm intended.
                var  depthOnly = renderPass.depthOnly || (cameraData.targetTexture != null && cameraData.targetTexture.graphicsFormat == GraphicsFormat.DepthAuto);
                bool useDepth  = depthOnly || (!renderPass.overrideCameraTarget || (renderPass.overrideCameraTarget && renderPass.depthAttachment != BuiltinRenderTextureType.CameraTarget)) &&
                                 (!(isLastPassToBB || (isLastPass && cameraData.camera.targetTexture != null)));

                // NOTE(review): when the size falls into the ': 1' branch, the loop below still
                // copies validColorBuffersCount entries — presumably that count is 1 on those
                // paths (depth-only / no-depth); verify against the attachment setup code.
                var attachments =
                    new NativeArray <AttachmentDescriptor>(useDepth && !depthOnly ? validColorBuffersCount + 1 : 1,
                                                           Allocator.Temp);

                for (int i = 0; i < validColorBuffersCount; ++i)
                {
                    attachments[i] = m_ActiveColorAttachmentDescriptors[i];
                }

                // Depth goes last, after all color attachments.
                if (useDepth && !depthOnly)
                {
                    attachments[validColorBuffersCount] = m_ActiveDepthAttachmentDescriptor;
                }

                var rpDesc = InitializeRenderPassDescriptor(cameraData, renderPass);

                int validPassCount = GetValidPassIndexCount(currentMergeablePasses);

                var attachmentIndicesCount = GetSubPassAttachmentIndicesCount(renderPass);

                var attachmentIndices = new NativeArray <int>(!depthOnly ? (int)attachmentIndicesCount : 0, Allocator.Temp);
                if (!depthOnly)
                {
                    for (int i = 0; i < attachmentIndicesCount; ++i)
                    {
                        attachmentIndices[i] = renderPass.m_ColorAttachmentIndices[i];
                    }
                }

                if (validPassCount == 1 || currentMergeablePasses[0] == currentPassIndex) // Check if it's the first pass
                {
                    if (PassHasInputAttachments(renderPass))
                    {
                        Debug.LogWarning("First pass in a RenderPass should not have input attachments.");
                    }

                    // Depth attachment index: last slot when used, -1 when the pass has no depth.
                    context.BeginRenderPass(rpDesc.w, rpDesc.h, Math.Max(rpDesc.samples, 1), attachments,
                                            useDepth ? (!depthOnly ? validColorBuffersCount : 0) : -1);
                    attachments.Dispose();

                    context.BeginSubPass(attachmentIndices);

                    m_LastBeginSubpassPassIndex = currentPassIndex;
                }
                else
                {
                    // Regarding input attachments, currently we always recreate a new subpass if it contains input attachments
                    // This might not the most optimal way though and it should be investigated in the future
                    // Whether merging subpasses with matching input attachments is a more viable option
                    if (!AreAttachmentIndicesCompatible(m_ActiveRenderPassQueue[m_LastBeginSubpassPassIndex], m_ActiveRenderPassQueue[currentPassIndex]))
                    {
                        context.EndSubPass();
                        if (PassHasInputAttachments(m_ActiveRenderPassQueue[currentPassIndex]))
                        {
                            context.BeginSubPass(attachmentIndices, m_ActiveRenderPassQueue[currentPassIndex].m_InputAttachmentIndices);
                        }
                        else
                        {
                            context.BeginSubPass(attachmentIndices);
                        }

                        m_LastBeginSubpassPassIndex = currentPassIndex;
                    }
                    else if (PassHasInputAttachments(m_ActiveRenderPassQueue[currentPassIndex]))
                    {
                        context.EndSubPass();
                        context.BeginSubPass(attachmentIndices, m_ActiveRenderPassQueue[currentPassIndex].m_InputAttachmentIndices);

                        m_LastBeginSubpassPassIndex = currentPassIndex;
                    }
                }

                attachmentIndices.Dispose();

                renderPass.Execute(context, ref renderingData);

                if (validPassCount == 1 || currentMergeablePasses[validPassCount - 1] == currentPassIndex) // Check if it's the last pass
                {
                    context.EndSubPass();
                    context.EndRenderPass();

                    m_LastBeginSubpassPassIndex = 0;
                }

                // Reset cached descriptors so stale attachments never leak into the next pass.
                for (int i = 0; i < m_ActiveColorAttachmentDescriptors.Length; ++i)
                {
                    m_ActiveColorAttachmentDescriptors[i] = RenderingUtils.emptyAttachment;
                }

                m_ActiveDepthAttachmentDescriptor = RenderingUtils.emptyAttachment;
            }
        }
 internal void Render(CommandBuffer cmd, HDCamera camera, ScriptableRenderContext renderContext, RTHandle depthTexture, RTHandle normalBuffer, in ShaderVariablesRaytracing globalRTCB, int frameCount)
Exemple #25
0
 /// <summary>
 /// Per-camera begin-rendering callback: runs the pre-cull and pre-render hooks for
 /// <paramref name="camera"/> before it is drawn. Order matters: pre-cull first.
 /// </summary>
 /// <param name="context">Render context for the frame (unused here).</param>
 /// <param name="camera">Camera that is about to be rendered.</param>
 private void CameraBeginRendering(ScriptableRenderContext context, Camera camera)
 {
     CameraPreCull(camera);
     CameraPreRender(camera);
 }
        /// <summary>
        /// Renders Tier-2 ray traced reflections: binds the ray tracing data, dispatches the
        /// integration rays over the full camera resolution, then either temporally denoises the
        /// intermediate signal into <paramref name="outputTexture"/> or blits it raw.
        /// </summary>
        /// <param name="hdCamera">Camera the reflections are rendered for.</param>
        /// <param name="cmd">Command buffer used to record the work.</param>
        /// <param name="outputTexture">Destination reflection texture.</param>
        /// <param name="renderContext">Render context (kept for signature parity with Tier 1; unused here).</param>
        /// <param name="frameCount">Current frame index (kept for signature parity with Tier 1; unused here).</param>
        void RenderReflectionsT2(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
        {
            // Fetch the resources we will be using. (The bilateral filter compute shader was
            // previously fetched here but never used in this tier — removed.)
            HDRaytracingEnvironment rtEnvironment    = m_RayTracingManager.CurrentEnvironment();
            RayTracingShader        reflectionShader = m_Asset.renderPipelineRayTracingResources.reflectionRaytracingRT;

            var          settings             = VolumeManager.instance.stack.GetComponent <ScreenSpaceReflection>();
            LightCluster lightClusterSettings = VolumeManager.instance.stack.GetComponent <LightCluster>();

            // Bind all the required data for ray tracing
            BindRayTracedReflectionData(cmd, hdCamera, rtEnvironment, reflectionShader, settings, lightClusterSettings);

            // Only use the shader variant that has multi bounce if the bounce count > 1
            CoreUtils.SetKeyword(cmd, "MULTI_BOUNCE_INDIRECT", settings.bounceCount.value > 1);

            // We are not in the diffuse only case
            CoreUtils.SetKeyword(cmd, "DIFFUSE_LIGHTING_ONLY", false);

            // Run the computation over the full camera resolution, once per view.
            cmd.DispatchRays(reflectionShader, m_RayGenIntegrationName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, (uint)hdCamera.viewCount);

            // Disable multi-bounce so the keyword does not leak into subsequent dispatches.
            CoreUtils.SetKeyword(cmd, "MULTI_BOUNCE_INDIRECT", false);

            using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
            {
                if (settings.denoise.value)
                {
                    // Grab (or lazily allocate) the per-camera history buffer used for temporal denoising.
                    RTHandle reflectionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                                                 ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

                    HDSimpleDenoiser simpleDenoiser = m_RayTracingManager.GetSimpleDenoiser();
                    simpleDenoiser.DenoiseBuffer(cmd, hdCamera, m_ReflIntermediateTexture0, reflectionHistory, outputTexture, settings.denoiserRadius.value, singleChannel: false);
                }
                else
                {
                    // No denoising requested: copy the raw ray traced signal to the output.
                    HDUtils.BlitCameraTexture(cmd, m_ReflIntermediateTexture0, outputTexture);
                }
            }
        }
Exemple #27
0
 /// <summary>
 /// Begin-frame callback: caches the current LWRP render context so other code in this
 /// class can issue commands against it during the frame.
 /// </summary>
 /// <param name="ctx">Render context for the frame being rendered.</param>
 /// <param name="cameras">Cameras that will be rendered this frame (unused).</param>
 private void CameraBeginFrameRendering(ScriptableRenderContext ctx, Camera[] cameras)
 {
     currentLWRPContext = ctx;
 }
Exemple #28
0
        /// <summary>
        /// Renders the cascaded shadow map for the main directional light into a temporary
        /// shadow atlas: extracts per-cascade matrices, renders each cascade slice, and uploads
        /// the shadow receiver constants. Bails out early when there is no main light, the
        /// light casts no shadows, or there are no shadow casters in range. Records the
        /// resulting shadow quality back into <paramref name="shadowData"/>.
        /// NOTE(review): m_DirectionalShadowmapTexture is a RenderTexture.GetTemporary —
        /// presumably released elsewhere after the frame; confirm.
        /// </summary>
        void RenderDirectionalCascadeShadowmap(ref ScriptableRenderContext context, ref CullResults cullResults, ref LightData lightData, ref ShadowData shadowData)
        {
            LightShadows shadowQuality    = LightShadows.None;
            int          shadowLightIndex = lightData.mainLightIndex;

            // No main directional light: nothing to shadow.
            if (shadowLightIndex == -1)
            {
                return;
            }

            VisibleLight shadowLight = lightData.visibleLights[shadowLightIndex];
            Light        light       = shadowLight.light;

            Debug.Assert(shadowLight.lightType == LightType.Directional);

            if (light.shadows == LightShadows.None)
            {
                return;
            }

            Bounds bounds;

            // Skip entirely when nothing inside the cull results casts a shadow.
            if (!cullResults.GetShadowCasterBounds(shadowLightIndex, out bounds))
            {
                return;
            }

            CommandBuffer cmd = CommandBufferPool.Get(k_RenderDirectionalShadowmapTag);

            using (new ProfilingSample(cmd, k_RenderDirectionalShadowmapTag))
            {
                m_ShadowCasterCascadesCount = shadowData.directionalLightCascadeCount;

                int   shadowResolution = LightweightShadowUtils.GetMaxTileResolutionInAtlas(shadowData.directionalShadowAtlasWidth, shadowData.directionalShadowAtlasHeight, m_ShadowCasterCascadesCount);
                float shadowNearPlane  = light.shadowNearPlane;

                Matrix4x4 view, proj;
                var       settings = new DrawShadowsSettings(cullResults, shadowLightIndex);

                m_DirectionalShadowmapTexture = RenderTexture.GetTemporary(shadowData.directionalShadowAtlasWidth,
                                                                           shadowData.directionalShadowAtlasHeight, k_ShadowmapBufferBits, m_ShadowmapFormat);
                m_DirectionalShadowmapTexture.filterMode = FilterMode.Bilinear;
                m_DirectionalShadowmapTexture.wrapMode   = TextureWrapMode.Clamp;
                SetRenderTarget(cmd, m_DirectionalShadowmapTexture, RenderBufferLoadAction.DontCare,
                                RenderBufferStoreAction.Store, ClearFlag.Depth, Color.black, TextureDimension.Tex2D);

                // Render one atlas slice per cascade.
                // NOTE(review): 'success' only reflects the LAST cascade after the loop — an
                // earlier failed cascade would still count as success below; confirm intended.
                bool success = false;
                for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
                {
                    success = LightweightShadowUtils.ExtractDirectionalLightMatrix(ref cullResults, ref shadowData, shadowLightIndex, cascadeIndex, shadowResolution, shadowNearPlane, out m_CascadeSplitDistances[cascadeIndex], out m_CascadeSlices[cascadeIndex], out view, out proj);
                    if (success)
                    {
                        LightweightShadowUtils.SetupShadowCasterConstants(cmd, ref shadowLight, proj, shadowResolution);
                        LightweightShadowUtils.RenderShadowSlice(cmd, ref context, ref m_CascadeSlices[cascadeIndex], proj,
                                                                 view, settings);
                    }
                }

                if (success)
                {
                    // Soft shadows only when the pipeline supports them; otherwise force hard.
                    shadowQuality = (shadowData.supportsSoftShadows) ? light.shadows : LightShadows.Hard;
                    SetupDirectionalShadowReceiverConstants(cmd, ref shadowData, shadowLight);
                }
            }
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);

            // TODO: We should have RenderingData as a readonly but currently we need this to pass shadow rendering to litpass
            shadowData.renderedDirectionalShadowQuality = shadowQuality;
        }
Exemple #29
0
 public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
 {
     ref CameraData     cameraData             = ref renderingData.cameraData;
Exemple #30
0
 /// <summary>
 /// Light setup hook; intentionally empty — this renderer performs no per-light
 /// setup of its own.
 /// </summary>
 public override void SetupLights(ScriptableRenderContext context, ref RenderingData renderingData)
 {
 }