Code example #1
    void RenderScattering(CommandBuffer cb, HDCamera hdCamera, Camera cam, RTHandleSystem.RTHandle depthHandle, RTHandleSystem.RTHandle srcHandle, RTHandleSystem.RTHandle dstHandle)
    {
        cb.BeginSample("AtmosphericScatteringBlit");

        CommandBufferBlit(cb, hdCamera, cam, depthHandle, srcHandle, dstHandle);

        cb.EndSample("AtmosphericScatteringBlit");
    }
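The BeginSample/EndSample pair above can equivalently be written with the ProfilingSample helper used throughout the other examples on this page; a minimal sketch (CommandBufferBlit is the project's own helper, shown here unchanged):

    void RenderScatteringScoped(CommandBuffer cb, HDCamera hdCamera, Camera cam, RTHandleSystem.RTHandle depthHandle, RTHandleSystem.RTHandle srcHandle, RTHandleSystem.RTHandle dstHandle)
    {
        // Dispose() of the ProfilingSample emits the matching EndSample call.
        using (new ProfilingSample(cb, "AtmosphericScatteringBlit"))
        {
            CommandBufferBlit(cb, hdCamera, cam, depthHandle, srcHandle, dstHandle);
        }
    }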
Code example #2
File: HDUtils.cs Project: zxf1178649100/FPSSample
        // This particular case is for blitting a non-scaled texture into a scaled texture. So we set up the partial viewport but don't scale the input UVs.
        public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier source, RTHandleSystem.RTHandle destination)
        {
            // Will set the correct camera viewport as well.
            SetRenderTarget(cmd, camera, destination);

            cmd.SetGlobalTexture(HDShaderIDs._BlitTexture, source);
            cmd.SetGlobalVector(HDShaderIDs._BlitScaleBias, new Vector4(1.0f, 1.0f, 0.0f, 0.0f));
            cmd.SetGlobalFloat(HDShaderIDs._BlitMipLevel, 0.0f);
            // Wanted to make things clean and not use SetGlobalXXX APIs but can't use MaterialPropertyBlock with RenderTargetIdentifier so YEY
            //s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
            //s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, camera.scaleBias);
            cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), 0, MeshTopology.Triangles, 3, 1);
        }
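A minimal usage sketch for this overload, assuming a plain (non camera-scaled) RenderTexture allocated elsewhere; the variable names are illustrative:

            // Copy an external, fixed-size texture into a camera-scaled RTHandle.
            // SetRenderTarget inside BlitCameraTexture sets the partial viewport for us.
            RenderTargetIdentifier externalSource = myPlainRenderTexture; // assumption: created elsewhere
            HDUtils.BlitCameraTexture(cmd, hdCamera, externalSource, m_CameraScaledDestination);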
Code example #3
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
 {
     SetRenderTarget(cmd, camera, buffer, clearFlag, CoreUtils.clearColorAllBlack, miplevel, cubemapFace, depthSlice);
 }
Code example #4
File: HDUtils.cs Project: zxf1178649100/FPSSample
 public static void BlitTexture(CommandBuffer cmd, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel, bool bilinear)
 {
     s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
     s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, scaleBias);
     s_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, mipLevel);
     cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 1 : 0, MeshTopology.Triangles, 3, 1, s_PropertyBlock);
 }
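The scaleBias vector packs the source UV transform: xy scales the UVs and zw offsets them, which is why example #2 passes (1, 1, 0, 0) for a full, unscaled read. A sketch of the same full-texture blit through this overload, with illustrative handle names:

        // Full-texture blit: scale = (1, 1), bias = (0, 0), mip 0, point sampling.
        HDUtils.BlitTexture(cmd, srcHandle, dstHandle, new Vector4(1.0f, 1.0f, 0.0f, 0.0f), 0.0f, false);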
Code example #5
File: HDUtils.cs Project: zxf1178649100/FPSSample
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
 {
     SetRenderTarget(cmd, camera, destination);
     cmd.SetViewport(destViewport);
     BlitTexture(cmd, source, destination, camera.viewportScale, mipLevel, bilinear);
 }
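A usage sketch for the viewport overload, assuming camera-scaled handles; the Rect is expressed in pixels of the destination target:

        // Copy the source into the top-left quadrant of the destination.
        var quadrant = new Rect(0, 0, hdCamera.actualWidth / 2, hdCamera.actualHeight / 2);
        HDUtils.BlitCameraTexture(cmd, hdCamera, srcHandle, dstHandle, quadrant);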
Code example #6
 public void RenderOpaqueAtmosphericScattering(CommandBuffer cmd, HDCamera hdCamera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer,
                                               Matrix4x4 pixelCoordToViewDirWS, bool isMSAA)
 {
     using (new ProfilingSample(cmd, "Opaque Atmospheric Scattering"))
     {
         var propertyBlock = new MaterialPropertyBlock();
         propertyBlock.SetMatrix(HDShaderIDs._PixelCoordToViewDirWS, pixelCoordToViewDirWS);
          HDUtils.DrawFullScreen(cmd, hdCamera, m_OpaqueAtmScatteringMaterial, colorBuffer, depthBuffer, propertyBlock, isMSAA ? 1 : 0);
     }
 }
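Note that this variant allocates a fresh MaterialPropertyBlock on every call; a sketch of an allocation-free alternative, with an illustrative field name:

      // Cache one block and reuse it each frame; Clear() drops stale properties.
      static readonly MaterialPropertyBlock s_AtmospherePropertyBlock = new MaterialPropertyBlock();

      // Inside RenderOpaqueAtmosphericScattering:
      s_AtmospherePropertyBlock.Clear();
      s_AtmospherePropertyBlock.SetMatrix(HDShaderIDs._PixelCoordToViewDirWS, pixelCoordToViewDirWS);
      HDUtils.DrawFullScreen(cmd, hdCamera, m_OpaqueAtmScatteringMaterial, colorBuffer, depthBuffer, s_AtmospherePropertyBlock, isMSAA ? 1 : 0);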
Code example #7
File: HDUtils.cs Project: zxf1178649100/FPSSample
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag = ClearFlag.None)
 {
     CoreUtils.SetRenderTarget(cmd, colorBuffers, depthBuffer); // Don't clear here, viewport needs to be set before we do.
     SetViewportAndClear(cmd, camera, depthBuffer, clearFlag, CoreUtils.clearColorAllBlack);
 }
Code example #8
File: SharedRTManager.cs Project: KoniroIris/VFX
        public void ResolveMSAAColor(CommandBuffer cmd, HDCamera hdCamera, RTHandleSystem.RTHandle msaaTarget, RTHandleSystem.RTHandle simpleTarget)
        {
            if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
            {
                Debug.Assert(m_MSAASupported);
                using (new ProfilingSample(cmd, "ResolveColor", CustomSamplerId.VolumeUpdate.GetSampler()))
                {
                    // Grab the RTIs and set the output render targets
                    HDUtils.SetRenderTarget(cmd, hdCamera, simpleTarget);

                    // Set the input textures
                    m_PropertyBlock.SetTexture(HDShaderIDs._ColorTextureMS, msaaTarget);

                    // Resolve the color buffer
                    cmd.DrawProcedural(Matrix4x4.identity, m_ColorResolveMaterial, SampleCountToPassIndex(m_MSAASamples), MeshTopology.Triangles, 3, 1, m_PropertyBlock);
                }
            }
        }
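SampleCountToPassIndex is not part of this excerpt; a plausible sketch of such a mapping (an assumption, not the project's actual code) picks one resolve pass per MSAA sample count:

            // Hypothetical: map the active MSAA sample count to a shader pass index.
            static int SampleCountToPassIndex(MSAASamples samples)
            {
                switch (samples)
                {
                    case MSAASamples.MSAA2x: return 0;
                    case MSAASamples.MSAA4x: return 1;
                    case MSAASamples.MSAA8x: return 2;
                    default:                 return 0;
                }
            }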
Code example #9
        public void RenderSky(SkyUpdateContext skyContext, HDCamera hdCamera, Light sunLight, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
        {
            if (skyContext.IsValid() && hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky)
            {
                using (new ProfilingSample(cmd, "Sky Pass"))
                {
                    m_BuiltinParameters.commandBuffer             = cmd;
                    m_BuiltinParameters.sunLight                  = sunLight;
                    m_BuiltinParameters.pixelCoordToViewDirMatrix = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(hdCamera.camera.fieldOfView * Mathf.Deg2Rad, hdCamera.screenSize, hdCamera.viewMatrix, false);
                    m_BuiltinParameters.invViewProjMatrix         = hdCamera.viewProjMatrix.inverse;
                    m_BuiltinParameters.screenSize                = hdCamera.screenSize;
                    m_BuiltinParameters.cameraPosWS               = hdCamera.camera.transform.position;
                    m_BuiltinParameters.colorBuffer               = colorBuffer;
                    m_BuiltinParameters.depthBuffer               = depthBuffer;
                    m_BuiltinParameters.hdCamera                  = hdCamera;
                    m_BuiltinParameters.debugSettings             = debugSettings;

                    skyContext.renderer.SetRenderTargets(m_BuiltinParameters);
                    skyContext.renderer.RenderSky(m_BuiltinParameters, false);
                }
            }
        }
Code example #10
        public static void BlitCameraTexture(CommandBuffer cmd, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
        {
            Vector2 viewportScale = new Vector2(source.rtHandleProperties.rtHandleScale.x, source.rtHandleProperties.rtHandleScale.y);

            SetRenderTarget(cmd, destination);
            cmd.SetViewport(destViewport);
            BlitTexture(cmd, source, viewportScale, mipLevel, bilinear);
        }
Code example #11
        public void ClearAndSetTargets(CommandBuffer cmd, HDCamera camera, bool rtCount4, RTHandleSystem.RTHandle cameraDepthStencilBuffer)
        {
            // for alpha compositing, color is cleared to 0, alpha to 1
            // https://developer.nvidia.com/gpugems/GPUGems3/gpugems3_ch23.html

            // to avoid temporary allocations, because the number of render targets is not passed explicitly to SetRenderTarget, but rather deduced from the array size
            RenderTargetIdentifier[] RTIDs = rtCount4 ? m_RTIDs4 : m_RTIDs3;

            // this clears the targets
            Color clearColor         = new Color(0.0f, 0.0f, 0.0f, 1.0f);
            Color clearColorNormal   = new Color(0.5f, 0.5f, 0.5f, 1.0f); // for normals 0.5 is neutral
            Color clearColorAOSBlend = new Color(1.0f, 1.0f, 1.0f, 1.0f);

            HDUtils.SetRenderTarget(cmd, camera, m_RTs[0], ClearFlag.Color, clearColor);
            HDUtils.SetRenderTarget(cmd, camera, m_RTs[1], ClearFlag.Color, clearColorNormal);
            HDUtils.SetRenderTarget(cmd, camera, m_RTs[2], ClearFlag.Color, clearColor);

            // name IDs have to be set every frame, because they can change
            RTIDs[0] = m_RTs[0].nameID;
            RTIDs[1] = m_RTs[1].nameID;
            RTIDs[2] = m_RTs[2].nameID;
            if (rtCount4)
            {
                HDUtils.SetRenderTarget(cmd, camera, m_RTs[3], ClearFlag.Color, clearColorAOSBlend);
                RTIDs[3] = m_RTs[3].nameID;
            }
            HDUtils.SetRenderTarget(cmd, camera, m_HTile, ClearFlag.Color, Color.clear);

            // This actually sets the MRTs and the HTile RWTexture. This is done separately because we do not have an API to clear MRTs to different colors
            HDUtils.SetRenderTarget(cmd, camera, RTIDs, cameraDepthStencilBuffer); // do not clear anymore
            cmd.SetRandomWriteTarget(rtCount4 ? 4 : 3, m_HTile);
        }
Code example #12
        public static void SetRenderTarget(CommandBuffer cmd, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = -1)
        {
            int cw = colorBuffer.rt.width;
            int ch = colorBuffer.rt.height;
            int dw = depthBuffer.rt.width;
            int dh = depthBuffer.rt.height;

            Debug.Assert(cw == dw && ch == dh);

            CoreUtils.SetRenderTarget(cmd, colorBuffer, depthBuffer, miplevel, cubemapFace, depthSlice);
            SetViewportAndClear(cmd, colorBuffer, clearFlag, clearColor);
        }
Code example #13
 public static void SetRenderTarget(CommandBuffer cmd, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = -1)
 => SetRenderTarget(cmd, buffer, clearFlag, Color.clear, miplevel, cubemapFace, depthSlice);
Code example #14
 public static void BlitCameraTexture(CommandBuffer cmd, RGCamera rgCam, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, float mipLevel = 0.0f, bool bilinear = false)
 {
     SetRenderTarget(cmd, rgCam, destination);
     BlitTexture(cmd, source, destination, rgCam.viewportScaleBias, mipLevel, bilinear);
 }
Code example #15
        public void RenderReflectionsT1(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironment  = m_RaytracingManager.CurrentEnvironment();
            HDRenderPipeline        renderPipeline = m_RaytracingManager.GetRenderPipeline();
            BlueNoise        blueNoise             = m_RaytracingManager.GetBlueNoiseManager();
            ComputeShader    reflectionFilter      = m_PipelineAsset.renderPipelineRayTracingResources.reflectionBilateralFilterCS;
            RayTracingShader reflectionShader      = m_PipelineAsset.renderPipelineRayTracingResources.reflectionRaytracing;

            bool invalidState = rtEnvironment == null || blueNoise == null ||
                                reflectionFilter == null || reflectionShader == null ||
                                m_PipelineResources.textures.owenScrambledTex == null || m_PipelineResources.textures.scramblingTex == null;

            // If no acceleration structure is available, end it now
            if (invalidState)
            {
                return;
            }

            var          settings             = VolumeManager.instance.stack.GetComponent <ScreenSpaceReflection>();
            LightCluster lightClusterSettings = VolumeManager.instance.stack.GetComponent <LightCluster>();

            // Grab the acceleration structures and the light cluster to use
            RayTracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironment.reflLayerMask);
            HDRaytracingLightCluster        lightCluster          = m_RaytracingManager.RequestLightCluster(rtEnvironment.reflLayerMask);

            // Select the ray generation shader based on the quality (half resolution here)
            string targetRayGen = m_RayGenHalfResName;

            // Define the shader pass to use for the reflection pass
            cmd.SetRayTracingShaderPass(reflectionShader, "IndirectDXR");

            // Set the acceleration structure for the pass
            cmd.SetRayTracingAccelerationStructure(reflectionShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

            // Global reflection parameters
            cmd.SetRayTracingFloatParams(reflectionShader, HDShaderIDs._RaytracingIntensityClamp, settings.clampValue.value);
            cmd.SetRayTracingFloatParams(reflectionShader, HDShaderIDs._RaytracingReflectionMinSmoothness, settings.minSmoothness.value);
            cmd.SetRayTracingIntParams(reflectionShader, HDShaderIDs._RaytracingReflectSky, settings.reflectSky.value ? 1 : 0);

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, settings.rayLength.value);
            cmd.SetRayTracingIntParams(reflectionShader, HDShaderIDs._RaytracingNumSamples, settings.numSamples.value);
            int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;

            cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

            // Set the data for the ray generation
            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrLightingTextureRW, m_LightingTexture);
            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrHitPointTexture, m_HitPdfTexture);
            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());

            // Set ray count tex
            cmd.SetRayTracingIntParam(reflectionShader, HDShaderIDs._RayCountEnabled, m_RaytracingManager.rayCountManager.RayCountIsEnabled());
            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._RayCountTexture, m_RaytracingManager.rayCountManager.rayCountTexture);

            // Compute the pixel spread value
            float pixelSpreadAngle = Mathf.Atan(2.0f * Mathf.Tan(hdCamera.camera.fieldOfView * Mathf.PI / 360.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight));

            cmd.SetRayTracingFloatParam(reflectionShader, HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

            // LightLoop data
            cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
            cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
            cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
            cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
            cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, lightClusterSettings.maxNumLightsPercell.value);
            cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
            cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

            // Note: Just in case, we rebind the directional light data (in case it was not already bound)
            cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, renderPipeline.m_LightLoopLightData.directionalLightData);
            cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, renderPipeline.m_lightList.directionalLights.Count);

            // Evaluate the clear coat mask texture based on the lit shader mode
            RenderTargetIdentifier clearCoatMaskTexture = hdCamera.frameSettings.litShaderMode == LitShaderMode.Deferred ? m_GbufferManager.GetBuffersRTI()[2] : TextureXR.GetBlackTexture();

            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

            // Set the data for the ray miss
            cmd.SetRayTracingTextureParam(reflectionShader, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

            // Compute the actual resolution that is needed based on the quality
            uint widthResolution  = (uint)hdCamera.actualWidth / 2;
            uint heightResolution = (uint)hdCamera.actualHeight / 2;

            // Force to disable specular lighting
            cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, 0);

            // Run the computation
            cmd.DispatchRays(reflectionShader, targetRayGen, widthResolution, heightResolution, 1);

            // Restore the previous state of specular lighting
            cmd.SetGlobalInt(HDShaderIDs._EnableSpecularLighting, hdCamera.frameSettings.IsEnabled(FrameSettingsField.SpecularLighting) ? 1 : 0);

            using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingFilterReflection.GetSampler()))
            {
                // Fetch the right filter to use
                int currentKernel = reflectionFilter.FindKernel("RaytracingReflectionFilter");

                // Inject all the parameters for the compute
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrLightingTextureRW, m_LightingTexture);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrHitPointTexture, m_HitPdfTexture);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_NoiseTexture", blueNoise.textureArray16RGB);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_VarianceTexture", m_VarianceBuffer);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_MinColorRangeTexture", m_MinBoundBuffer);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_MaxColorRangeTexture", m_MaxBoundBuffer);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_RaytracingReflectionTexture", outputTexture);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);
                cmd.SetComputeIntParam(reflectionFilter, HDShaderIDs._SpatialFilterRadius, settings.spatialFilterRadius.value);
                cmd.SetComputeFloatParam(reflectionFilter, HDShaderIDs._RaytracingReflectionMinSmoothness, settings.minSmoothness.value);

                // Texture dimensions
                int texWidth  = outputTexture.rt.width;
                int texHeight = outputTexture.rt.height;

                // Evaluate the dispatch parameters
                int areaTileSize = 8;
                int numTilesXHR  = (texWidth + (areaTileSize - 1)) / areaTileSize;
                int numTilesYHR  = (texHeight + (areaTileSize - 1)) / areaTileSize;

                // Bind the right texture for clear coat support
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._SsrClearCoatMaskTexture, clearCoatMaskTexture);

                // Compute the texture
                cmd.DispatchCompute(reflectionFilter, currentKernel, numTilesXHR, numTilesYHR, 1);

                int numTilesXFR = (texWidth + (areaTileSize - 1)) / areaTileSize;
                int numTilesYFR = (texHeight + (areaTileSize - 1)) / areaTileSize;

                RTHandleSystem.RTHandle history = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection)
                                                  ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedReflection, ReflectionHistoryBufferAllocatorFunction, 1);

                // Fetch the right filter to use
                currentKernel = reflectionFilter.FindKernel("TemporalAccumulationFilter");
                cmd.SetComputeFloatParam(reflectionFilter, HDShaderIDs._TemporalAccumuationWeight, settings.temporalAccumulationWeight.value);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._AccumulatedFrameTexture, history);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, HDShaderIDs._CurrentFrameTexture, outputTexture);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_MinColorRangeTexture", m_MinBoundBuffer);
                cmd.SetComputeTextureParam(reflectionFilter, currentKernel, "_MaxColorRangeTexture", m_MaxBoundBuffer);
                cmd.DispatchCompute(reflectionFilter, currentKernel, numTilesXFR, numTilesYFR, 1);
            }
        }
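The numTiles computations above are integer ceiling division, so a partially covered 8x8 tile at the right or bottom edge still gets dispatched. A small helper makes the pattern explicit (illustrative, not in the source):

        // Ceiling division: DivRoundUp(1921, 8) == 241, covering the last partial tile.
        static int DivRoundUp(int count, int tileSize) => (count + tileSize - 1) / tileSize;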
Code example #16
        public void RenderLightVolumes(CommandBuffer cmd, HDCamera hdCamera, CullingResults cullResults, LightingDebugSettings lightDebugSettings, RTHandleSystem.RTHandle finalRT)
        {
            using (new ProfilingSample(cmd, "Display Light Volumes"))
            {
                // Clear the buffers
                HDUtils.SetRenderTarget(cmd, m_ColorAccumulationBuffer, ClearFlag.Color, Color.black);
                HDUtils.SetRenderTarget(cmd, m_LightCountBuffer, ClearFlag.Color, Color.black);
                HDUtils.SetRenderTarget(cmd, m_DebugLightVolumesTexture, ClearFlag.Color, Color.black);

                // Set the render target array
                HDUtils.SetRenderTarget(cmd, m_RTIDs, m_DepthBuffer);

                // First of all let's do the regions for the light sources (we only support Punctual and Area)
                int numLights = cullResults.visibleLights.Length;
                for (int lightIdx = 0; lightIdx < numLights; ++lightIdx)
                {
                    // Let's build the light's bounding sphere matrix
                    Light currentLegacyLight = cullResults.visibleLights[lightIdx].light;
                    if (currentLegacyLight == null)
                    {
                        continue;
                    }
                    HDAdditionalLightData currentHDRLight = currentLegacyLight.GetComponent <HDAdditionalLightData>();
                    if (currentHDRLight == null)
                    {
                        continue;
                    }

                    Matrix4x4 positionMat = Matrix4x4.Translate(currentLegacyLight.transform.position);

                    if (currentLegacyLight.type == LightType.Point || currentLegacyLight.type == LightType.Area)
                    {
                        m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentLegacyLight.range, currentLegacyLight.range, currentLegacyLight.range));
                        switch (currentHDRLight.lightTypeExtent)
                        {
                        case LightTypeExtent.Punctual:
                        {
                            m_MaterialProperty.SetColor(_ColorShaderID, new Color(0.0f, 0.5f, 0.0f, 1.0f));
                            m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                            cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                        }
                        break;

                        case LightTypeExtent.Rectangle:
                        {
                            m_MaterialProperty.SetColor(_ColorShaderID, new Color(0.0f, 1.0f, 1.0f, 1.0f));
                            m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                            cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                        }
                        break;

                        case LightTypeExtent.Tube:
                        {
                            m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.0f, 0.5f, 1.0f));
                            m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                            cmd.DrawMesh(DebugShapes.instance.RequestSphereMesh(), positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                        }
                        break;

                        default:
                            break;
                        }
                    }
                    else if (currentLegacyLight.type == LightType.Spot)
                    {
                        if (currentHDRLight.spotLightShape == SpotLightShape.Cone)
                        {
                            float bottomRadius = Mathf.Tan(currentLegacyLight.spotAngle * Mathf.PI / 360.0f) * currentLegacyLight.range;
                            m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                            m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(bottomRadius, bottomRadius, currentLegacyLight.range));
                            m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                            cmd.DrawMesh(DebugShapes.instance.RequestConeMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                        }
                        else if (currentHDRLight.spotLightShape == SpotLightShape.Box)
                        {
                            m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                            m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDRLight.shapeWidth, currentHDRLight.shapeHeight, currentLegacyLight.range));
                            m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, currentLegacyLight.range / 2.0f));
                            cmd.DrawMesh(DebugShapes.instance.RequestBoxMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                        }
                        else if (currentHDRLight.spotLightShape == SpotLightShape.Pyramid)
                        {
                            float bottomWidth = Mathf.Tan(currentLegacyLight.spotAngle * Mathf.PI / 360.0f) * currentLegacyLight.range;
                            m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 0.5f, 0.0f, 1.0f));
                            m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDRLight.aspectRatio * bottomWidth * 2, bottomWidth * 2, currentLegacyLight.range));
                            m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                            cmd.DrawMesh(DebugShapes.instance.RequestPyramidMesh(), currentLegacyLight.gameObject.transform.localToWorldMatrix, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                        }
                    }
                }

                // Now let's do the same but for reflection probes
                int numProbes = cullResults.visibleReflectionProbes.Length;
                for (int probeIdx = 0; probeIdx < numProbes; ++probeIdx)
                {
                    // Let's build the probe's bounding volume matrix
                    ReflectionProbe            currentLegacyProbe = cullResults.visibleReflectionProbes[probeIdx].reflectionProbe;
                    HDAdditionalReflectionData currentHDProbe     = currentLegacyProbe.GetComponent <HDAdditionalReflectionData>();

                    if (!currentHDProbe)
                    {
                        continue;
                    }

                    MaterialPropertyBlock m_MaterialProperty = new MaterialPropertyBlock();
                    Mesh targetMesh = null;
                    if (currentHDProbe.influenceVolume.shape == InfluenceShape.Sphere)
                    {
                        m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDProbe.influenceVolume.sphereRadius, currentHDProbe.influenceVolume.sphereRadius, currentHDProbe.influenceVolume.sphereRadius));
                        targetMesh = DebugShapes.instance.RequestSphereMesh();
                    }
                    else
                    {
                        m_MaterialProperty.SetVector(_RangeShaderID, new Vector3(currentHDProbe.influenceVolume.boxSize.x, currentHDProbe.influenceVolume.boxSize.y, currentHDProbe.influenceVolume.boxSize.z));
                        targetMesh = DebugShapes.instance.RequestBoxMesh();
                    }

                    m_MaterialProperty.SetColor(_ColorShaderID, new Color(1.0f, 1.0f, 0.0f, 1.0f));
                    m_MaterialProperty.SetVector(_OffsetShaderID, new Vector3(0, 0, 0));
                    Matrix4x4 positionMat = Matrix4x4.Translate(currentLegacyProbe.transform.position);
                    cmd.DrawMesh(targetMesh, positionMat, m_DebugLightVolumeMaterial, 0, 0, m_MaterialProperty);
                }

                // Define which kernel to use based on the lightloop options
                int targetKernel = lightDebugSettings.lightVolumeDebugByCategory == LightVolumeDebug.ColorAndEdge ? m_DebugLightVolumeColorsKernel : m_DebugLightVolumeGradientKernel;

                // Set the input params for the compute
                cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugLightCountBufferShaderID, m_LightCountBuffer);
                cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugColorAccumulationBufferShaderID, m_ColorAccumulationBuffer);
                cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _DebugLightVolumesTextureShaderID, m_DebugLightVolumesTexture);
                cmd.SetComputeTextureParam(m_DebugLightVolumeCompute, targetKernel, _ColorGradientTextureShaderID, m_ColorGradientTexture);
                cmd.SetComputeIntParam(m_DebugLightVolumeCompute, _MaxDebugLightCountShaderID, (int)lightDebugSettings.maxDebugLightCount);

                // Texture dimensions
                int texWidth  = hdCamera.actualWidth;  // m_ColorAccumulationBuffer.rt.width;
                int texHeight = hdCamera.actualHeight; // m_ColorAccumulationBuffer.rt.height;

                // Dispatch the compute
                int lightVolumesTileSize = 8;
                int numTilesX            = (texWidth + (lightVolumesTileSize - 1)) / lightVolumesTileSize;
                int numTilesY            = (texHeight + (lightVolumesTileSize - 1)) / lightVolumesTileSize;
                cmd.DispatchCompute(m_DebugLightVolumeCompute, targetKernel, numTilesX, numTilesY, hdCamera.viewCount);

                // Blit this into the camera target
                HDUtils.SetRenderTarget(cmd, finalRT);
                m_MaterialProperty.SetTexture(HDShaderIDs._BlitTexture, m_DebugLightVolumesTexture);
                cmd.DrawProcedural(Matrix4x4.identity, m_DebugLightVolumeMaterial, 1, MeshTopology.Triangles, 3, 1, m_MaterialProperty);
            }
        }
Code example #17
 public void RenderSky(HDCamera camera, Light sunLight, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
 {
     m_SkyRenderingContext.RenderSky(m_VisualSky, camera, sunLight, colorBuffer, depthBuffer, debugSettings, cmd);
 }
Code example #18
        public void RenderAO(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, int frameCount)
        {
            // Let's check all the resources
            HDRaytracingEnvironment rtEnvironment = m_RaytracingManager.CurrentEnvironment();
            RayTracingShader        aoShader      = m_PipelineRayTracingResources.aoRaytracing;
            var aoSettings = VolumeManager.instance.stack.GetComponent <AmbientOcclusion>();

            // Check if the state is valid for evaluating ambient occlusion
            bool invalidState = rtEnvironment == null ||
                                aoShader == null ||
                                m_PipelineResources.textures.owenScrambledTex == null || m_PipelineResources.textures.scramblingTex == null;

            // If any of the previous requirements is missing (the effect is not requested, or there is no acceleration structure), set the default texture and leave right away
            if (invalidState)
            {
                SetDefaultAmbientOcclusionTexture(cmd);
                return;
            }

            // Grab the acceleration structure for the target camera
            RayTracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironment.aoLayerMask);

            // Define the shader pass to use for the reflection pass
            cmd.SetRayTracingShaderPass(aoShader, "VisibilityDXR");

            // Set the acceleration structure for the pass
            cmd.SetRayTracingAccelerationStructure(aoShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
            cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

            // Inject the ray generation data
            cmd.SetRayTracingFloatParams(aoShader, HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);
            cmd.SetRayTracingFloatParams(aoShader, HDShaderIDs._RaytracingRayMaxLength, aoSettings.rayLength.value);
            cmd.SetRayTracingIntParams(aoShader, HDShaderIDs._RaytracingNumSamples, aoSettings.numSamples.value);

            // Set the data for the ray generation
            cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
            int frameIndex = hdCamera.IsTAAEnabled() ? hdCamera.taaFrameIndex : (int)frameCount % 8;

            cmd.SetGlobalInt(HDShaderIDs._RaytracingFrameIndex, frameIndex);

            // Value used to scale the ao intensity
            cmd.SetRayTracingFloatParam(aoShader, HDShaderIDs._RaytracingAOIntensity, aoSettings.intensity.value);

            cmd.SetRayTracingIntParam(aoShader, HDShaderIDs._RayCountEnabled, m_RaytracingManager.rayCountManager.RayCountIsEnabled());
            cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._RayCountTexture, m_RaytracingManager.rayCountManager.rayCountTexture);

            // Set the output textures
            cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._AmbientOcclusionTextureRW, m_IntermediateBuffer);
            cmd.SetRayTracingTextureParam(aoShader, HDShaderIDs._RaytracingVSNormalTexture, m_ViewSpaceNormalBuffer);

            // Run the computation
            cmd.DispatchRays(aoShader, m_RayGenShaderName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);

            using (new ProfilingSample(cmd, "Filter Reflection", CustomSamplerId.RaytracingAmbientOcclusion.GetSampler()))
            {
                if (aoSettings.enableFilter.value)
                {
                    // Grab the history buffer
                    RTHandleSystem.RTHandle ambientOcclusionHistory = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.RaytracedAmbientOcclusion)
                                                                      ?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.RaytracedAmbientOcclusion, AmbientOcclusionHistoryBufferAllocatorFunction, 1);

                    // Apply the simple denoiser
                    HDSimpleDenoiser simpleDenoiser = m_RaytracingManager.GetSimpleDenoiser();
                    simpleDenoiser.DenoiseBuffer(cmd, hdCamera, m_IntermediateBuffer, ambientOcclusionHistory, outputTexture, aoSettings.filterRadius.value, singleChannel: true);
                }
                else
                {
                    HDUtils.BlitCameraTexture(cmd, m_IntermediateBuffer, outputTexture);
                }
            }

            // Bind the textures and the params
            cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, outputTexture);
            cmd.SetGlobalVector(HDShaderIDs._AmbientOcclusionParam, new Vector4(0f, 0f, 0f, VolumeManager.instance.stack.GetComponent <AmbientOcclusion>().directLightingStrength.value));

            // TODO: All the push-debug stuff should be centralized somewhere
            (RenderPipelineManager.currentPipeline as HDRenderPipeline).PushFullScreenDebugTexture(hdCamera, cmd, outputTexture, FullScreenDebugMode.SSAO);
        }
Code example #19
File: HDUtils.cs Project: zxf1178649100/FPSSample
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer)
 {
     CoreUtils.SetRenderTarget(cmd, colorBuffers, depthBuffer, ClearFlag.None, CoreUtils.clearColorAllBlack);
     SetViewport(cmd, camera, depthBuffer);
 }
Code example #20
        // Combines specular lighting and diffuse lighting with subsurface scattering.
        // In the case our frame is MSAA, given that for the moment we do not have read/write access to the stencil buffer of the MSAA target, we need to keep this pass MSAA.
        // However, the compute shader can't output an MSAA target, so we blend the non-MSAA target into the MSAA one.
        public void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle colorBufferRT,
                                             RTHandleSystem.RTHandle diffuseBufferRT, RTHandleSystem.RTHandle depthStencilBufferRT, RTHandleSystem.RTHandle depthTextureRT)
        {
            if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.SubsurfaceScattering))
            {
                return;
            }

            // TODO: For MSAA, at least initially, we can only support Jimenez, because we can't
            // create MSAA + UAV render targets.

            using (new ProfilingSample(cmd, "Subsurface Scattering", CustomSamplerId.SubsurfaceScattering.GetSampler()))
            {
                // For Jimenez we always need an extra buffer, for Disney it depends on platform
                if (NeedTemporarySubsurfaceBuffer() || hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
                {
                    // Clear the SSS filtering target
                    using (new ProfilingSample(cmd, "Clear SSS filtering target", CustomSamplerId.ClearSSSFilteringTarget.GetSampler()))
                    {
                        HDUtils.SetRenderTarget(cmd, hdCamera, m_CameraFilteringBuffer, ClearFlag.Color, Color.clear);
                    }
                }

                using (new ProfilingSample(cmd, "HTile for SSS", CustomSamplerId.HTileForSSS.GetSampler()))
                {
                    // Currently, Unity does not offer a way to access the GCN HTile even on PS4 and Xbox One.
                    // Therefore, it's computed in a pixel shader, and optimized to only contain the SSS bit.

                    // Clear the HTile texture. TODO: move this to ClearBuffers(). Clear operations must be batched!
                    HDUtils.SetRenderTarget(cmd, hdCamera, m_HTile, ClearFlag.Color, Color.clear);

                    HDUtils.SetRenderTarget(cmd, hdCamera, depthStencilBufferRT); // No need for color buffer here
                    cmd.SetRandomWriteTarget(1, m_HTile);                         // This needs to be done AFTER SetRenderTarget
                    // Generate HTile for the split lighting stencil usage. Don't write into the stencil texture.
                    // Shader pass index 2 selects "Pass 2 - Export HTILE for stencilRef to output".
                    CoreUtils.DrawFullScreen(cmd, m_CopyStencilForSplitLighting, null, 2);
                    cmd.ClearRandomWriteTargets();
                }

                unsafe
                {
                    // Warning: Unity is not able to losslessly transfer integers larger than 2^24 to the shader system.
                    // Therefore, we bitcast uint to float in C#, and bitcast back to uint in the shader.
                    uint texturingModeFlags = this.texturingModeFlags;
                    cmd.SetComputeFloatParam(m_SubsurfaceScatteringCS, HDShaderIDs._TexturingModeFlags, *(float *)&texturingModeFlags);
                }

                cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._WorldScales, worldScales);
                cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._FilterKernels, filterKernels);
                cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._ShapeParams, shapeParams);
                cmd.SetComputeFloatParams(m_SubsurfaceScatteringCS, HDShaderIDs._DiffusionProfileHashTable, diffusionProfileHashes);

                int sssKernel = hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA) ? m_SubsurfaceScatteringKernelMSAA : m_SubsurfaceScatteringKernel;

                cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._DepthTexture, depthTextureRT);
                cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._SSSHTile, m_HTile);
                cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._IrradianceSource, diffuseBufferRT);

                for (int i = 0; i < sssBufferCount; ++i)
                {
                    cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._SSSBufferTexture[i], GetSSSBuffer(i));
                }

                int numTilesX = ((int)(hdCamera.textureWidthScaling.x * hdCamera.screenSize.x) + 15) / 16;
                int numTilesY = ((int)hdCamera.screenSize.y + 15) / 16;
                int numTilesZ = XRGraphics.computePassCount;

                if (NeedTemporarySubsurfaceBuffer() || hdCamera.frameSettings.IsEnabled(FrameSettingsField.MSAA))
                {
                    cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, sssKernel, HDShaderIDs._CameraFilteringBuffer, m_CameraFilteringBuffer);

                    // Perform the SSS filtering pass which fills 'm_CameraFilteringBufferRT'.
                    cmd.DispatchCompute(m_SubsurfaceScatteringCS, sssKernel, numTilesX, numTilesY, numTilesZ);

                    cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, m_CameraFilteringBuffer);  // Cannot set a RT on a material

                    // Additively blend diffuse and specular lighting into 'm_CameraColorBufferRT'.
                    HDUtils.DrawFullScreen(cmd, hdCamera, m_CombineLightingPass, colorBufferRT, depthStencilBufferRT);
                }
                else
                {
                    cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._CameraColorTexture, colorBufferRT);

                    // Perform the SSS filtering pass which performs an in-place update of 'colorBuffer'.
                    cmd.DispatchCompute(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, numTilesX, numTilesY, numTilesZ);
                }
            }
        }
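The unsafe block above reinterprets the uint's bit pattern as a float so all 32 bits survive the constant upload; a round-trip sketch (the HLSL side is an assumption based on the comment in the code):

        // C# side: bit-cast, not a numeric conversion.
        uint flags = 0x00800001u; // example bit pattern
        float packedFlags;
        unsafe { packedFlags = *(float *)&flags; }
        // HLSL side would recover the bits with asuint():
        //     uint flags = asuint(_TexturingModeFlags);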
Code example #21
File: HDUtils.cs Project: zxf1178649100/FPSSample
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor)
 {
     cmd.SetRenderTarget(colorBuffers, depthBuffer);
     SetViewportAndClear(cmd, camera, depthBuffer, clearFlag, clearColor);
 }
Code example #22
        public void RenderSky(SkyUpdateContext skyContext, HDCamera hdCamera, Light sunLight, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
        {
            if (skyContext.IsValid() && hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky)
            {
                using (new ProfilingSample(cmd, "Sky Pass"))
                {
                    m_BuiltinParameters.commandBuffer = cmd;
                    m_BuiltinParameters.sunLight      = sunLight;
#if UNITY_2019_1_OR_NEWER
                    m_BuiltinParameters.pixelCoordToViewDirMatrix = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(hdCamera.camera.GetGateFittedFieldOfView() * Mathf.Deg2Rad, hdCamera.camera.GetGateFittedLensShift(), hdCamera.screenSize, hdCamera.viewMatrix, false);
#else
                    m_BuiltinParameters.pixelCoordToViewDirMatrix = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(hdCamera.camera.fieldOfView * Mathf.Deg2Rad, Vector2.zero, hdCamera.screenSize, hdCamera.viewMatrix, false);
#endif
                    m_BuiltinParameters.screenSize    = hdCamera.screenSize;
                    m_BuiltinParameters.cameraPosWS   = hdCamera.camera.transform.position;
                    m_BuiltinParameters.colorBuffer   = colorBuffer;
                    m_BuiltinParameters.depthBuffer   = depthBuffer;
                    m_BuiltinParameters.hdCamera      = hdCamera;
                    m_BuiltinParameters.debugSettings = debugSettings;

                    skyContext.renderer.SetRenderTargets(m_BuiltinParameters);

                    // If the luxmeter is enabled, we don't render the sky
                    if (debugSettings.data.lightingDebugSettings.debugLightingMode != DebugLightingMode.LuxMeter)
                    {
                        // When rendering the visual sky for reflection probes, we need to remove the sun disk if skySettings.includeSunInBaking is false.
                        skyContext.renderer.RenderSky(m_BuiltinParameters, false, hdCamera.camera.cameraType != CameraType.Reflection || skyContext.skySettings.includeSunInBaking);
                    }
                }
            }
        }
Code example #23
File: HDUtils.cs Project: zxf1178649100/FPSSample
 // In this case, both the source and destination are camera-scaled, but we want to override the scale/bias parameter.
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
 {
     // Will set the correct camera viewport as well.
     SetRenderTarget(cmd, camera, destination);
     BlitTexture(cmd, source, destination, scaleBias, mipLevel, bilinear);
 }
Code example #24
        public void RebuildTextures(int resolution)
        {
            bool updateNeeded = m_SkyboxCubemapRT == null || (m_SkyboxCubemapRT.rt.width != resolution);

            // Cleanup first if needed
            if (updateNeeded)
            {
                RTHandles.Release(m_SkyboxCubemapRT);
                m_SkyboxCubemapRT = null;

                RTHandles.Release(m_SkyboxBSDFCubemapIntermediate);
                m_SkyboxBSDFCubemapIntermediate = null;

                CoreUtils.Destroy(m_SkyboxBSDFCubemapArray);
                m_SkyboxBSDFCubemapArray = null;

                RTHandles.Release(m_SkyboxConditionalCdfRT);
                m_SkyboxConditionalCdfRT = null;

                RTHandles.Release(m_SkyboxMarginalRowCdfRT);
                m_SkyboxMarginalRowCdfRT = null;
            }

            // Reallocate everything
            if (m_SkyboxCubemapRT == null)
            {
                m_SkyboxCubemapRT = RTHandles.Alloc(resolution, resolution, colorFormat: GraphicsFormat.R16G16B16A16_SFloat, dimension: TextureDimension.Cube, useMipMap: true, autoGenerateMips: false, filterMode: FilterMode.Trilinear, name: "SkyboxCubemap");

                if (m_SupportsConvolution)
                {
                    m_SkyboxBSDFCubemapIntermediate = RTHandles.Alloc(resolution, resolution, colorFormat: GraphicsFormat.R16G16B16A16_SFloat, dimension: TextureDimension.Cube, useMipMap: true, autoGenerateMips: false, filterMode: FilterMode.Trilinear, name: "SkyboxBSDFIntermediate");
                    m_SkyboxBSDFCubemapArray        = new CubemapArray(resolution, m_IBLFilterArray.Length, TextureFormat.RGBAHalf, true)
                    {
                        hideFlags  = HideFlags.HideAndDontSave,
                        wrapMode   = TextureWrapMode.Repeat,
                        wrapModeV  = TextureWrapMode.Clamp,
                        filterMode = FilterMode.Trilinear,
                        anisoLevel = 0,
                        name       = "SkyboxCubemapConvolution"
                    };
                }

                if (m_SupportsMIS)
                {
                    // Temporary, it should be dependent on the sky resolution
                    int width  = (int)LightSamplingParameters.TextureWidth;
                    int height = (int)LightSamplingParameters.TextureHeight;

                    // + 1 because we store the value of the integral of the cubemap at the end of the texture.
                    m_SkyboxMarginalRowCdfRT = RTHandles.Alloc(height + 1, 1, colorFormat: GraphicsFormat.R32_SFloat, useMipMap: false, enableRandomWrite: true, filterMode: FilterMode.Point, name: "SkyboxMarginalRowCdf");

                    // TODO: switch the format to R16 (once it's available) to save some bandwidth.
                    m_SkyboxConditionalCdfRT = RTHandles.Alloc(width, height, colorFormat: GraphicsFormat.R32_SFloat, useMipMap: false, enableRandomWrite: true, filterMode: FilterMode.Point, name: "SkyboxConditionalRowCdf");
                }
            }

            m_CubemapScreenSize = new Vector4((float)resolution, (float)resolution, 1.0f / (float)resolution, 1.0f / (float)resolution);

            if (updateNeeded)
            {
                m_NeedUpdate = true; // Special case. Even if update mode is set to OnDemand, we need to regenerate the environment after destroying the texture.
                RebuildSkyMatrices(resolution);
            }
        }
Code example #25
File: HDUtils.cs Project: zxf1178649100/FPSSample
 // This particular case is for blitting a camera-scaled texture into a non-scaled texture. So we set up the full viewport (implicit in cmd.Blit) but have to scale the input UVs.
 public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RenderTargetIdentifier destination)
 {
     cmd.Blit(source, destination, new Vector2(camera.viewportScale.x, camera.viewportScale.y), Vector2.zero);
 }
Code example #26
File: SkyManager.cs Project: supertask/VFXSample
 public void RenderOpaqueAtmosphericScattering(CommandBuffer cmd, HDCamera hdCamera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, bool isMSAA)
 {
     using (new ProfilingSample(cmd, "Opaque Atmospheric Scattering"))
     {
          HDUtils.DrawFullScreen(cmd, hdCamera, m_OpaqueAtmScatteringMaterial, colorBuffer, depthBuffer, null, isMSAA ? 1 : 0);
     }
 }
Code example #27
 // This set of RenderTarget management methods is supposed to be used when rendering into a camera-dependent render texture.
 // This will automatically set the viewport based on the camera size and the RTHandle scaling info.
 public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
 {
     cmd.SetRenderTarget(buffer, miplevel, cubemapFace, depthSlice);
     SetViewportAndClear(cmd, camera, buffer, clearFlag, clearColor);
 }
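A minimal usage sketch, assuming a camera-scaled color RTHandle with an illustrative name:

     // Bind and clear; the viewport becomes hdCamera.actualWidth x actualHeight,
     // regardless of the (possibly larger) physical size of the pooled texture.
     HDUtils.SetRenderTarget(cmd, hdCamera, m_ColorBuffer, ClearFlag.Color, Color.black);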
Code example #28
        public void Render(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle outputTexture, ScriptableRenderContext renderContext, CullingResults cull)
        {
            // First thing to check is: Do we have a valid ray-tracing environment?
            HDRaytracingEnvironment rtEnvironment  = m_RaytracingManager.CurrentEnvironment();
            HDRenderPipeline        renderPipeline = m_RaytracingManager.GetRenderPipeline();
            RayTracingShader        forwardShader  = m_PipelineAsset.renderPipelineRayTracingResources.forwardRaytracing;
            Shader raytracingMask = m_PipelineAsset.renderPipelineRayTracingResources.raytracingFlagMask;

            RecursiveRendering recursiveSettings    = VolumeManager.instance.stack.GetComponent <RecursiveRendering>();
            LightCluster       lightClusterSettings = VolumeManager.instance.stack.GetComponent <LightCluster>();

            // Check the validity of the state before computing the effect
            bool invalidState = rtEnvironment == null || !recursiveSettings.enable.value ||
                                forwardShader == null || raytracingMask == null ||
                                m_PipelineResources.textures.owenScrambledTex == null || m_PipelineResources.textures.scramblingTex == null;

            // If any resource or game-object is missing, we stop right away
            if (invalidState)
            {
                return;
            }

            // Grab the acceleration structure and the list of HD lights for the target camera
            RayTracingAccelerationStructure accelerationStructure = m_RaytracingManager.RequestAccelerationStructure(rtEnvironment.raytracedLayerMask);
            HDRaytracingLightCluster        lightCluster          = m_RaytracingManager.RequestLightCluster(rtEnvironment.raytracedLayerMask);

            if (m_RaytracingFlagMaterial == null)
            {
                m_RaytracingFlagMaterial = CoreUtils.CreateEngineMaterial(raytracingMask);
            }

            // Before going into raytracing, we need to flag which pixels need to be ray traced
            EvaluateRaytracingMask(cull, hdCamera, cmd, renderContext);

            // Define the shader pass to use for the reflection pass
            cmd.SetRayTracingShaderPass(forwardShader, "ForwardDXR");

            // Set the acceleration structure for the pass
            cmd.SetRayTracingAccelerationStructure(forwardShader, HDShaderIDs._RaytracingAccelerationStructureName, accelerationStructure);

            // Inject the ray-tracing sampling data
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._OwenScrambledTexture, m_PipelineResources.textures.owenScrambledTex);
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._ScramblingTexture, m_PipelineResources.textures.scramblingTex);

            // Inject the ray generation data
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayBias, rtEnvironment.rayBias);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingRayMaxLength, recursiveSettings.rayLength.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingMaxRecursion, recursiveSettings.maxDepth.value);
            cmd.SetGlobalFloat(HDShaderIDs._RaytracingCameraNearPlane, hdCamera.camera.nearClipPlane);

            // Set the data for the ray generation
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RaytracingFlagMask, m_RaytracingFlagTarget);
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._CameraColorTextureRW, outputTexture);

            // Compute an approximate pixel spread angle value (in radians)
            float pixelSpreadAngle = hdCamera.camera.fieldOfView * (Mathf.PI / 180.0f) / Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight);

            cmd.SetGlobalFloat(HDShaderIDs._RaytracingPixelSpreadAngle, pixelSpreadAngle);

            // LightLoop data
            cmd.SetGlobalBuffer(HDShaderIDs._RaytracingLightCluster, lightCluster.GetCluster());
            cmd.SetGlobalBuffer(HDShaderIDs._LightDatasRT, lightCluster.GetLightDatas());
            cmd.SetGlobalVector(HDShaderIDs._MinClusterPos, lightCluster.GetMinClusterPos());
            cmd.SetGlobalVector(HDShaderIDs._MaxClusterPos, lightCluster.GetMaxClusterPos());
            cmd.SetGlobalInt(HDShaderIDs._LightPerCellCount, lightClusterSettings.maxNumLightsPercell.value);
            cmd.SetGlobalInt(HDShaderIDs._PunctualLightCountRT, lightCluster.GetPunctualLightCount());
            cmd.SetGlobalInt(HDShaderIDs._AreaLightCountRT, lightCluster.GetAreaLightCount());

            // Note: Just in case, we rebind the directional light data (in case it was not already bound)
            cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, renderPipeline.m_LightLoopLightData.directionalLightData);
            cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, renderPipeline.m_lightList.directionalLights.Count);

            // Set the data for the ray miss
            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._SkyTexture, m_SkyManager.skyReflection);

            // Bind the primary visibility debug texture and push it to the full-screen debug view
            HDRenderPipeline hdrp = (RenderPipelineManager.currentPipeline as HDRenderPipeline);

            cmd.SetRayTracingTextureParam(forwardShader, HDShaderIDs._RaytracingPrimaryDebug, m_DebugRaytracingTexture);
            hdrp.PushFullScreenDebugTexture(hdCamera, cmd, m_DebugRaytracingTexture, FullScreenDebugMode.PrimaryVisibility);

            // Run the computation
            cmd.DispatchRays(forwardShader, m_RayGenShaderName, (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
        }
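For reference, example #15 computes the pixel spread angle exactly with atan/tan, while this example uses the small-angle approximation; both forms side by side (a sketch):

            float fovRad = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
            float minDim = Mathf.Min(hdCamera.actualWidth, hdCamera.actualHeight);
            float exact  = Mathf.Atan(2.0f * Mathf.Tan(fovRad * 0.5f) / minDim); // example #15
            float approx = fovRad / minDim;                                      // this example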
Code example #29
        public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
        {
            int cw = colorBuffer.rt.width;
            int ch = colorBuffer.rt.height;
            int dw = depthBuffer.rt.width;
            int dh = depthBuffer.rt.height;

            Debug.Assert(cw == dw && ch == dh);

            SetRenderTarget(cmd, camera, colorBuffer, depthBuffer, ClearFlag.None, CoreUtils.clearColorAllBlack, miplevel, cubemapFace, depthSlice);
        }
Code example #30
    void RenderAtmosphericsPost(ScriptableRenderContext renderContext, HDCamera hdCamera, PostProcessLayer ppLayer, RTHandleSystem.RTHandle depthHandle, RTHandleSystem.RTHandle colorHandle, CommandBuffer cmd)
    {
        // Maybe we don't need this anymore?
        renderContext.ExecuteCommandBuffer(cmd);
        cmd.Clear();

        using (new ProfilingSample(cmd, "Atmospherics")) {
            // TODO: These custom effects need to go into the PP chain. As standalone, each of them have to do an additional blit.

            // Hack: patch in the main camera for scattering when rendering a reflection probe.
            var        camera = hdCamera.camera;
            var        realCamera = camera;
            Vector3    pos = default(Vector3), scale = default(Vector3);
            Quaternion rot = default(Quaternion);
            if (camera.cameraType == CameraType.Reflection)
            {
                camera = Camera.main;
                camera = camera ? camera : realCamera;
                pos    = camera.transform.position;
                rot    = camera.transform.rotation;
                scale  = camera.transform.localScale;
                camera.transform.position     = realCamera.transform.position;
                camera.transform.rotation     = Quaternion.LookRotation(realCamera.worldToCameraMatrix.GetColumn(2), realCamera.worldToCameraMatrix.GetColumn(1));
                camera.transform.localScale   = realCamera.transform.localScale;
                realCamera.transform.rotation = Quaternion.LookRotation(realCamera.worldToCameraMatrix.GetColumn(2), realCamera.worldToCameraMatrix.GetColumn(1));
            }

            // Render atmospherics if any
            var atmosphericScattering = global::AtmosphericScattering.instance;
            var atmosphericDeferred   = camera.GetComponent <global::AtmosphericScatteringDeferred>();
            if (atmosphericScattering && atmosphericDeferred && atmosphericDeferred.enabled)
            {
                atmosphericScattering.RenderOcclusion(cmd, realCamera, hdCamera);
                RenderScattering(cmd, hdCamera, realCamera, depthHandle, colorHandle, colorHandle);
            }

            // Restore actual camera
            if (camera != realCamera)
            {
                realCamera.transform.rotation = Quaternion.identity;
                camera.transform.position     = pos;
                camera.transform.rotation     = rot;
                camera.transform.localScale   = scale;
                camera = realCamera;
            }
        }
    }