Example No. 1
0
 // Creates the grab-blur pass: allocates the blur helper and registers the
 // shader-property names used as temporary render-target handles.
 // NOTE(review): the colorHandle parameter is never used in this constructor —
 // confirm whether it should be stored (e.g. in an m_ColorHandle field) or removed.
 public GrabBlurPassImpl(UnityEngine.Rendering.Universal.RenderTargetHandle colorHandle)
 {
     m_Blur = new CommandBufferBlur();
     // Init binds each handle to a shader property name so temporary RTs can
     // be requested/released by id later.
     m_BlurTemp1.Init("_Temp1");
     m_BlurTemp2.Init("_Temp2");
     m_ScreenCopyId.Init("_ScreenCopyTexture");
 }
Example No. 2
0
 /// <summary>
 /// Caches the materials and blur amount used by the grab pass, and resets
 /// the color handle to the camera target.
 /// </summary>
 /// <param name="blurMaterial">Material used for the blur passes.</param>
 /// <param name="blurAmount">Blur extent in X and Y.</param>
 /// <param name="blitMaterial">Material used for the final blit.</param>
 public GrabPassImpl(Material blurMaterial, Vector2 blurAmount, Material blitMaterial)
 {
     m_BlurAmount   = blurAmount;
     m_BlurMaterial = blurMaterial;
     m_BlitMaterial = blitMaterial;
     m_ColorHandle  = UnityEngine.Rendering.Universal.RenderTargetHandle.CameraTarget;
 }
Example No. 3
0
 /// <summary>
 /// Compares two handles. When either side carries the sentinel id (-2) the
 /// handles are compared by their render-target identifiers; otherwise the
 /// ids are compared directly.
 /// </summary>
 public bool Equals(RenderTargetHandle other)
 {
     bool compareByIdentifier = (id == -2) || (other.id == -2);
     return compareByIdentifier
         ? Identifier() == other.Identifier()
         : id == other.id;
 }
Example No. 4
0
        /// <inheritdoc/>
        public override void FrameCleanup(CommandBuffer cmd)
        {
            if (cmd == null)
            {
                // nameof keeps the exception's parameter name refactor-safe
                // (same "cmd" string at runtime, so callers see no change).
                throw new ArgumentNullException(nameof(cmd));
            }

            // Release the temporary RT allocated for this pass and point the
            // destination back at the camera target for the next frame.
            cmd.ReleaseTemporaryRT(destination.id);
            destination = RenderTargetHandle.CameraTarget;
        }
Example No. 5
0
        /// <summary>
        /// Configures the pass with the depth attachment to render into and a
        /// descriptor derived from the camera target descriptor.
        /// </summary>
        /// <param name="baseDescriptor">Camera target descriptor to adapt for depth-only rendering.</param>
        /// <param name="depthAttachmentHandle">Handle of the depth attachment.</param>
        public void Setup(
            RenderTextureDescriptor baseDescriptor,
            RenderTargetHandle depthAttachmentHandle)
        {
            this.depthAttachmentHandle = depthAttachmentHandle;

            // Depth-only rendering: force the depth format and configured depth
            // precision, and disable MSAA (this pass doesn't use it).
            baseDescriptor.colorFormat     = RenderTextureFormat.Depth;
            baseDescriptor.depthBufferBits = kDepthBufferBits;
            baseDescriptor.msaaSamples     = 1;

            descriptor = baseDescriptor;
        }
Example No. 6
0
        // Allocates the temporary shadow render texture, sized by the renderer's
        // light render-texture scale (clamped to [0.01, 1]) relative to the
        // camera target.
        private static void CreateShadowRenderTexture(IRenderPass2D pass, RenderTargetHandle rtHandle, RenderingData renderingData, CommandBuffer cmdBuffer)
        {
            var scale  = Mathf.Clamp(pass.rendererData.lightRenderTextureScale, 0.01f, 1.0f);
            var target = renderingData.cameraData.cameraTargetDescriptor;

            var descriptor = new RenderTextureDescriptor(
                (int)(target.width * scale),
                (int)(target.height * scale))
            {
                useMipMap        = false,
                autoGenerateMips = false,
                depthBufferBits  = 24,
                graphicsFormat   = GraphicsFormat.R8G8B8A8_UNorm,
                msaaSamples      = 1,
                dimension        = TextureDimension.Tex2D,
            };

            cmdBuffer.GetTemporaryRT(rtHandle.id, descriptor, FilterMode.Bilinear);
        }
Example No. 7
0
 /// <summary>
 /// Two handles are equal when they carry the same render-target id.
 /// </summary>
 public bool Equals(RenderTargetHandle other) => id == other.id;
Example No. 8
0
 /// <summary>
 /// Stores the source identifier and destination handle this pass will
 /// operate on during execution.
 /// </summary>
 public void Setup(RenderTargetIdentifier source, UnityEngine.Rendering.Universal.RenderTargetHandle destination)
 {
     this.destination = destination;
     this.source      = source;
 }
Example No. 9
0
        /// <summary>
        /// Set RenderBuffer for camera: decides whether a depth prepass is
        /// required and whether intermediate color/depth textures must be
        /// created, then configures the active camera color/depth attachments.
        /// </summary>
        /// <param name="context">Render context used when creating camera render targets.</param>
        /// <param name="renderingData">Per-frame rendering data (camera, target descriptor).</param>
        /// <param name="cameraData">Camera data read for post-processing/depth/stereo flags.</param>
        /// <param name="requiresDepthPrepass">Set true when depth must be rendered in a prepass.</param>
        /// <param name="createDepthTexture">Set true when an intermediate depth texture is needed.</param>
        private void RefreshRenderBufferForSingleCamera(ScriptableRenderContext context, ref RenderingData renderingData,
                                                        ref CameraData cameraData, out bool requiresDepthPrepass, out bool createDepthTexture)
        {
            Camera camera = renderingData.cameraData.camera;
            RenderTextureDescriptor cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;
            bool applyPostProcessing = cameraData.postProcessEnabled;

            bool isSceneViewCamera    = cameraData.isSceneViewCamera;
            bool isPreviewCamera      = cameraData.isPreviewCamera;
            bool requiresDepthTexture = cameraData.requiresDepthTexture;
            bool isStereoEnabled      = cameraData.isStereoEnabled;

            // Depth prepass is generated in the following cases:
            // - If game or offscreen camera requires it we check if we can copy the depth from the rendering opaques pass and use that instead.
            // - Scene or preview cameras always require a depth texture. We do a depth pre-pass to simplify it and it shouldn't matter much for editor.
            requiresDepthPrepass  = requiresDepthTexture && !CanCopyDepth(ref renderingData.cameraData);
            requiresDepthPrepass |= isSceneViewCamera;
            requiresDepthPrepass |= isPreviewCamera;

            // The copying of depth should normally happen after rendering opaques.
            // But if we only require it for post processing or the scene camera then we do it after rendering transparent objects
            m_CopyDepthPass.renderPassEvent = (!requiresDepthTexture && (applyPostProcessing || isSceneViewCamera)) ?
                                              RenderPassEvent.AfterRenderingTransparents : RenderPassEvent.AfterRenderingOpaques;

            // TODO: CopyDepth pass is disabled in XR due to required work to handle camera matrices in URP.
            // IF this condition is removed make sure the CopyDepthPass.cs is working properly on all XR modes. This requires PureXR SDK integration.
            if (isStereoEnabled && requiresDepthTexture)
            {
                requiresDepthPrepass = true;
            }

            bool isRunningHololens = false;

#if ENABLE_VR && ENABLE_VR_MODULE
            isRunningHololens = UniversalRenderPipeline.IsRunningHololens(camera);
#endif
            // Intermediate color texture: needed when the pipeline requires one,
            // or when renderer features are present (except on HoloLens), but
            // never for preview cameras.
            bool createColorTexture = RequiresIntermediateColorTexture(ref cameraData);
            createColorTexture |= (rendererFeatures.Count != 0 && !isRunningHololens);
            createColorTexture &= !isPreviewCamera;

            // If camera requires depth and there's no depth pre-pass we create a depth texture that can be read later by effect requiring it.
            createDepthTexture  = cameraData.requiresDepthTexture && !requiresDepthPrepass;
            createDepthTexture |= (cameraData.renderType == CameraRenderType.Base && !cameraData.resolveFinalTarget);

#if UNITY_ANDROID || UNITY_WEBGL
            if (SystemInfo.graphicsDeviceType != GraphicsDeviceType.Vulkan)
            {
                // GLES can not use render texture's depth buffer with the color buffer of the backbuffer
                // in such case we create a color texture for it too.
                createColorTexture |= createDepthTexture;
            }
#endif
            // Configure all settings require to start a new camera stack (base camera only)
            if (cameraData.renderType == CameraRenderType.Base)
            {
                m_ActiveCameraColorAttachment = (createColorTexture) ? m_CameraColorAttachment : RenderTargetHandle.CameraTarget;
                m_ActiveCameraDepthAttachment = (createDepthTexture) ? m_CameraDepthAttachment : RenderTargetHandle.CameraTarget;

                bool intermediateRenderTexture = createColorTexture || createDepthTexture;

                // Doesn't create texture for Overlay cameras as they are already overlaying on top of created textures.
                bool createTextures = intermediateRenderTexture;
                if (createTextures)
                {
                    CreateCameraRenderTarget(context, ref renderingData.cameraData);
                }

                // if rendering to intermediate render texture we don't have to create msaa backbuffer
                int backbufferMsaaSamples = (intermediateRenderTexture) ? 1 : cameraTargetDescriptor.msaaSamples;

                // NOTE(review): backbuffer format is only reconfigured for the main
                // game camera rendering directly to screen — confirm this is intended
                // for multi-camera setups where Camera.main differs.
                if (Camera.main == camera && camera.cameraType == CameraType.Game && cameraData.targetTexture == null)
                {
                    SetupBackbufferFormat(backbufferMsaaSamples, isStereoEnabled);
                }
            }
            else
            {
                // Overlay cameras reuse the attachments created by the base camera,
                // unless UI/scene rendering is split into separate renderers.
                if (m_SplitUICameraAndSceneCameraRenderer)
                {
                    RefreshCameraColorAttachment(context, ref renderingData.cameraData);
                }
                else
                {
                    m_ActiveCameraColorAttachment = m_CameraColorAttachment;
                    m_ActiveCameraDepthAttachment = m_CameraDepthAttachment;
                }
            }

            ConfigureCameraTarget(m_ActiveCameraColorAttachment.Identifier(), m_ActiveCameraDepthAttachment.Identifier());
        }
Example No. 10
0
 /// <summary>
 /// Configures the pass, recording which render target to read from and
 /// which to write to when it executes.
 /// </summary>
 /// <param name="source">Render target to read from.</param>
 /// <param name="destination">Render target to write to.</param>
 public void Setup(RenderTargetIdentifier source, RenderTargetHandle destination)
 {
     this.destination = destination;
     this.source      = source;
 }
Example No. 11
0
 /// <summary>
 /// Configures the pass, recording the source/destination render targets
 /// and the downsampling method to apply while copying.
 /// </summary>
 /// <param name="source">Render target to read from.</param>
 /// <param name="destination">Render target to write to.</param>
 /// <param name="downsampling">Downsampling method used during the copy.</param>
 public void Setup(RenderTargetIdentifier source, RenderTargetHandle destination, Downsampling downsampling)
 {
     m_DownsamplingMethod = downsampling;
     this.destination     = destination;
     this.source          = source;
 }