// Here you can implement the rendering logic.
        // Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
        // https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
        // You don't have to call ScriptableRenderContext.Submit; the render pipeline will call it at specific points in the pipeline.
        public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        {
            CommandBuffer cmd = GetCommandBuffer();

            SetCommandBuffer(null);
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
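        // For reference, a minimal sketch of the more conventional Execute pattern, in which the
        // pass rents its own buffer from CommandBufferPool for the duration of the call (the pass
        // name "MyRenderPass" below is illustrative, not taken from the snippet above):
        //
        //     public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        //     {
        //         CommandBuffer cmd = CommandBufferPool.Get("MyRenderPass");
        //         // ... record commands on cmd here ...
        //         context.ExecuteCommandBuffer(cmd);
        //         CommandBufferPool.Release(cmd);
        //     }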
 public override void AddRenderPasses(UnityEngine.Rendering.Universal.ScriptableRenderer renderer,
                                      ref UnityEngine.Rendering.Universal.RenderingData renderingData)
 {
      if (IsSkip /* || (!_enable) */)
     {
         return;
     }
     _drawOutLinePass.Setup();
     renderer.EnqueuePass(_drawOutLinePass);
 }
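 // For context, a hedged sketch of the ScriptableRendererFeature boilerplate that usually surrounds
 // this AddRenderPasses override; the DrawOutLinePass type name and the chosen RenderPassEvent are
 // assumptions inferred from the field above, not taken from the original source:
 //
 //     public override void Create()
 //     {
 //         _drawOutLinePass = new DrawOutLinePass();
 //         _drawOutLinePass.renderPassEvent = UnityEngine.Rendering.Universal.RenderPassEvent.AfterRenderingOpaques;
 //     }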
        public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        {
            if (Effekseer.EffekseerSystem.Instance == null)
            {
                return;
            }
            prop.colorTargetDescriptor      = renderingData.cameraData.cameraTargetDescriptor;
            prop.isRequiredToCopyBackground = true;
            Effekseer.EffekseerSystem.Instance.renderer.Render(renderingData.cameraData.camera, prop, null);
            var commandBuffer = Effekseer.EffekseerSystem.Instance.renderer.GetCameraCommandBuffer(renderingData.cameraData.camera);

            if (commandBuffer != null)
            {
                context.ExecuteCommandBuffer(commandBuffer);
                context.Submit();
            }
        }
    public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
    {
        CommandBuffer buf = CommandBufferPool.Get();

        using (new ProfilingScope(buf, m_ProfilingSampler))
        {
            // copy screen into temporary RT
            int screenCopyID = Shader.PropertyToID("_ScreenCopyTexture");
            buf.GetTemporaryRT(screenCopyID, m_OpaqueDesc, FilterMode.Bilinear);
            buf.Blit(m_ColorHandle.Identifier(), screenCopyID);

            m_OpaqueDesc.width  /= 2;
            m_OpaqueDesc.height /= 2;

            // get two smaller RTs
            int blurredID  = Shader.PropertyToID("_BlurRT1");
            int blurredID2 = Shader.PropertyToID("_BlurRT2");
            buf.GetTemporaryRT(blurredID, m_OpaqueDesc, FilterMode.Bilinear);
            buf.GetTemporaryRT(blurredID2, m_OpaqueDesc, FilterMode.Bilinear);

            // downsample screen copy into smaller RT, release screen RT
            buf.Blit(screenCopyID, blurredID);
            buf.ReleaseTemporaryRT(screenCopyID);

            // horizontal blur
            buf.SetGlobalVector("offsets", new Vector4(m_BlurAmount.x / Screen.width, 0, 0, 0));
            buf.Blit(blurredID, blurredID2, m_BlurMaterial);
            // vertical blur
            buf.SetGlobalVector("offsets", new Vector4(0, m_BlurAmount.y / Screen.height, 0, 0));
            buf.Blit(blurredID2, blurredID, m_BlurMaterial);

            // horizontal blur
            buf.SetGlobalVector("offsets", new Vector4(m_BlurAmount.x * 2 / Screen.width, 0, 0, 0));
            buf.Blit(blurredID, blurredID2, m_BlurMaterial);
            // vertical blur
            buf.SetGlobalVector("offsets", new Vector4(0, m_BlurAmount.y * 2 / Screen.height, 0, 0));
            buf.Blit(blurredID2, blurredID, m_BlurMaterial);

            //Set Texture for Shader Graph
            buf.SetGlobalTexture("_GrabBlurTexture", blurredID);
        }

        context.ExecuteCommandBuffer(buf);
        CommandBufferPool.Release(buf);
    }
        public override void Execute(ScriptableRenderContext context,
                                     ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        {
            CommandBuffer command = CommandBufferPool.Get(_profilerTag);

            using (new ProfilingSample(command, _profilerTag))
            {
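                // Executing the buffer here submits the begin-sample marker that the ProfilingSample
                // constructor recorded, and Clear() empties it so that the second ExecuteCommandBuffer
                // below only submits the matching end-sample marker written when the using scope ends.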
                context.ExecuteCommandBuffer(command);
                command.Clear();
                var sortFlags    = renderingData.cameraData.defaultOpaqueSortFlags;
                var drawSettings = CreateDrawingSettings(_shaderTagId,
                                                         ref renderingData, sortFlags);
                context.DrawRenderers(renderingData.cullResults, ref drawSettings,
                                      ref _filteringSettings);
            }
            context.ExecuteCommandBuffer(command);
            CommandBufferPool.Release(command);
        }
        public override void Setup(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        {
            if (renderingData.cameraData.renderType == CameraRenderType.Overlay)
            {
                // Directly set the viewport as the render target.
                ConfigureCameraTarget(BuiltinRenderTextureType.CameraTarget,
                                      BuiltinRenderTextureType.CameraTarget);
                // By default URP does not automatically clear the viewport depth buffer, so a pass is added to clear it.
                EnqueuePass(m_clearDepthWhenRenderCameraStackPass);
                EnqueuePass(m_opaqueObjectPass);
                EnqueuePass(m_transparentObjectPass);
                // Drawing directly to the viewport, so FinalBlitPass is not needed.
                return;
            }

            CreateCameraRenderTarget(context, ref renderingData.cameraData);
            ConfigureCameraTarget(m_cameraColorAttachment.Identifier(), m_cameraDepthAttachment.Identifier());

            foreach (var feature in rendererFeatures)
            {
                feature.AddRenderPasses(this, ref renderingData);
            }

            // Enqueue the opaque object pass
            EnqueuePass(m_opaqueObjectPass);

            // Enqueue the transparent object pass
            EnqueuePass(m_transparentObjectPass);

            Camera camera = renderingData.cameraData.camera;

            if (camera.clearFlags == CameraClearFlags.Skybox && RenderSettings.skybox != null)
            {
                m_drawSkyBoxPass.Setup(m_cameraColorAttachment.Identifier(), m_cameraDepthAttachment.Identifier());
                // Enqueue the dissolve skybox pass
                EnqueuePass(m_drawSkyBoxPass);
            }

            RenderTextureDescriptor cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;

            // Enqueue the final pass (draws the result to the viewport)
            m_finalBlitPass.Setup(cameraTargetDescriptor, m_cameraColorAttachment);
            EnqueuePass(m_finalBlitPass);
        }
    public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
    {
        CommandBuffer cmd = CommandBufferPool.Get(k_RenderGrabPassTag);

        using (new ProfilingSample(cmd, k_RenderGrabPassTag))
        {
            // copy screen into temporary RT
            Blit(cmd, m_ColorSource, m_ScreenCopyId.Identifier());

            // downsample screen copy into smaller RTs
            Blit(cmd, m_ScreenCopyId.Identifier(), m_BlurTemp1.Identifier());

            // Setup blur commands
            m_Blur.SetupCommandBuffer(cmd, m_BlurTemp1.id, m_BlurTemp2.id);

            // Set texture id so we can use it later
            cmd.SetGlobalTexture("_GrabBlurTexture", m_BlurTemp1.id);
        }

        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }
        /// <inheritdoc/>
        public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        {
            CommandBuffer cmd = CommandBufferPool.Get(m_ProfilerTag);

            RenderTextureDescriptor opaqueDesc = renderingData.cameraData.cameraTargetDescriptor;

            opaqueDesc.depthBufferBits = 0;

            // Can't read and write to the same color target, so create a temp render target to blit through.
            if (destination == UnityEngine.Rendering.Universal.RenderTargetHandle.CameraTarget)
            {
                cmd.GetTemporaryRT(m_TemporaryColorTexture.id, opaqueDesc, filterMode);
                Blit(cmd, source, m_TemporaryColorTexture.Identifier(), blitMaterial, blitShaderPassIndex);
                Blit(cmd, m_TemporaryColorTexture.Identifier(), source);
            }
            else
            {
                Blit(cmd, source, destination.Identifier(), blitMaterial, blitShaderPassIndex);
            }

            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
        public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        {
            if (m_enabled)
            {
                if (m_material == null)
                {
                    return;
                }
                if (m_mesh == null)
                {
                    return;
                }

                CommandBuffer cmd = CommandBufferPool.Get(k_RenderTag);

                viewproj = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1.0f, 1.0f, 1.0f)); //set to 1x1 pixel ratio
                cmd.SetViewProjectionMatrices(Matrix4x4.identity, viewproj);

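                // Clip-space transforms for ten instanced quads: four horizontal and four vertical offset
                // lines plus a centered pair; the 2.0f / Screen.* scale keeps each quad roughly one pixel
                // thick (assuming m_mesh is a unit quad).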
                matrices[0] = Matrix4x4.TRS(new Vector3(0f, 0.3333f, 0), Quaternion.identity, new Vector3(1, 2.0f / Screen.height, 1));
                matrices[1] = Matrix4x4.TRS(new Vector3(0f, -0.3333f, 0), Quaternion.identity, new Vector3(1, 2.0f / Screen.height, 1));
                matrices[2] = Matrix4x4.TRS(new Vector3(0f, 0.6666f, 0), Quaternion.identity, new Vector3(1, 2.0f / Screen.height, 1));
                matrices[3] = Matrix4x4.TRS(new Vector3(0f, -0.6666f, 0), Quaternion.identity, new Vector3(1, 2.0f / Screen.height, 1));
                matrices[4] = Matrix4x4.TRS(new Vector3(0.33333f, 0f, 0), Quaternion.identity, new Vector3(2.0f / Screen.width, 1, 1));
                matrices[5] = Matrix4x4.TRS(new Vector3(-0.33333f, 0f, 0), Quaternion.identity, new Vector3(2.0f / Screen.width, 1, 1));
                matrices[6] = Matrix4x4.TRS(new Vector3(0.66666f, 0f, 0), Quaternion.identity, new Vector3(2.0f / Screen.width, 1, 1));
                matrices[7] = Matrix4x4.TRS(new Vector3(-0.66666f, 0f, 0), Quaternion.identity, new Vector3(2.0f / Screen.width, 1, 1));

                matrices[8] = Matrix4x4.TRS(new Vector3(0.0f, 0.0f, 0.0f), Quaternion.identity, new Vector3(2.0f / Screen.width, 1, 1));
                matrices[9] = Matrix4x4.TRS(new Vector3(0.0f, 0.0f, 0.0f), Quaternion.identity, new Vector3(1, 2.0f / Screen.height, 1));

                cmd.DrawMeshInstanced(m_mesh, 0, m_material, 0, matrices, 10);
                cmd.SetViewProjectionMatrices(renderingData.cameraData.camera.worldToCameraMatrix, renderingData.cameraData.camera.projectionMatrix);

                context.ExecuteCommandBuffer(cmd);
                CommandBufferPool.Release(cmd);
            }
        }
        // Here you can implement the rendering logic.
        // Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
        // https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
        // You don't have to call ScriptableRenderContext.Submit; the render pipeline will call it at specific points in the pipeline.
        public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        {
            CommandBuffer cmd = CommandBufferPool.Get("WorldSpaceRipple Pass");

            RenderTextureDescriptor opaqueDescriptor = renderingData.cameraData.cameraTargetDescriptor;

            opaqueDescriptor.depthBufferBits = 0;

            if (destination == UnityEngine.Rendering.Universal.RenderTargetHandle.CameraTarget)
            {
                cmd.GetTemporaryRT(temporaryColorTexture.id, opaqueDescriptor, FilterMode.Point);
                Blit(cmd, source, temporaryColorTexture.Identifier(), rippleMaterial, 0);
                Blit(cmd, temporaryColorTexture.Identifier(), source);
            }
            else
            {
                Blit(cmd, source, destination.Identifier(), rippleMaterial, 0);
            }



            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
 public override void AddRenderPasses(UnityEngine.Rendering.Universal.ScriptableRenderer renderer, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
 {
     renderer.EnqueuePass(m_SafeAreaPass);
 }
 // Here you can inject one or multiple render passes in the renderer.
 // This method is called when setting up the renderer once per-camera.
 public override void AddRenderPasses(UnityEngine.Rendering.Universal.ScriptableRenderer renderer, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
 {
     if (settings.rippleMaterial == null)
     {
          Debug.LogWarning("Missing Ripple Material");
         return;
     }
     ripplePass.Setup(renderer.cameraColorTarget, UnityEngine.Rendering.Universal.RenderTargetHandle.CameraTarget);
     renderer.EnqueuePass(ripplePass);
 }
 /// <inheritdoc />
 public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
 {
     Camera                  camera                 = renderingData.cameraData.camera;
     ref CameraData          cameraData             = ref renderingData.cameraData;
 /// <summary>
 /// Injects one or multiple <c>ScriptableRenderPass</c> in the renderer.
 /// </summary>
 /// <param name="renderer">Renderer to add render passes to.</param>
 /// <param name="renderingData">Rendering state. Use this to setup render passes.</param>
 public abstract void AddRenderPasses(ScriptableRenderer renderer,
                                      ref RenderingData renderingData);
        /// <summary>
        /// Sets the render buffers for the camera.
        /// </summary>
        private void RefreshRenderBufferForSingleCamera(ScriptableRenderContext context, ref RenderingData renderingData,
                                                        ref CameraData cameraData, out bool requiresDepthPrepass, out bool createDepthTexture)
        {
            Camera camera = renderingData.cameraData.camera;
            RenderTextureDescriptor cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;
            bool applyPostProcessing = cameraData.postProcessEnabled;

            bool isSceneViewCamera    = cameraData.isSceneViewCamera;
            bool isPreviewCamera      = cameraData.isPreviewCamera;
            bool requiresDepthTexture = cameraData.requiresDepthTexture;
            bool isStereoEnabled      = cameraData.isStereoEnabled;

            // Depth prepass is generated in the following cases:
            // - If game or offscreen camera requires it we check if we can copy the depth from the rendering opaques pass and use that instead.
            // - Scene or preview cameras always require a depth texture. We do a depth pre-pass to simplify it and it shouldn't matter much for editor.
            requiresDepthPrepass  = requiresDepthTexture && !CanCopyDepth(ref renderingData.cameraData);
            requiresDepthPrepass |= isSceneViewCamera;
            requiresDepthPrepass |= isPreviewCamera;

            // The copying of depth should normally happen after rendering opaques.
            // But if we only require it for post processing or the scene camera, then we do it after rendering transparent objects.
            m_CopyDepthPass.renderPassEvent = (!requiresDepthTexture && (applyPostProcessing || isSceneViewCamera)) ?
                                              RenderPassEvent.AfterRenderingTransparents : RenderPassEvent.AfterRenderingOpaques;

            // TODO: CopyDepth pass is disabled in XR due to required work to handle camera matrices in URP.
            // If this condition is removed, make sure CopyDepthPass.cs is working properly on all XR modes. This requires PureXR SDK integration.
            if (isStereoEnabled && requiresDepthTexture)
            {
                requiresDepthPrepass = true;
            }

            bool isRunningHololens = false;

#if ENABLE_VR && ENABLE_VR_MODULE
            isRunningHololens = UniversalRenderPipeline.IsRunningHololens(camera);
#endif
            bool createColorTexture = RequiresIntermediateColorTexture(ref cameraData);
            createColorTexture |= (rendererFeatures.Count != 0 && !isRunningHololens);
            createColorTexture &= !isPreviewCamera;

            // If camera requires depth and there's no depth pre-pass we create a depth texture that can be read later by effect requiring it.
            createDepthTexture  = cameraData.requiresDepthTexture && !requiresDepthPrepass;
            createDepthTexture |= (cameraData.renderType == CameraRenderType.Base && !cameraData.resolveFinalTarget);

#if UNITY_ANDROID || UNITY_WEBGL
            if (SystemInfo.graphicsDeviceType != GraphicsDeviceType.Vulkan)
            {
                // GLES can not use render texture's depth buffer with the color buffer of the backbuffer
                // in such case we create a color texture for it too.
                createColorTexture |= createDepthTexture;
            }
#endif
            // Configure all settings required to start a new camera stack (base camera only)
            if (cameraData.renderType == CameraRenderType.Base)
            {
                m_ActiveCameraColorAttachment = (createColorTexture) ? m_CameraColorAttachment : RenderTargetHandle.CameraTarget;
                m_ActiveCameraDepthAttachment = (createDepthTexture) ? m_CameraDepthAttachment : RenderTargetHandle.CameraTarget;

                bool intermediateRenderTexture = createColorTexture || createDepthTexture;

                // Don't create textures for Overlay cameras, as they are already overlaying on top of created textures.
                bool createTextures = intermediateRenderTexture;
                if (createTextures)
                {
                    CreateCameraRenderTarget(context, ref renderingData.cameraData);
                }

                // If rendering to an intermediate render texture, we don't have to create an MSAA backbuffer.
                int backbufferMsaaSamples = (intermediateRenderTexture) ? 1 : cameraTargetDescriptor.msaaSamples;

                if (Camera.main == camera && camera.cameraType == CameraType.Game && cameraData.targetTexture == null)
                {
                    SetupBackbufferFormat(backbufferMsaaSamples, isStereoEnabled);
                }
            }
            else
            {
                if (m_SplitUICameraAndSceneCameraRenderer)
                {
                    RefreshCameraColorAttachment(context, ref renderingData.cameraData);
                }
                else
                {
                    m_ActiveCameraColorAttachment = m_CameraColorAttachment;
                    m_ActiveCameraDepthAttachment = m_CameraDepthAttachment;
                }
            }

            ConfigureCameraTarget(m_ActiveCameraColorAttachment.Identifier(), m_ActiveCameraDepthAttachment.Identifier());
        }
        public static RenderTextureDescriptor GetBlendStyleRenderTextureDesc(this IRenderPass2D pass, RenderingData renderingData)
        {
            var renderTextureScale = Mathf.Clamp(pass.rendererData.lightRenderTextureScale, 0.01f, 1.0f);
            var width  = (int)(renderingData.cameraData.cameraTargetDescriptor.width * renderTextureScale);
            var height = (int)(renderingData.cameraData.cameraTargetDescriptor.height * renderTextureScale);

            var descriptor = new RenderTextureDescriptor(width, height);

            descriptor.graphicsFormat   = GetRenderTextureFormat();
            descriptor.useMipMap        = false;
            descriptor.autoGenerateMips = false;
            descriptor.depthBufferBits  = 0;
            descriptor.msaaSamples      = 1;
            descriptor.dimension        = TextureDimension.Tex2D;

            return descriptor;
        }
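        // Hedged usage sketch: the descriptor is typically passed straight to a temporary render
        // texture request on the pass's command buffer (the rtId handle below is illustrative):
        //
        //     var desc = pass.GetBlendStyleRenderTextureDesc(renderingData);
        //     cmd.GetTemporaryRT(rtId, desc, FilterMode.Bilinear);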
 /// <inheritdoc/>
 public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
 {
     RenderMainLightCascadeShadowmap(ref context, ref renderingData.cullResults, ref renderingData.lightData, ref renderingData.shadowData);
 }
        internal void ExecuteNativeRenderPass(ScriptableRenderContext context, ScriptableRenderPass renderPass, CameraData cameraData, ref RenderingData renderingData)
        {
            using (new ProfilingScope(null, Profiling.execute))
            {
                int     currentPassIndex       = renderPass.renderPassQueueIndex;
                Hash128 currentPassHash        = m_PassIndexToPassHash[currentPassIndex];
                int[]   currentMergeablePasses = m_MergeableRenderPassesMap[currentPassHash];

                int validColorBuffersCount = m_RenderPassesAttachmentCount[currentPassHash];

                bool isLastPass = renderPass.isLastPass;
                // TODO: review the lastPassToBB logic to make it work with merged passes
                // keep track if this is the current camera's last pass and the RT is the backbuffer (BuiltinRenderTextureType.CameraTarget)
                bool isLastPassToBB = isLastPass && (m_ActiveColorAttachmentDescriptors[0].loadStoreTarget ==
                                                     BuiltinRenderTextureType.CameraTarget);
                var  depthOnly = renderPass.depthOnly || (cameraData.targetTexture != null && cameraData.targetTexture.graphicsFormat == GraphicsFormat.DepthAuto);
                bool useDepth  = depthOnly || (!renderPass.overrideCameraTarget || (renderPass.overrideCameraTarget && renderPass.depthAttachment != BuiltinRenderTextureType.CameraTarget)) &&
                                 (!(isLastPassToBB || (isLastPass && cameraData.camera.targetTexture != null)));

                var attachments =
                    new NativeArray <AttachmentDescriptor>(useDepth && !depthOnly ? validColorBuffersCount + 1 : 1,
                                                           Allocator.Temp);

                for (int i = 0; i < validColorBuffersCount; ++i)
                {
                    attachments[i] = m_ActiveColorAttachmentDescriptors[i];
                }

                if (useDepth && !depthOnly)
                {
                    attachments[validColorBuffersCount] = m_ActiveDepthAttachmentDescriptor;
                }

                var rpDesc = InitializeRenderPassDescriptor(cameraData, renderPass);

                int validPassCount = GetValidPassIndexCount(currentMergeablePasses);

                var attachmentIndicesCount = GetSubPassAttachmentIndicesCount(renderPass);

                var attachmentIndices = new NativeArray <int>(!depthOnly ? (int)attachmentIndicesCount : 0, Allocator.Temp);
                if (!depthOnly)
                {
                    for (int i = 0; i < attachmentIndicesCount; ++i)
                    {
                        attachmentIndices[i] = renderPass.m_InputAttachmentIndices[i];
                    }
                }

                if (validPassCount == 1 || currentMergeablePasses[0] == currentPassIndex) // Check if it's the first pass
                {
                    context.BeginRenderPass(rpDesc.w, rpDesc.h, Math.Max(rpDesc.samples, 1), attachments,
                                            useDepth ? (!depthOnly ? validColorBuffersCount : 0) : -1);
                    attachments.Dispose();

                    context.BeginSubPass(attachmentIndices);

                    m_LastBeginSubpassPassIndex = currentPassIndex;
                }
                else
                {
                    if (!AreAttachmentIndicesCompatible(m_ActiveRenderPassQueue[m_LastBeginSubpassPassIndex], m_ActiveRenderPassQueue[currentPassIndex]))
                    {
                        context.EndSubPass();
                        context.BeginSubPass(attachmentIndices);

                        m_LastBeginSubpassPassIndex = currentPassIndex;
                    }
                }

                attachmentIndices.Dispose();

                renderPass.Execute(context, ref renderingData);

                if (validPassCount == 1 || currentMergeablePasses[validPassCount - 1] == currentPassIndex) // Check if it's the last pass
                {
                    context.EndSubPass();
                    context.EndRenderPass();

                    m_LastBeginSubpassPassIndex = 0;
                }

                for (int i = 0; i < m_ActiveColorAttachmentDescriptors.Length; ++i)
                {
                    m_ActiveColorAttachmentDescriptors[i] = RenderingUtils.emptyAttachment;
                }

                m_ActiveDepthAttachmentDescriptor = RenderingUtils.emptyAttachment;
            }
        }
 // Here you can implement the rendering logic.
 // Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
 // https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
 // You don't have to call ScriptableRenderContext.Submit; the render pipeline will call it at specific points in the pipeline.
 public override void Execute(ScriptableRenderContext context, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
 {
 }
        public static void RenderLights(this IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmd, int layerToRender, ref LayerBatch layerBatch, ref RenderTextureDescriptor rtDesc)
        {
            // Before rendering the lights, cache some values that are expensive to get/calculate
            var culledLights = pass.rendererData.lightCullResult.visibleLights;

            for (var i = 0; i < culledLights.Count; i++)
            {
                culledLights[i].CacheValues();
            }

            ShadowCasterGroup2DManager.CacheValues();


            var blendStyles = pass.rendererData.lightBlendStyles;

            for (var i = 0; i < blendStyles.Length; ++i)
            {
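                // Skip blend styles that no light in this layer batch uses.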
                if ((layerBatch.lightStats.blendStylesUsed & (uint)(1 << i)) == 0)
                {
                    continue;
                }

                var sampleName = blendStyles[i].name;
                cmd.BeginSample(sampleName);

                if (!Light2DManager.GetGlobalColor(layerToRender, i, out var clearColor))
                {
                    clearColor = Color.black;
                }

                var anyLights = (layerBatch.lightStats.blendStylesWithLights & (uint)(1 << i)) != 0;

                var desc = rtDesc;
                if (!anyLights) // No lights -- create tiny texture
                {
                    desc.width = desc.height = 4;
                }
                var identifier = layerBatch.GetRTId(cmd, desc, i);

                cmd.SetRenderTarget(identifier,
                                    RenderBufferLoadAction.DontCare,
                                    RenderBufferStoreAction.Store,
                                    RenderBufferLoadAction.DontCare,
                                    RenderBufferStoreAction.DontCare);
                cmd.ClearRenderTarget(false, true, clearColor);

                if (anyLights)
                {
                    RenderLightSet(
                        pass, renderingData,
                        i,
                        cmd,
                        layerToRender,
                        identifier,
                        pass.rendererData.lightCullResult.visibleLights
                        );
                }

                cmd.EndSample(sampleName);
            }
        }
        public static void RenderNormals(this IRenderPass2D pass, ScriptableRenderContext context, RenderingData renderingData, DrawingSettings drawSettings, FilteringSettings filterSettings, RenderTargetIdentifier depthTarget, CommandBuffer cmd, LightStats lightStats)
        {
            using (new ProfilingScope(cmd, m_ProfilingSampler))
            {
                // figure out the scale
                var normalRTScale = 0.0f;

                if (depthTarget != BuiltinRenderTextureType.None)
                {
                    normalRTScale = 1.0f;
                }
                else
                {
                    normalRTScale = Mathf.Clamp(pass.rendererData.lightRenderTextureScale, 0.01f, 1.0f);
                }

                pass.CreateNormalMapRenderTexture(renderingData, cmd, normalRTScale);


                var msaaEnabled = renderingData.cameraData.cameraTargetDescriptor.msaaSamples > 1;
                var storeAction = msaaEnabled ? RenderBufferStoreAction.Resolve : RenderBufferStoreAction.Store;
                var clearFlag   = pass.rendererData.useDepthStencilBuffer ? ClearFlag.All : ClearFlag.Color;
                if (depthTarget != BuiltinRenderTextureType.None)
                {
                    CoreUtils.SetRenderTarget(cmd,
                                              pass.rendererData.normalsRenderTarget, RenderBufferLoadAction.DontCare, storeAction,
                                              depthTarget, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store,
                                              clearFlag, k_NormalClearColor);
                }
                else
                {
                    CoreUtils.SetRenderTarget(cmd, pass.rendererData.normalsRenderTarget, RenderBufferLoadAction.DontCare, storeAction, clearFlag, k_NormalClearColor);
                }

                context.ExecuteCommandBuffer(cmd);
                cmd.Clear();

                drawSettings.SetShaderPassName(0, k_NormalsRenderingPassName);
                context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filterSettings);
            }
        }
        public static void RenderLightVolumes(this IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmd, int layerToRender, int endLayerValue,
                                              RenderTargetIdentifier renderTexture, RenderTargetIdentifier depthTexture, RenderBufferStoreAction intermediateStoreAction,
                                              RenderBufferStoreAction finalStoreAction, bool requiresRTInit, List <Light2D> lights)
        {
            var maxShadowLightCount = ShadowRendering.maxTextureCount * 4;  // Now encodes shadows into RGBA as well as separate textures

            NativeArray <bool> doesLightAtIndexHaveShadows = new NativeArray <bool>(lights.Count, Allocator.Temp);

            // This case should never happen, but if it does it may cause an infinite loop later.
            if (maxShadowLightCount < 1)
            {
                Debug.LogError("maxShadowLightCount cannot be less than 1");
                return;
            }

            // Determine the last light with volumetric shadows to be rendered, so we can use a different store action after rendering its volumetric shadows
            int useFinalStoreActionAfter = lights.Count;

            if (intermediateStoreAction != finalStoreAction)
            {
                for (int i = lights.Count - 1; i >= 0; i--)
                {
                    if (lights[i].renderVolumetricShadows)
                    {
                        useFinalStoreActionAfter = i;
                        break;
                    }
                }
            }

            // Break up light rendering into batches for the purpose of shadow casting
            var lightIndex = 0;

            while (lightIndex < lights.Count)
            {
                var remainingLights = (uint)lights.Count - lightIndex;
                var batchedLights   = 0;

                // Add lights to our batch until the number of shadow textures reaches the maxShadowTextureCount
                var shadowLightCount = 0;
                while (batchedLights < remainingLights && shadowLightCount < maxShadowLightCount)
                {
                    int curLightIndex = lightIndex + batchedLights;
                    var light         = lights[curLightIndex];

                    if (CanCastVolumetricShadows(light, endLayerValue))
                    {
                        doesLightAtIndexHaveShadows[curLightIndex] = false;
                        if (ShadowRendering.PrerenderShadows(pass, renderingData, cmd, layerToRender, light, shadowLightCount, light.shadowVolumeIntensity))
                        {
                            doesLightAtIndexHaveShadows[curLightIndex] = true;
                            shadowLightCount++;
                        }
                    }
                    batchedLights++;
                }

                // Set the current RT to the light RT
                if (shadowLightCount > 0 || requiresRTInit)
                {
                    var storeAction = lightIndex + batchedLights >= useFinalStoreActionAfter ? finalStoreAction : intermediateStoreAction;
                    cmd.SetRenderTarget(renderTexture, RenderBufferLoadAction.Load, storeAction, depthTexture, RenderBufferLoadAction.Load, storeAction);
                    requiresRTInit = false;
                }

                // Render all the lights.
                shadowLightCount = 0;
                for (var lightIndexOffset = 0; lightIndexOffset < batchedLights; lightIndexOffset++)
                {
                    var light = lights[(int)(lightIndex + lightIndexOffset)];

                    if (light.lightType == Light2D.LightType.Global)
                    {
                        continue;
                    }

                    if (light.volumeIntensity <= 0.0f || !light.volumeIntensityEnabled)
                    {
                        continue;
                    }

                    var topMostLayerValue = light.GetTopMostLitLayer();
                    if (endLayerValue == topMostLayerValue) // this implies the layer is correct
                    {
                        var lightVolumeMaterial = pass.rendererData.GetLightMaterial(light, true);
                        var lightMesh           = light.lightMesh;

                        // Set the shadow texture to read from.
                        if (doesLightAtIndexHaveShadows[lightIndex + lightIndexOffset])
                        {
                            ShadowRendering.SetGlobalShadowTexture(cmd, light, shadowLightCount++);
                        }
                        else
                        {
                            ShadowRendering.DisableGlobalShadowTexture(cmd);
                        }

                        if (light.lightType == Light2D.LightType.Sprite && light.lightCookieSprite != null && light.lightCookieSprite.texture != null)
                        {
                            cmd.SetGlobalTexture(k_CookieTexID, light.lightCookieSprite.texture);
                        }

                        SetGeneralLightShaderGlobals(pass, cmd, light);

                        // Is this needed
                        if (light.normalMapQuality != Light2D.NormalMapQuality.Disabled || light.lightType == Light2D.LightType.Point)
                        {
                            SetPointLightShaderGlobals(pass, cmd, light);
                        }

                        // Could be combined...
                        if (light.lightType == Light2D.LightType.Parametric || light.lightType == Light2D.LightType.Freeform || light.lightType == Light2D.LightType.Sprite)
                        {
                            cmd.DrawMesh(lightMesh, light.transform.localToWorldMatrix, lightVolumeMaterial);
                        }
                        else if (light.lightType == Light2D.LightType.Point)
                        {
                            DrawPointLight(cmd, light, lightMesh, lightVolumeMaterial);
                        }
                    }
                }


                // Release all of the temporary shadow textures
                for (var releaseIndex = shadowLightCount - 1; releaseIndex >= 0; releaseIndex--)
                {
                    ShadowRendering.ReleaseShadowRenderTexture(cmd, releaseIndex);
                }

                lightIndex += batchedLights;
            }

            doesLightAtIndexHaveShadows.Dispose();
        }
        private static void RenderLightSet(IRenderPass2D pass, RenderingData renderingData, int blendStyleIndex, CommandBuffer cmd, int layerToRender, RenderTargetIdentifier renderTexture, List <Light2D> lights)
        {
            var maxShadowLightCount = ShadowRendering.maxTextureCount * 4;
            var requiresRTInit      = true;

            // This case should never happen, but if it does it may cause an infinite loop later.
            if (maxShadowLightCount < 1)
            {
                Debug.LogError("maxShadowLightCount cannot be less than 1");
                return;
            }


            NativeArray <bool> doesLightAtIndexHaveShadows = new NativeArray <bool>(lights.Count, Allocator.Temp);

            // Break up light rendering into batches for the purpose of shadow casting
            var lightIndex = 0;

            while (lightIndex < lights.Count)
            {
                var remainingLights = (uint)lights.Count - lightIndex;
                var batchedLights   = 0;

                // Add lights to our batch until the number of shadow textures reaches the maxShadowTextureCount
                int shadowLightCount = 0;
                while (batchedLights < remainingLights && shadowLightCount < maxShadowLightCount)
                {
                    int curLightIndex = lightIndex + batchedLights;
                    var light         = lights[curLightIndex];
                    if (CanCastShadows(light, layerToRender))
                    {
                        doesLightAtIndexHaveShadows[curLightIndex] = false;
                        if (ShadowRendering.PrerenderShadows(pass, renderingData, cmd, layerToRender, light, shadowLightCount, light.shadowIntensity))
                        {
                            doesLightAtIndexHaveShadows[curLightIndex] = true;
                            shadowLightCount++;
                        }
                    }
                    batchedLights++;
                }


                // Set the current RT to the light RT
                if (shadowLightCount > 0 || requiresRTInit)
                {
                    cmd.SetRenderTarget(renderTexture, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
                    requiresRTInit = false;
                }

                // Render all the lights.
                shadowLightCount = 0;
                for (var lightIndexOffset = 0; lightIndexOffset < batchedLights; lightIndexOffset++)
                {
                    var light = lights[(int)(lightIndex + lightIndexOffset)];

                    if (light != null &&
                        light.lightType != Light2D.LightType.Global &&
                        light.blendStyleIndex == blendStyleIndex &&
                        light.IsLitLayer(layerToRender))
                    {
                        // Render light
                        var lightMaterial = pass.rendererData.GetLightMaterial(light, false);
                        if (lightMaterial == null)
                        {
                            continue;
                        }

                        var lightMesh = light.lightMesh;
                        if (lightMesh == null)
                        {
                            continue;
                        }

                        // Set the shadow texture to read from
                        if (doesLightAtIndexHaveShadows[lightIndex + lightIndexOffset])
                        {
                            ShadowRendering.SetGlobalShadowTexture(cmd, light, shadowLightCount++);
                        }
                        else
                        {
                            ShadowRendering.DisableGlobalShadowTexture(cmd);
                        }


                        if (light.lightType == Light2D.LightType.Sprite && light.lightCookieSprite != null && light.lightCookieSprite.texture != null)
                        {
                            cmd.SetGlobalTexture(k_CookieTexID, light.lightCookieSprite.texture);
                        }

                        SetGeneralLightShaderGlobals(pass, cmd, light);

                        if (light.normalMapQuality != Light2D.NormalMapQuality.Disabled || light.lightType == Light2D.LightType.Point)
                        {
                            SetPointLightShaderGlobals(pass, cmd, light);
                        }

                        // Light code could be combined...
                        if (light.lightType == (Light2D.LightType)Light2D.DeprecatedLightType.Parametric || light.lightType == Light2D.LightType.Freeform || light.lightType == Light2D.LightType.Sprite)
                        {
                            cmd.DrawMesh(lightMesh, light.transform.localToWorldMatrix, lightMaterial);
                        }
                        else if (light.lightType == Light2D.LightType.Point)
                        {
                            DrawPointLight(cmd, light, lightMesh, lightMaterial);
                        }
                    }
                }

                // Release all of the temporary shadow textures
                for (var releaseIndex = shadowLightCount - 1; releaseIndex >= 0; releaseIndex--)
                {
                    ShadowRendering.ReleaseShadowRenderTexture(cmd, releaseIndex);
                }

                lightIndex += batchedLights;
            }

            doesLightAtIndexHaveShadows.Dispose();
        }
        public static void CreateCameraSortingLayerRenderTexture(this IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmd, Downsampling downsamplingMethod)
        {
            var renderTextureScale = 1.0f;

            if (downsamplingMethod == Downsampling._2xBilinear)
            {
                renderTextureScale = 0.5f;
            }
            else if (downsamplingMethod == Downsampling._4xBox || downsamplingMethod == Downsampling._4xBilinear)
            {
                renderTextureScale = 0.25f;
            }

            var width  = (int)(renderingData.cameraData.cameraTargetDescriptor.width * renderTextureScale);
            var height = (int)(renderingData.cameraData.cameraTargetDescriptor.height * renderTextureScale);

            var descriptor = new RenderTextureDescriptor(width, height);

            descriptor.graphicsFormat   = renderingData.cameraData.cameraTargetDescriptor.graphicsFormat;
            descriptor.useMipMap        = false;
            descriptor.autoGenerateMips = false;
            descriptor.depthBufferBits  = 0;
            descriptor.msaaSamples      = 1;
            descriptor.dimension        = TextureDimension.Tex2D;

            cmd.GetTemporaryRT(pass.rendererData.cameraSortingLayerRenderTargetId, descriptor, FilterMode.Bilinear);
        }
    public override void AddRenderPasses(UnityEngine.Rendering.Universal.ScriptableRenderer renderer, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
    {
        if (InteractiveWindManager.Instance == null || !InteractiveWindManager.Instance.IsReady() || !settings.IsReady())
        {
            return;
        }

        computeWindPass.Setup(InteractiveWindManager.Instance.windDataLODs);
        renderer.EnqueuePass(computeWindPass);
        finalizePass.Setup(InteractiveWindManager.Instance.windDataLODs);
        renderer.EnqueuePass(finalizePass);
    }
        private void CopyCameraSortingLayerRenderTexture(ScriptableRenderContext context, RenderingData renderingData, RenderBufferStoreAction mainTargetStoreAction)
        {
            var cmd = CommandBufferPool.Get();

            cmd.Clear();
            this.CreateCameraSortingLayerRenderTexture(renderingData, cmd, m_Renderer2DData.cameraSortingLayerDownsamplingMethod);

            Material copyMaterial = m_Renderer2DData.cameraSortingLayerDownsamplingMethod == Downsampling._4xBox ? m_SamplingMaterial : m_BlitMaterial;

            RenderingUtils.Blit(cmd, colorAttachment, m_Renderer2DData.cameraSortingLayerRenderTarget.id, copyMaterial, 0, false, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
            cmd.SetRenderTarget(colorAttachment, RenderBufferLoadAction.Load, mainTargetStoreAction,
                                depthAttachment, RenderBufferLoadAction.Load, mainTargetStoreAction);
            cmd.SetGlobalTexture(k_CameraSortingLayerTextureID, m_Renderer2DData.cameraSortingLayerRenderTarget.id);
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
        public override void AddRenderPasses(UnityEngine.Rendering.Universal.ScriptableRenderer renderer, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
        {
            var src  = renderer.cameraColorTarget;
            var dest = (settings.destination == Target.Color) ? UnityEngine.Rendering.Universal.RenderTargetHandle.CameraTarget : m_RenderTextureHandle;

            if (settings.blitMaterial == null)
            {
                Debug.LogWarningFormat("Missing Blit Material. {0} blit pass will not execute. Check for missing reference in the assigned renderer.", GetType().Name);
                return;
            }
            blitPass.Setup(src, dest);
            renderer.EnqueuePass(blitPass);
        }
 public override void SetupLights(ScriptableRenderContext context, ref RenderingData renderingData)
 {
     throw new NotSupportedException(k_ErrorMessage);
 }
 // Here you can inject one or multiple render passes in the renderer.
 // This method is called when setting up the renderer once per-camera.
 public override void AddRenderPasses(UnityEngine.Rendering.Universal.ScriptableRenderer renderer, ref UnityEngine.Rendering.Universal.RenderingData renderingData)
 {
     global::backend.Draw.unityUrpAddRenderPasses(renderer, renderingData);
     //renderer.EnqueuePass(m_ScriptablePass);
 }
            /// <inheritdoc/>
            public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
            {
                RenderTextureDescriptor cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;
                int downsampleDivider = m_CurrentSettings.Downsample ? 2 : 1;

                // Update SSAO parameters in the material
                Vector4 ssaoParams = new Vector4(
                    m_CurrentSettings.Intensity,   // Intensity
                    m_CurrentSettings.Radius,      // Radius
                    1.0f / downsampleDivider,      // Downsampling
                    m_CurrentSettings.SampleCount  // Sample count
                    );

                material.SetVector(s_SSAOParamsID, ssaoParams);

#if ENABLE_VR && ENABLE_XR_MODULE
                int eyeCount = renderingData.cameraData.xr.enabled && renderingData.cameraData.xr.singlePassEnabled ? 2 : 1;
#else
                int eyeCount = 1;
#endif
                for (int eyeIndex = 0; eyeIndex < eyeCount; eyeIndex++)
                {
                    Matrix4x4 view = renderingData.cameraData.GetViewMatrix(eyeIndex);
                    Matrix4x4 proj = renderingData.cameraData.GetProjectionMatrix(eyeIndex);
                    m_CameraViewProjections[eyeIndex] = proj * view;

                    // camera view space without translation, used by SSAO.hlsl ReconstructViewPos() to calculate view vector.
                    Matrix4x4 cview = view;
                    cview.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
                    Matrix4x4 cviewProj    = proj * cview;
                    Matrix4x4 cviewProjInv = cviewProj.inverse;

                    Vector4 topLeftCorner    = cviewProjInv.MultiplyPoint(new Vector4(-1, 1, -1, 1));
                    Vector4 topRightCorner   = cviewProjInv.MultiplyPoint(new Vector4(1, 1, -1, 1));
                    Vector4 bottomLeftCorner = cviewProjInv.MultiplyPoint(new Vector4(-1, -1, -1, 1));
                    Vector4 farCentre        = cviewProjInv.MultiplyPoint(new Vector4(0, 0, 1, 1));
                    m_CameraTopLeftCorner[eyeIndex] = topLeftCorner;
                    m_CameraXExtent[eyeIndex]       = topRightCorner - topLeftCorner;
                    m_CameraYExtent[eyeIndex]       = bottomLeftCorner - topLeftCorner;
                    m_CameraZExtent[eyeIndex]       = farCentre;
                }

                material.SetVector(s_ProjectionParams2ID, new Vector4(1.0f / renderingData.cameraData.camera.nearClipPlane, 0.0f, 0.0f, 0.0f));
                material.SetMatrixArray(s_CameraViewProjectionsID, m_CameraViewProjections);
                material.SetVectorArray(s_CameraViewTopLeftCornerID, m_CameraTopLeftCorner);
                material.SetVectorArray(s_CameraViewXExtentID, m_CameraXExtent);
                material.SetVectorArray(s_CameraViewYExtentID, m_CameraYExtent);
                material.SetVectorArray(s_CameraViewZExtentID, m_CameraZExtent);

                // Update keywords
                CoreUtils.SetKeyword(material, k_OrthographicCameraKeyword, renderingData.cameraData.camera.orthographic);

                ScreenSpaceAmbientOcclusionSettings.DepthSource source = this.isRendererDeferred
                    ? ScreenSpaceAmbientOcclusionSettings.DepthSource.DepthNormals
                    : m_CurrentSettings.Source;

                if (source == ScreenSpaceAmbientOcclusionSettings.DepthSource.Depth)
                {
                    switch (m_CurrentSettings.NormalSamples)
                    {
                    case ScreenSpaceAmbientOcclusionSettings.NormalQuality.Low:
                        CoreUtils.SetKeyword(material, k_NormalReconstructionLowKeyword, true);
                        CoreUtils.SetKeyword(material, k_NormalReconstructionMediumKeyword, false);
                        CoreUtils.SetKeyword(material, k_NormalReconstructionHighKeyword, false);
                        break;

                    case ScreenSpaceAmbientOcclusionSettings.NormalQuality.Medium:
                        CoreUtils.SetKeyword(material, k_NormalReconstructionLowKeyword, false);
                        CoreUtils.SetKeyword(material, k_NormalReconstructionMediumKeyword, true);
                        CoreUtils.SetKeyword(material, k_NormalReconstructionHighKeyword, false);
                        break;

                    case ScreenSpaceAmbientOcclusionSettings.NormalQuality.High:
                        CoreUtils.SetKeyword(material, k_NormalReconstructionLowKeyword, false);
                        CoreUtils.SetKeyword(material, k_NormalReconstructionMediumKeyword, false);
                        CoreUtils.SetKeyword(material, k_NormalReconstructionHighKeyword, true);
                        break;

                    default:
                        throw new ArgumentOutOfRangeException();
                    }
                }

                switch (source)
                {
                case ScreenSpaceAmbientOcclusionSettings.DepthSource.DepthNormals:
                    CoreUtils.SetKeyword(material, k_SourceDepthKeyword, false);
                    CoreUtils.SetKeyword(material, k_SourceDepthNormalsKeyword, true);
                    break;

                default:
                    CoreUtils.SetKeyword(material, k_SourceDepthKeyword, true);
                    CoreUtils.SetKeyword(material, k_SourceDepthNormalsKeyword, false);
                    break;
                }

                // Get temporary render textures
                m_Descriptor                 = cameraTargetDescriptor;
                m_Descriptor.msaaSamples     = 1;
                m_Descriptor.depthBufferBits = 0;
                m_Descriptor.width          /= downsampleDivider;
                m_Descriptor.height         /= downsampleDivider;
                m_Descriptor.colorFormat     = RenderTextureFormat.ARGB32;
                cmd.GetTemporaryRT(s_SSAOTexture1ID, m_Descriptor, FilterMode.Bilinear);

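                // Restore full resolution for the remaining temporary targets.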
                m_Descriptor.width  *= downsampleDivider;
                m_Descriptor.height *= downsampleDivider;
                cmd.GetTemporaryRT(s_SSAOTexture2ID, m_Descriptor, FilterMode.Bilinear);
                cmd.GetTemporaryRT(s_SSAOTexture3ID, m_Descriptor, FilterMode.Bilinear);

                // Configure targets and clear color
                ConfigureTarget(m_CurrentSettings.AfterOpaque ? m_Renderer.cameraColorTarget : s_SSAOTexture2ID);
                ConfigureClear(ClearFlag.None, Color.white);
            }