Example #1
static RenderTextureDescriptor CreateRenderTextureDescriptor(Camera camera, float renderScale,
                                                                     bool isStereoEnabled, bool isHdrEnabled, int msaaSamples)
        {
            RenderTextureDescriptor desc;
            RenderTextureFormat     renderTextureFormatDefault = RenderTextureFormat.Default;

            if (isStereoEnabled)
            {
                desc = XRGraphics.eyeTextureDesc;
                renderTextureFormatDefault = desc.colorFormat;
            }
            else
            {
                desc        = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
                desc.width  = (int)((float)desc.width * renderScale);
                desc.height = (int)((float)desc.height * renderScale);
            }

            // TODO: when preserve framebuffer alpha is enabled we can't use RGB111110Float format.
            bool useRGB111110             = Application.isMobilePlatform && RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.RGB111110Float);
            RenderTextureFormat hdrFormat = (useRGB111110) ? RenderTextureFormat.RGB111110Float : RenderTextureFormat.DefaultHDR;

            desc.colorFormat       = isHdrEnabled ? hdrFormat : renderTextureFormatDefault;
            desc.depthBufferBits   = 32;
            desc.enableRandomWrite = false;
            desc.sRGB            = (QualitySettings.activeColorSpace == ColorSpace.Linear);
            desc.msaaSamples     = msaaSamples;
            desc.bindMS          = false;
            desc.useDynamicScale = camera.allowDynamicResolution;
            return(desc);
        }
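A minimal usage sketch, assuming a caller that builds the per-camera descriptor during setup; the argument values below are illustrative and not taken from the example.

        // Hypothetical caller (illustrative values only): build the camera target
        // descriptor once during camera setup and reuse it for intermediate textures.
        var cameraTargetDescriptor = CreateRenderTextureDescriptor(
            camera,
            renderScale: 1.0f,               // no resolution scaling
            isStereoEnabled: false,
            isHdrEnabled: camera.allowHDR,
            msaaSamples: 4);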
Example #2
        private static GraphicsFormat GetDefaultGraphicsFormat(CameraData cameraData, bool isDepth = false)
        {
            if (isDepth)
            {
                return(SystemInfo.GetGraphicsFormat(DefaultFormat.DepthStencil));
            }
            if (cameraData.isHdrEnabled)
            {
                GraphicsFormat hdrFormat = GraphicsFormat.None;

                if (!Graphics.preserveFramebufferAlpha &&
                    RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.B10G11R11_UFloatPack32,
                                                          FormatUsage.Linear | FormatUsage.Render))
                {
                    hdrFormat = GraphicsFormat.B10G11R11_UFloatPack32;
                }
                else if (RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat,
                                                               FormatUsage.Linear | FormatUsage.Render))
                {
                    hdrFormat = GraphicsFormat.R16G16B16A16_SFloat;
                }
                else
                {
                    hdrFormat = SystemInfo.GetGraphicsFormat(DefaultFormat.HDR);
                }

                return(hdrFormat);
            }

            return(SystemInfo.GetGraphicsFormat(DefaultFormat.LDR));
        }
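For context, a hedged sketch of how this query might feed native render pass attachment descriptors; cameraData is assumed to be in scope.

        // Hypothetical usage (sketch only): pick color/depth formats for native render pass attachments.
        GraphicsFormat colorFormat = GetDefaultGraphicsFormat(cameraData);
        GraphicsFormat depthFormat = GetDefaultGraphicsFormat(cameraData, isDepth: true);

        var colorAttachment = new AttachmentDescriptor(colorFormat);
        var depthAttachment = new AttachmentDescriptor(depthFormat);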
Example #3
        public UniversalRenderPipeline(UniversalRenderPipelineAsset asset)
        {
            SetSupportedRenderingFeatures();

            PerFrameBuffer._GlossyEnvironmentColor = Shader.PropertyToID("_GlossyEnvironmentColor");
            PerFrameBuffer._SubtractiveShadowColor = Shader.PropertyToID("_SubtractiveShadowColor");

            PerFrameBuffer._Time           = Shader.PropertyToID("_Time");
            PerFrameBuffer._SinTime        = Shader.PropertyToID("_SinTime");
            PerFrameBuffer._CosTime        = Shader.PropertyToID("_CosTime");
            PerFrameBuffer.unity_DeltaTime = Shader.PropertyToID("unity_DeltaTime");
            PerFrameBuffer._TimeParameters = Shader.PropertyToID("_TimeParameters");

            PerCameraBuffer._InvCameraViewProj   = Shader.PropertyToID("_InvCameraViewProj");
            PerCameraBuffer._ScreenParams        = Shader.PropertyToID("_ScreenParams");
            PerCameraBuffer._ScaledScreenParams  = Shader.PropertyToID("_ScaledScreenParams");
            PerCameraBuffer._WorldSpaceCameraPos = Shader.PropertyToID("_WorldSpaceCameraPos");

            // Let the engine know MSAA is on, for cases where an MSAA backbuffer is supported
            if (QualitySettings.antiAliasing != asset.msaaSampleCount)
            {
                QualitySettings.antiAliasing = asset.msaaSampleCount;
            }

            // For compatibility reasons we also match the old LightweightPipeline tag.
            Shader.globalRenderPipeline = "UniversalPipeline,LightweightPipeline";

            Lightmapping.SetDelegate(lightsDelegate);

            CameraCaptureBridge.enabled = true;

            RenderingUtils.ClearSystemInfoCache();
        }
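A hedged teardown sketch that mirrors the global state set in this constructor; it is not URP's actual Dispose, and the method name is an assumption.

        // Hypothetical cleanup counterpart (sketch only): undo the global state
        // configured in the constructor above.
        void CleanupGlobalPipelineState()
        {
            Shader.globalRenderPipeline = "";
            Lightmapping.ResetDelegate();
            CameraCaptureBridge.enabled = false;
        }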
Example #4
 static ShadowUtils()
 {
     m_ShadowmapFormat = RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.Shadowmap) && (SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2)
         ? RenderTextureFormat.Shadowmap
         : RenderTextureFormat.Depth;
     m_ForceShadowPointSampling = SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal &&
                                  GraphicsSettings.HasShaderDefine(Graphics.activeTier, BuiltinShaderDefine.UNITY_METAL_SHADOWS_USE_POINT_FILTERING);
 }
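A minimal sketch of how these cached values might be consumed when requesting a shadow map; the helper name is hypothetical.

 // Hypothetical consumer of the cached values above (sketch only).
 static RenderTexture GetTemporaryShadowMap(int width, int height)
 {
     var shadowTexture = RenderTexture.GetTemporary(width, height, 16, m_ShadowmapFormat);
     shadowTexture.filterMode = m_ForceShadowPointSampling ? FilterMode.Point : FilterMode.Bilinear;
     shadowTexture.wrapMode   = TextureWrapMode.Clamp;
     return shadowTexture;
 }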
Example #5
 public void Setup(RenderTextureDescriptor baseDescriptor)
 {
     m_RenderTextureDescriptor = baseDescriptor;
     m_RenderTextureDescriptor.depthBufferBits = 0;
     m_RenderTextureDescriptor.msaaSamples     = 1;
     m_RenderTextureDescriptor.colorFormat     = RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.R8)
         ? RenderTextureFormat.R8
         : RenderTextureFormat.ARGB32;
 }
Example #6
        private static RenderTextureDescriptor GetTemporaryShadowTextureDescriptor(int width, int height, int bits)
        {
            var format = Experimental.Rendering.GraphicsFormatUtility.GetDepthStencilFormat(bits, 0);
            RenderTextureDescriptor rtd = new RenderTextureDescriptor(width, height, Experimental.Rendering.GraphicsFormat.None, format);

            rtd.shadowSamplingMode = (RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.Shadowmap) &&
                                      (SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2)) ?
                                     ShadowSamplingMode.CompareDepths : ShadowSamplingMode.None;
            return(rtd);
        }
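A hedged sketch of a caller turning this descriptor into an actual temporary texture; the method name is assumed.

        // Hypothetical caller (sketch only): materialize the descriptor as a temporary RT.
        private static RenderTexture GetTemporaryShadowTexture(int width, int height, int bits)
        {
            var desc          = GetTemporaryShadowTextureDescriptor(width, height, bits);
            var shadowTexture = RenderTexture.GetTemporary(desc);
            shadowTexture.filterMode = FilterMode.Bilinear;
            shadowTexture.wrapMode   = TextureWrapMode.Clamp;
            return shadowTexture;
        }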
Example #7
 private void PlanarReflectionTexture(Camera cam)
 {
     if (_reflectionTexture == null)
     {
         var  res      = ReflectionResolution(cam, UniversalRenderPipeline.asset.renderScale);
         bool useHdr10 = RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.RGB111110Float);
         RenderTextureFormat hdrFormat = useHdr10 ? RenderTextureFormat.RGB111110Float : RenderTextureFormat.DefaultHDR;
         _reflectionTexture = RenderTexture.GetTemporary(res.x, res.y, 16,
                                                         GraphicsFormatUtility.GetGraphicsFormat(hdrFormat, true));
     }
     _reflectionCamera.targetTexture = _reflectionTexture;
 }
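Since the texture above comes from RenderTexture.GetTemporary, a matching release is needed somewhere; a minimal sketch, assuming a cleanup hook on the same component.

 // Hypothetical cleanup (sketch only): release the temporary reflection texture
 // so the pool can recycle it, e.g. from OnDisable of the same component.
 private void ReleaseReflectionTexture()
 {
     if (_reflectionTexture != null)
     {
         RenderTexture.ReleaseTemporary(_reflectionTexture);
         _reflectionTexture = null;
     }
 }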
Example #8
        static RenderTextureDescriptor CreateRenderTextureDescriptor(Camera camera, float renderScale,
                                                                     bool isStereoEnabled, bool isHdrEnabled, int msaaSamples, bool needsAlpha)
        {
            RenderTextureDescriptor desc;
            RenderTextureFormat     renderTextureFormatDefault = RenderTextureFormat.Default;

            // NB: There is a weird case with XR and render textures.
            // The test framework currently renders stereo tests to a target texture,
            // and in that case the descriptor needs to be initialized from the XR eyeTexture, not the render texture,
            // otherwise the current tests fail. Check: should we update the test images instead?
            if (isStereoEnabled)
            {
                desc = XRGraphics.eyeTextureDesc;
                renderTextureFormatDefault = desc.colorFormat;
            }
            else if (camera.targetTexture == null)
            {
                desc        = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
                desc.width  = (int)((float)desc.width * renderScale);
                desc.height = (int)((float)desc.height * renderScale);
            }
            else
            {
                desc = camera.targetTexture.descriptor;
            }

            if (camera.targetTexture != null)
            {
                desc.colorFormat     = camera.targetTexture.descriptor.colorFormat;
                desc.depthBufferBits = camera.targetTexture.descriptor.depthBufferBits;
                desc.msaaSamples     = camera.targetTexture.descriptor.msaaSamples;
                desc.sRGB            = camera.targetTexture.descriptor.sRGB;
            }
            else
            {
                bool use32BitHDR = !needsAlpha && RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.RGB111110Float);
                RenderTextureFormat hdrFormat = (use32BitHDR) ? RenderTextureFormat.RGB111110Float : RenderTextureFormat.DefaultHDR;

                desc.colorFormat     = isHdrEnabled ? hdrFormat : renderTextureFormatDefault;
                desc.depthBufferBits = 32;
                desc.msaaSamples     = msaaSamples;
                desc.sRGB            = (QualitySettings.activeColorSpace == ColorSpace.Linear);
            }

            desc.enableRandomWrite = false;
            desc.bindMS            = false;
            desc.useDynamicScale   = camera.allowDynamicResolution;
            return(desc);
        }
Example #9
            /// <inheritdoc/>
            public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
            {
                var desc = renderingData.cameraData.cameraTargetDescriptor;

                desc.depthBufferBits = 0;
                desc.msaaSamples     = 1;
                desc.graphicsFormat  = RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R8_UNorm, FormatUsage.Linear | FormatUsage.Render)
                    ? GraphicsFormat.R8_UNorm
                    : GraphicsFormat.B8G8R8A8_UNorm;

                RenderingUtils.ReAllocateIfNeeded(ref m_RenderTarget, desc, FilterMode.Point, TextureWrapMode.Clamp, name: "_ScreenSpaceShadowmapTexture");

                ConfigureTarget(m_RenderTarget);
                ConfigureClear(ClearFlag.None, Color.white);
            }
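RTHandles allocated through ReAllocateIfNeeded are typically released when the pass is disposed rather than per frame; a hedged sketch of such a hook, assuming the same m_RenderTarget handle.

            // Hypothetical disposal hook (sketch only): release the handle allocated above.
            public void Dispose()
            {
                m_RenderTarget?.Release();
            }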
Example #10
        private void CopyCameraSortingLayerRenderTexture(ScriptableRenderContext context, RenderingData renderingData, RenderBufferStoreAction mainTargetStoreAction)
        {
            var cmd = CommandBufferPool.Get();

            cmd.Clear();
            this.CreateCameraSortingLayerRenderTexture(renderingData, cmd, m_Renderer2DData.cameraSortingLayerDownsamplingMethod);

            Material copyMaterial = m_Renderer2DData.cameraSortingLayerDownsamplingMethod == Downsampling._4xBox ? m_SamplingMaterial : m_BlitMaterial;

            RenderingUtils.Blit(cmd, colorAttachment, m_Renderer2DData.cameraSortingLayerRenderTarget.id, copyMaterial, 0, false, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
            cmd.SetRenderTarget(colorAttachment, RenderBufferLoadAction.Load, mainTargetStoreAction,
                                depthAttachment, RenderBufferLoadAction.Load, mainTargetStoreAction);
            cmd.SetGlobalTexture(k_CameraSortingLayerTextureID, m_Renderer2DData.cameraSortingLayerRenderTarget.id);
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
Example #11
            /// <inheritdoc/>
            public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
            {
                m_RenderTextureDescriptor = renderingData.cameraData.cameraTargetDescriptor;
                m_RenderTextureDescriptor.depthBufferBits = 0;
                m_RenderTextureDescriptor.msaaSamples     = 1;
                m_RenderTextureDescriptor.graphicsFormat  = RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R8_UNorm, FormatUsage.Linear | FormatUsage.Render)
                    ? GraphicsFormat.R8_UNorm
                    : GraphicsFormat.B8G8R8A8_UNorm;

                cmd.GetTemporaryRT(m_RenderTarget.id, m_RenderTextureDescriptor, FilterMode.Point);

                RenderTargetIdentifier renderTargetTexture = m_RenderTarget.Identifier();

                ConfigureTarget(renderTargetTexture);
                ConfigureClear(ClearFlag.None, Color.white);
            }
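A temporary RT obtained with GetTemporaryRT is usually released in the matching cleanup callback; a minimal sketch, assuming the same m_RenderTarget handle is still valid there.

            /// <inheritdoc/>
            public override void OnCameraCleanup(CommandBuffer cmd)
            {
                // Hypothetical cleanup (sketch only): release the temporary RT requested above.
                cmd.ReleaseTemporaryRT(m_RenderTarget.id);
            }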
Example #12
        /// <inheritdoc/>
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            CommandBuffer cmd = CommandBufferPool.Get(m_ProfilerTag);

            using (new ProfilingSample(cmd, m_ProfilerTag))
            {
                context.ExecuteCommandBuffer(cmd);
                cmd.Clear();

                Camera camera       = renderingData.cameraData.camera;
                var    sortFlags    = (m_IsOpaque) ? renderingData.cameraData.defaultOpaqueSortFlags : SortingCriteria.CommonTransparent;
                var    drawSettings = CreateDrawingSettings(m_ShaderTagIdList, ref renderingData, sortFlags);
                context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings, ref m_RenderStateBlock);

                // Render objects that did not match any shader pass with error shader
                RenderingUtils.RenderObjectsWithError(context, ref renderingData.cullResults, camera, m_FilteringSettings, SortingCriteria.None);
            }
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
Example #13
        static RenderTextureDescriptor CreateRenderTextureDescriptor(Camera camera, float renderScale,
                                                                     bool isStereoEnabled, bool isHdrEnabled, int msaaSamples, bool needsAlpha)
        {
            RenderTextureDescriptor desc;
            RenderTextureFormat     renderTextureFormatDefault = RenderTextureFormat.Default;

            if (isStereoEnabled)
            {
                desc = XRGraphics.eyeTextureDesc;
                renderTextureFormatDefault = desc.colorFormat;
            }
            else
            {
                desc        = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
                desc.width  = (int)((float)desc.width * renderScale);
                desc.height = (int)((float)desc.height * renderScale);
            }

            bool use32BitHDR = !needsAlpha && RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.RGB111110Float);
            RenderTextureFormat hdrFormat = (use32BitHDR) ? RenderTextureFormat.RGB111110Float : RenderTextureFormat.DefaultHDR;

            if (camera.targetTexture != null)
            {
                desc.colorFormat     = camera.targetTexture.descriptor.colorFormat;
                desc.depthBufferBits = camera.targetTexture.descriptor.depthBufferBits;
                desc.msaaSamples     = camera.targetTexture.descriptor.msaaSamples;
                desc.sRGB            = camera.targetTexture.descriptor.sRGB;
            }
            else
            {
                desc.colorFormat     = isHdrEnabled ? hdrFormat : renderTextureFormatDefault;
                desc.depthBufferBits = 32;
                desc.msaaSamples     = msaaSamples;
                desc.sRGB            = (QualitySettings.activeColorSpace == ColorSpace.Linear);
            }

            desc.enableRandomWrite = false;
            desc.bindMS            = false;
            desc.useDynamicScale   = camera.allowDynamicResolution;
            return(desc);
        }
Example #14
        /// <summary>
        /// Return true if handle does not match the requirements
        /// </summary>
        /// <param name="handle">RTHandle to check (can be null).</param>
        /// <param name="width">Width of the RTHandle to match.</param>
        /// <param name="height">Height of the RTHandle to match.</param>
        /// <param name="bits">Depth bits of the RTHandle to match.</param>
        /// <param name="anisoLevel">Anisotropic filtering level of the RTHandle to match.</param>
        /// <param name="mipMapBias">Bias applied to mipmaps during filtering of the RTHandle to match.</param>
        /// <param name="name">Name of the RTHandle of the RTHandle to match.</param>
        /// <returns>If the RTHandle needs to be re-allocated</returns>
        public static bool ShadowRTNeedsReAlloc(RTHandle handle, int width, int height, int bits, int anisoLevel, float mipMapBias, string name)
        {
            if (handle == null)
            {
                return(true);
            }
            var descriptor = GetTemporaryShadowTextureDescriptor(width, height, bits);

            if (m_ForceShadowPointSampling)
            {
                if (handle.rt.filterMode != FilterMode.Point)
                {
                    return(true);
                }
            }
            else
            {
                if (handle.rt.filterMode != FilterMode.Bilinear)
                {
                    return(true);
                }
            }
            return(RenderingUtils.RTHandleNeedsReAlloc(handle, descriptor, m_ForceShadowPointSampling ? FilterMode.Point : FilterMode.Bilinear, TextureWrapMode.Clamp, true, anisoLevel, mipMapBias, name, false));
        }
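A hedged sketch of how a caller might act on this check; AllocateShadowRT is a stand-in for whatever allocation helper the project uses, not an API from the example above.

        // Hypothetical caller (sketch only). 'AllocateShadowRT' is a stand-in name for
        // the project's allocation helper, not an API shown in this example.
        public static bool ReAllocateShadowRTIfNeeded(ref RTHandle handle, int width, int height, int bits,
                                                      int anisoLevel, float mipMapBias, string name)
        {
            if (!ShadowRTNeedsReAlloc(handle, width, height, bits, anisoLevel, mipMapBias, name))
            {
                return false;
            }
            handle?.Release();
            handle = AllocateShadowRT(width, height, bits, anisoLevel, mipMapBias, name);
            return true;
        }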
Example #15
        internal void SetNativeRenderPassMRTAttachmentList(ScriptableRenderPass renderPass, ref CameraData cameraData, bool needCustomCameraColorClear, ClearFlag clearFlag)
        {
            using (new ProfilingScope(null, Profiling.setMRTAttachmentsList))
            {
                int     currentPassIndex       = renderPass.renderPassQueueIndex;
                Hash128 currentPassHash        = m_PassIndexToPassHash[currentPassIndex];
                int[]   currentMergeablePasses = m_MergeableRenderPassesMap[currentPassHash];

                // Skip if this is not the first pass in the mergeable list
                if (currentMergeablePasses.First() != currentPassIndex)
                {
                    return;
                }

                m_RenderPassesAttachmentCount[currentPassHash] = 0;

                int currentAttachmentIdx = 0;
                foreach (var passIdx in currentMergeablePasses)
                {
                    if (passIdx == -1)
                    {
                        break;
                    }
                    ScriptableRenderPass pass = m_ActiveRenderPassQueue[passIdx];

                    for (int i = 0; i < pass.m_ColorAttachmentIndices.Length; ++i)
                    {
                        pass.m_ColorAttachmentIndices[i] = -1;
                    }

                    for (int i = 0; i < pass.m_InputAttachmentIndices.Length; ++i)
                    {
                        pass.m_InputAttachmentIndices[i] = -1;
                    }

                    uint validColorBuffersCount = RenderingUtils.GetValidColorBufferCount(pass.colorAttachments);
                    // TODO: review the lastPassToBB logic to make it work with merged passes
                    bool isLastPassToBB = false;

                    for (int i = 0; i < validColorBuffersCount; ++i)
                    {
                        AttachmentDescriptor currentAttachmentDescriptor =
                            new AttachmentDescriptor(pass.renderTargetFormat[i] != GraphicsFormat.None ? pass.renderTargetFormat[i] : GetDefaultGraphicsFormat(cameraData));

                        // if this is the current camera's last pass, also check if one of the RTs is the backbuffer (BuiltinRenderTextureType.CameraTarget)
                        isLastPassToBB |= pass.isLastPass && (pass.colorAttachments[i] == BuiltinRenderTextureType.CameraTarget);

                        int existingAttachmentIndex = FindAttachmentDescriptorIndexInList(pass.colorAttachments[i], m_ActiveColorAttachmentDescriptors);

                        if (existingAttachmentIndex == -1)
                        {
                            // add a new attachment
                            m_ActiveColorAttachmentDescriptors[currentAttachmentIdx] = currentAttachmentDescriptor;
                            m_ActiveColorAttachmentDescriptors[currentAttachmentIdx].ConfigureTarget(pass.colorAttachments[i], (clearFlag & ClearFlag.Color) == 0, true);

                            if (pass.colorAttachments[i] == m_CameraColorTarget && needCustomCameraColorClear && (clearFlag & ClearFlag.Color) != 0)
                            {
                                m_ActiveColorAttachmentDescriptors[currentAttachmentIdx].ConfigureClear(CoreUtils.ConvertSRGBToActiveColorSpace(cameraData.camera.backgroundColor), 1.0f, 0);
                            }
                            else if ((pass.clearFlag & ClearFlag.Color) != 0)
                            {
                                m_ActiveColorAttachmentDescriptors[currentAttachmentIdx].ConfigureClear(CoreUtils.ConvertSRGBToActiveColorSpace(pass.clearColor), 1.0f, 0);
                            }

                            pass.m_ColorAttachmentIndices[i] = currentAttachmentIdx;
                            currentAttachmentIdx++;
                            m_RenderPassesAttachmentCount[currentPassHash]++;
                        }
                        else
                        {
                            // attachment was already present
                            pass.m_ColorAttachmentIndices[i] = existingAttachmentIndex;
                        }
                    }

                    if (PassHasInputAttachments(pass))
                    {
                        SetupInputAttachmentIndices(pass);
                    }

                    // TODO: this is redundant and is being setup for each attachment. Needs to be done only once per mergeable pass list (we need to make sure mergeable passes use the same depth!)
                    m_ActiveDepthAttachmentDescriptor = new AttachmentDescriptor(GraphicsFormat.DepthAuto);
                    m_ActiveDepthAttachmentDescriptor.ConfigureTarget(pass.depthAttachment, (clearFlag & ClearFlag.DepthStencil) == 0, !isLastPassToBB);
                    if ((clearFlag & ClearFlag.DepthStencil) != 0)
                    {
                        m_ActiveDepthAttachmentDescriptor.ConfigureClear(Color.black, 1.0f, 0);
                    }
                }
            }
        }
Example #16
        internal void SetNativeRenderPassAttachmentList(ScriptableRenderPass renderPass, ref CameraData cameraData, RTHandle passColorAttachment, RTHandle passDepthAttachment, ClearFlag finalClearFlag, Color finalClearColor)
        {
            using (new ProfilingScope(null, Profiling.setAttachmentList))
            {
                int     currentPassIndex       = renderPass.renderPassQueueIndex;
                Hash128 currentPassHash        = m_PassIndexToPassHash[currentPassIndex];
                int[]   currentMergeablePasses = m_MergeableRenderPassesMap[currentPassHash];

                // Skip if not the first pass
                if (currentMergeablePasses.First() != currentPassIndex)
                {
                    return;
                }

                m_RenderPassesAttachmentCount[currentPassHash] = 0;

                UpdateFinalStoreActions(currentMergeablePasses, cameraData);

                int currentAttachmentIdx = 0;
                foreach (var passIdx in currentMergeablePasses)
                {
                    if (passIdx == -1)
                    {
                        break;
                    }
                    ScriptableRenderPass pass = m_ActiveRenderPassQueue[passIdx];

                    for (int i = 0; i < pass.m_ColorAttachmentIndices.Length; ++i)
                    {
                        pass.m_ColorAttachmentIndices[i] = -1;
                    }

                    AttachmentDescriptor currentAttachmentDescriptor;
                    var usesTargetTexture = cameraData.targetTexture != null;
                    var depthOnly         = (pass.colorAttachmentHandle.rt != null && IsDepthOnlyRenderTexture(pass.colorAttachmentHandle.rt)) || (usesTargetTexture && IsDepthOnlyRenderTexture(cameraData.targetTexture));

                    int samples;
                    RenderTargetIdentifier colorAttachmentTarget;
                    // If we are not rendering to the backbuffer, we have the RT and its descriptor information,
                    // and we create a new RenderTargetIdentifier to ignore the current depth slice (which might get bypassed in XR setups eventually)
                    if (new RenderTargetIdentifier(passColorAttachment.nameID, 0, depthSlice: 0) != BuiltinRenderTextureType.CameraTarget)
                    {
                        currentAttachmentDescriptor = new AttachmentDescriptor(depthOnly ? passColorAttachment.rt.descriptor.depthStencilFormat : passColorAttachment.rt.descriptor.graphicsFormat);
                        samples = passColorAttachment.rt.descriptor.msaaSamples;
                        colorAttachmentTarget = passColorAttachment.nameID;
                    }
                    else // In this case we might be rendering to the targetTexture or the backbuffer, so less information is available
                    {
                        currentAttachmentDescriptor = new AttachmentDescriptor(GetDefaultGraphicsFormat(cameraData, depthOnly));
                        samples = cameraData.cameraTargetDescriptor.msaaSamples;
                        colorAttachmentTarget = usesTargetTexture ? new RenderTargetIdentifier(cameraData.targetTexture) : BuiltinRenderTextureType.CameraTarget;
                    }

                    currentAttachmentDescriptor.ConfigureTarget(colorAttachmentTarget, ((uint)finalClearFlag & (uint)ClearFlag.Color) == 0, true);

                    if (PassHasInputAttachments(pass))
                    {
                        SetupInputAttachmentIndices(pass);
                    }

                    // TODO: this is redundant and is being setup for each attachment. Needs to be done only once per mergeable pass list (we need to make sure mergeable passes use the same depth!)
                    m_ActiveDepthAttachmentDescriptor = new AttachmentDescriptor(SystemInfo.GetGraphicsFormat(DefaultFormat.DepthStencil));
                    m_ActiveDepthAttachmentDescriptor.ConfigureTarget(passDepthAttachment.nameID != BuiltinRenderTextureType.CameraTarget ? passDepthAttachment.nameID :
                                                                      (usesTargetTexture ? new RenderTargetIdentifier(cameraData.targetTexture.depthBuffer) : BuiltinRenderTextureType.Depth),
                                                                      ((uint)finalClearFlag & (uint)ClearFlag.Depth) == 0, true);

                    if (finalClearFlag != ClearFlag.None)
                    {
                        // We don't clear color for Overlay render targets; however, the pipeline sets up depth-only render passes as color attachments, which we do need to clear
                        if ((cameraData.renderType != CameraRenderType.Overlay || depthOnly && ((uint)finalClearFlag & (uint)ClearFlag.Color) != 0))
                        {
                            currentAttachmentDescriptor.ConfigureClear(finalClearColor, 1.0f, 0);
                        }
                        if (((uint)finalClearFlag & (uint)ClearFlag.Depth) != 0)
                        {
                            m_ActiveDepthAttachmentDescriptor.ConfigureClear(Color.black, 1.0f, 0);
                        }
                    }

                    // Resolve to the implicit color target's resolve surface. TODO: handle m_CameraResolveTarget if present?
                    if (samples > 1)
                    {
                        currentAttachmentDescriptor.ConfigureResolveTarget(colorAttachmentTarget);
                        if (RenderingUtils.MultisampleDepthResolveSupported())
                        {
                            m_ActiveDepthAttachmentDescriptor.ConfigureResolveTarget(m_ActiveDepthAttachmentDescriptor.loadStoreTarget);
                        }
                    }


                    if (m_UseOptimizedStoreActions)
                    {
                        currentAttachmentDescriptor.storeAction       = m_FinalColorStoreAction[0];
                        m_ActiveDepthAttachmentDescriptor.storeAction = m_FinalDepthStoreAction;
                    }

                    int existingAttachmentIndex = FindAttachmentDescriptorIndexInList(currentAttachmentIdx,
                                                                                      currentAttachmentDescriptor, m_ActiveColorAttachmentDescriptors);

                    if (existingAttachmentIndex == -1)
                    {
                        // add a new attachment
                        pass.m_ColorAttachmentIndices[0] = currentAttachmentIdx;
                        m_ActiveColorAttachmentDescriptors[currentAttachmentIdx] = currentAttachmentDescriptor;
                        currentAttachmentIdx++;
                        m_RenderPassesAttachmentCount[currentPassHash]++;
                    }
                    else
                    {
                        // attachment was already present
                        pass.m_ColorAttachmentIndices[0] = existingAttachmentIndex;
                    }
                }
            }
        }
Example #17
        internal void SetNativeRenderPassMRTAttachmentList(ScriptableRenderPass renderPass, ref CameraData cameraData, bool needCustomCameraColorClear, ClearFlag cameraClearFlag)
        {
            using (new ProfilingScope(null, Profiling.setMRTAttachmentsList))
            {
                int     currentPassIndex       = renderPass.renderPassQueueIndex;
                Hash128 currentPassHash        = m_PassIndexToPassHash[currentPassIndex];
                int[]   currentMergeablePasses = m_MergeableRenderPassesMap[currentPassHash];

                // Skip if this is not the first pass in the mergeable list
                if (currentMergeablePasses.First() != currentPassIndex)
                {
                    return;
                }

                m_RenderPassesAttachmentCount[currentPassHash] = 0;

                UpdateFinalStoreActions(currentMergeablePasses, cameraData);

                int  currentAttachmentIdx = 0;
                bool hasInput             = false;
                foreach (var passIdx in currentMergeablePasses)
                {
                    if (passIdx == -1)
                    {
                        break;
                    }
                    ScriptableRenderPass pass = m_ActiveRenderPassQueue[passIdx];

                    for (int i = 0; i < pass.m_ColorAttachmentIndices.Length; ++i)
                    {
                        pass.m_ColorAttachmentIndices[i] = -1;
                    }

                    for (int i = 0; i < pass.m_InputAttachmentIndices.Length; ++i)
                    {
                        pass.m_InputAttachmentIndices[i] = -1;
                    }

                    uint validColorBuffersCount = RenderingUtils.GetValidColorBufferCount(pass.colorAttachmentHandles);

                    for (int i = 0; i < validColorBuffersCount; ++i)
                    {
                        AttachmentDescriptor currentAttachmentDescriptor =
                            new AttachmentDescriptor(pass.renderTargetFormat[i] != GraphicsFormat.None ? pass.renderTargetFormat[i] : GetDefaultGraphicsFormat(cameraData));

                        var colorHandle = pass.overrideCameraTarget ? pass.colorAttachmentHandles[i] : m_CameraColorTarget.handle;

                        int existingAttachmentIndex = FindAttachmentDescriptorIndexInList(colorHandle.nameID, m_ActiveColorAttachmentDescriptors);

                        if (m_UseOptimizedStoreActions)
                        {
                            currentAttachmentDescriptor.storeAction = m_FinalColorStoreAction[i];
                        }

                        if (existingAttachmentIndex == -1)
                        {
                            // add a new attachment
                            m_ActiveColorAttachmentDescriptors[currentAttachmentIdx] = currentAttachmentDescriptor;
                            bool passHasClearColor = (pass.clearFlag & ClearFlag.Color) != 0;
                            m_ActiveColorAttachmentDescriptors[currentAttachmentIdx].ConfigureTarget(colorHandle.nameID, !passHasClearColor, true);

                            if (pass.colorAttachmentHandles[i] == m_CameraColorTarget.handle && needCustomCameraColorClear && (cameraClearFlag & ClearFlag.Color) != 0)
                            {
                                m_ActiveColorAttachmentDescriptors[currentAttachmentIdx].ConfigureClear(cameraData.backgroundColor, 1.0f, 0);
                            }
                            else if (passHasClearColor)
                            {
                                m_ActiveColorAttachmentDescriptors[currentAttachmentIdx].ConfigureClear(CoreUtils.ConvertSRGBToActiveColorSpace(pass.clearColor), 1.0f, 0);
                            }

                            pass.m_ColorAttachmentIndices[i] = currentAttachmentIdx;
                            currentAttachmentIdx++;
                            m_RenderPassesAttachmentCount[currentPassHash]++;
                        }
                        else
                        {
                            // attachment was already present
                            pass.m_ColorAttachmentIndices[i] = existingAttachmentIndex;
                        }
                    }

                    if (PassHasInputAttachments(pass))
                    {
                        hasInput = true;
                        SetupInputAttachmentIndices(pass);
                    }

                    // TODO: this is redundant and is being setup for each attachment. Needs to be done only once per mergeable pass list (we need to make sure mergeable passes use the same depth!)
                    m_ActiveDepthAttachmentDescriptor = new AttachmentDescriptor(SystemInfo.GetGraphicsFormat(DefaultFormat.DepthStencil));
                    bool passHasClearDepth = (cameraClearFlag & ClearFlag.DepthStencil) != 0;
                    m_ActiveDepthAttachmentDescriptor.ConfigureTarget(pass.overrideCameraTarget ? pass.depthAttachmentHandle.nameID : m_CameraDepthTarget.nameID, !passHasClearDepth, true);

                    if (passHasClearDepth)
                    {
                        m_ActiveDepthAttachmentDescriptor.ConfigureClear(Color.black, 1.0f, 0);
                    }

                    if (m_UseOptimizedStoreActions)
                    {
                        m_ActiveDepthAttachmentDescriptor.storeAction = m_FinalDepthStoreAction;
                    }
                }

                if (hasInput)
                {
                    SetupTransientInputAttachments(m_RenderPassesAttachmentCount[currentPassHash]);
                }
            }
        }
Example #18
        private static Texture2D CreatePointLightLookupTexture()
        {
            const int WIDTH  = 256;
            const int HEIGHT = 256;

            var textureFormat = GraphicsFormat.R8G8B8A8_UNorm;

            if (RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat, FormatUsage.SetPixels))
            {
                textureFormat = GraphicsFormat.R16G16B16A16_SFloat;
            }
            else if (RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R32G32B32A32_SFloat, FormatUsage.SetPixels))
            {
                textureFormat = GraphicsFormat.R32G32B32A32_SFloat;
            }

            var texture = new Texture2D(WIDTH, HEIGHT, textureFormat, TextureCreationFlags.None);

            texture.filterMode = FilterMode.Bilinear;
            texture.wrapMode   = TextureWrapMode.Clamp;
            var center = new Vector2(WIDTH / 2.0f, HEIGHT / 2.0f);

            for (var y = 0; y < HEIGHT; y++)
            {
                for (var x = 0; x < WIDTH; x++)
                {
                    var pos       = new Vector2(x, y);
                    var distance  = Vector2.Distance(pos, center);
                    var relPos    = pos - center;
                    var direction = center - pos;
                    direction.Normalize();

                    // red   = 1-0 distance
                    // green = 1-0 angle
                    // blue  = direction.x
                    // alpha = direction.y

                    float red;
                    if (x == WIDTH - 1 || y == HEIGHT - 1)
                    {
                        red = 0;
                    }
                    else
                    {
                        red = Mathf.Clamp(1 - (2.0f * distance / WIDTH), 0.0f, 1.0f);
                    }

                    var cosAngle = Vector2.Dot(Vector2.down, relPos.normalized);
                    var angle    = Mathf.Acos(cosAngle) / Mathf.PI; // 0-1

                    var green = Mathf.Clamp(1 - angle, 0.0f, 1.0f);
                    var blue  = direction.x;
                    var alpha = direction.y;

                    var color = new Color(red, green, blue, alpha);

                    texture.SetPixel(x, y, color);
                }
            }
            texture.Apply();
            return(texture);
        }
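A short hedged sketch of binding the generated lookup texture so shaders can sample it; the shader property name and helper are assumptions for illustration.

        // Hypothetical usage (sketch only): expose the lookup texture to shaders.
        // "_PointLightLookup" is an assumed property name, not from the example above.
        static readonly int k_PointLightLookupID = Shader.PropertyToID("_PointLightLookup");

        static void BindPointLightLookup(Texture2D lookupTexture)
        {
            Shader.SetGlobalTexture(k_PointLightLookupID, lookupTexture);
        }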