// Depth prepass (legacy LWRP API): renders opaque geometry into a temporary
// depth-only render target so later passes can sample camera depth.
public override void Execute(LightweightForwardRenderer renderer, ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get(k_DepthPrepassTag);
    using (new ProfilingSample(cmd, k_DepthPrepassTag))
    {
        // Allocate the depth attachment and bind it, clearing depth only.
        cmd.GetTemporaryRT(depthAttachmentHandle.id, descriptor, FilterMode.Point);
        SetRenderTarget(
            cmd,
            depthAttachmentHandle.Identifier(),
            RenderBufferLoadAction.DontCare,
            RenderBufferStoreAction.Store,
            ClearFlag.Depth,
            Color.black,
            descriptor.dimension);
        // Flush the setup commands now: DrawRenderers below goes through the
        // context directly, not through this command buffer.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var drawSettings = CreateDrawRendererSettings(renderingData.cameraData.camera, SortFlags.CommonOpaque, RendererConfiguration.None, renderingData.supportsDynamicBatching);
        if (renderingData.cameraData.isStereoEnabled)
        {
            // Draws issued between Start/StopMultiEye are stereo-ized by the engine.
            Camera camera = renderingData.cameraData.camera;
            context.StartMultiEye(camera);
            context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, opaqueFilterSettings);
            context.StopMultiEye(camera);
        }
        else
            context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, opaqueFilterSettings);
    }
    // Submit the end-of-sample marker recorded by ProfilingSample.Dispose.
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Renders opaque geometry with the "DepthOnly" shader pass using an override
// material and publishes the result globally as _CameraDepthNormalsTexture.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get(profilerTag);
    var shaderTag = new ShaderTagId("DepthOnly");
    var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
    var drawSettings = CreateDrawingSettings(shaderTag, ref renderingData, sortFlags);
    // No per-object data is needed for a depth/normals-only pass.
    drawSettings.perObjectData = PerObjectData.None;
    if (renderingData.cameraData.isStereoEnabled)
    {
        context.StartMultiEye(renderingData.cameraData.camera);
    }
    drawSettings.overrideMaterial = material;
    context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filteringSettings);
    // Fix: the original started multi-eye rendering but never stopped it.
    // Every sibling pass in this file pairs StartMultiEye with StopMultiEye;
    // leaving stereo active would bleed into subsequent passes.
    if (renderingData.cameraData.isStereoEnabled)
    {
        context.StopMultiEye(renderingData.cameraData.camera);
    }
    cmd.SetGlobalTexture("_CameraDepthNormalsTexture", depthNormalsHandle.id);
    context.ExecuteCommandBuffer(cmd);
    cmd.Clear();
    CommandBufferPool.Release(cmd);
}
// Here you can implement the rendering logic.
// Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
// https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
// You don't have to call ScriptableRenderContext.submit, the render pipeline will call it at specific points in the pipeline.
// NOTE(review): this snippet ends inside the method body (the command buffer is
// set up but its execution/release continues past the visible range).
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get(m_ProfilerTag);
    using (new ProfilingSample(cmd, m_ProfilerTag))
    {
        // Flush the profiling begin-marker before issuing draws via the context.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;
        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;
        if (cameraData.isStereoEnabled)
        {
            context.StartMultiEye(camera);
        }
        // Re-render opaques with the depth-normals override material.
        drawSettings.overrideMaterial = depthNormalsMaterial;
        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
        cmd.SetGlobalTexture("_CameraDepthNormalsTexture", depthAttachmentHandle.id);
    }
// VXGI camera render entry point: optionally throttles the voxelization
// pre-pass, then clears, draws the skybox, and renders the camera via the
// deferred path (or the mipmap visualization).
public void Render(ScriptableRenderContext renderContext, Camera camera, VXGIRenderer renderer)
{
    UpdateResolution();
    float realtime = Time.realtimeSinceStartup;
    bool tracingThrottled = throttleTracing;
#if UNITY_EDITOR
    // Only throttle while in play mode; edit-mode rendering always traces.
    tracingThrottled &= UnityEditor.EditorApplication.isPlaying;
#endif
    if (tracingThrottled)
    {
        // Re-run the voxel pre-pass at most `tracingRate` times per second.
        if (_previousTrace + 1f / tracingRate < realtime)
        {
            _previousTrace = realtime;
            PrePass(renderContext, renderer);
        }
    }
    else
    {
        PrePass(renderContext, renderer);
    }
    renderContext.SetupCameraProperties(camera, camera.stereoEnabled);
    if (camera.stereoEnabled)
    {
        renderContext.StartMultiEye(camera);
    }
    // NOTE(review): CameraClearFlags is not a [Flags] enum; `& CameraClearFlags.Depth`
    // relies on the enum's numeric values — confirm this matches the intended clears.
    _command.ClearRenderTarget(
        (camera.clearFlags & CameraClearFlags.Depth) != 0,
        camera.clearFlags == CameraClearFlags.Color,
        camera.backgroundColor
    );
    renderContext.ExecuteCommandBuffer(_command);
    _command.Clear();
    if (camera.clearFlags == CameraClearFlags.Skybox)
    {
        renderContext.DrawSkybox(camera);
    }
    if (visualizeMipmap)
    {
        renderer.RenderMipmap(renderContext, camera, this);
    }
    else
    {
        SetupShader(renderContext);
        renderer.RenderDeferred(renderContext, camera, this);
    }
    if (camera.stereoEnabled)
    {
        renderContext.StopMultiEye(camera);
        renderContext.StereoEndRender(camera);
    }
}
/// <summary>
/// Begins multi-eye (stereo) rendering on the context when the frame
/// configuration carries the Stereo flag; otherwise does nothing.
/// </summary>
public static void StartStereoRendering(Camera camera, ref ScriptableRenderContext context, FrameRenderingConfiguration renderingConfiguration)
{
    bool stereoRequested = CoreUtils.HasFlag(renderingConfiguration, FrameRenderingConfiguration.Stereo);
    if (!stereoRequested)
        return;

    context.StartMultiEye(camera);
}
// Depth prepass (legacy LWRP API variant): binds the depth surface and draws
// opaque geometry into it, bracketing the draw for stereo when XR is active.
public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get(k_DepthPrepassTag);
    using (new ProfilingSample(cmd, k_SetupRenderTargetTag))
    {
        SetRenderTarget(cmd, GetSurface(depthAttachmentHandle), RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, ClearFlag.Depth, Color.black);
        // Submit the target setup before DrawRenderers, which bypasses cmd.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var drawSettings = CreateDrawRendererSettings(renderingData.cameraData.camera, SortFlags.CommonOpaque, RendererConfiguration.None, renderingData.supportsDynamicBatching);
        if (renderingData.cameraData.isStereoEnabled)
        {
            // Stereo-ize the opaque draw between Start/StopMultiEye.
            Camera camera = renderingData.cameraData.camera;
            context.StartMultiEye(camera);
            context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, renderer.opaqueFilterSettings);
            context.StopMultiEye(camera);
        }
        else
        {
            context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, renderer.opaqueFilterSettings);
        }
    }
    // Flush the profiling end-marker recorded by ProfilingSample.Dispose.
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
/// <summary>
/// Starts engine-side stereo (multi-eye) rendering for the given camera when
/// the frame configuration requests stereo; no-op otherwise.
/// </summary>
public static void Start(ref ScriptableRenderContext context, FrameRenderingConfiguration renderingConfiguration, Camera currentCamera)
{
    bool wantsStereo = LightweightUtils.HasFlag(renderingConfiguration, FrameRenderingConfiguration.Stereo);
    if (!wantsStereo)
        return;

    context.StartMultiEye(currentCamera);
}
// GBuffer pass (deferred): selects the gbuffer normals-encoding keyword, then
// draws opaques into the gbuffer targets.
// NOTE(review): this snippet ends inside the method body; the command buffer
// release continues past the visible range.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    CommandBuffer gbufferCommands = CommandBufferPool.Get();
    using (new ProfilingScope(gbufferCommands, m_ProfilingSampler))
    {
        // Toggle octahedral normal encoding based on the deferred-lights setting.
        if (m_DeferredLights.AccurateGbufferNormals)
        {
            gbufferCommands.EnableShaderKeyword(ShaderKeywordStrings._GBUFFER_NORMALS_OCT);
        }
        else
        {
            gbufferCommands.DisableShaderKeyword(ShaderKeywordStrings._GBUFFER_NORMALS_OCT);
        }
        gbufferCommands.SetViewProjectionMatrices(renderingData.cameraData.camera.worldToCameraMatrix, renderingData.cameraData.camera.projectionMatrix);
        // Note: a special case might be required if(renderingData.cameraData.isStereoEnabled) - see reference in ScreenSpaceShadowResolvePass.Execute
        context.ExecuteCommandBuffer(gbufferCommands); // send the gbufferCommands to the scriptableRenderContext - this should be done *before* calling scriptableRenderContext.DrawRenderers
        gbufferCommands.Clear();
        DrawingSettings drawingSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, renderingData.cameraData.defaultOpaqueSortFlags);
        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;
        if (cameraData.isStereoEnabled)
        {
            context.StartMultiEye(camera, eyeIndex);
        }
        context.DrawRenderers(renderingData.cullResults, ref drawingSettings, ref m_FilteringSettings /*, ref m_RenderStateBlock*/);
    }
/// <inheritdoc/>
// NOTE(review): this snippet ends inside the method body; the command buffer
// release continues past the visible range.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get(m_ProfilerTag);
    using (new ProfilingScope(cmd, m_ProfilingSampler))
    {
        // Flush the profiling begin-marker before issuing draws via the context.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;
        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;
        if (cameraData.isStereoEnabled)
        {
            context.StartMultiEye(camera, eyeIndex);
        }
        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
        //SetRenderTarget(cmd, depthAttachmentHandle.id,RenderBufferLoadAction.Load,RenderBufferStoreAction.Store,ClearFlag.All,Color.black,TextureDimension.Tex2D);
        //context.ExecuteCommandBuffer(cmd);
        //cmd.Clear();
        //cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, TestMaterial);
        //context.ExecuteCommandBuffer(cmd);
        //cmd.Clear();
    }
// Here you can implement the rendering logic.
// Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
// https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
// You don't have to call ScriptableRenderContext.submit, the render pipeline will call it at specific points in the pipeline.
// NOTE(review): this snippet ends inside the method body; the command buffer
// release continues past the visible range.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get("Scene Normals Prepass");
    using (new ProfilingScope(cmd, m_ProfilingSampler))
    {
        // Flush the profiling begin-marker before issuing draws via the context.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;
        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;
        if (cameraData.isStereoEnabled)
        {
            context.StartMultiEye(camera);
        }
        // Re-render opaques with the normals override material, restricted to
        // the layers the camera itself culls.
        drawSettings.overrideMaterial = normalsMaterial;
        m_FilteringSettings.layerMask = camera.cullingMask;
        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
    }
// Screen-space shadow collect pass (legacy LWRP API): resolves the directional
// shadow map into a screen-space occlusion texture via a fullscreen blit.
public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
{
    // Nothing to collect when directional shadows are disabled.
    if (renderingData.shadowData.renderedDirectionalShadowQuality == LightShadows.None)
    {
        return;
    }
    CommandBuffer cmd = CommandBufferPool.Get("Collect Shadows");
    SetShadowCollectPassKeywords(cmd, ref renderingData.shadowData);
    // Note: The source isn't actually 'used', but there's an engine peculiarity (bug) that
    // doesn't like null sources when trying to determine a stereo-ized blit. So for proper
    // stereo functionality, we use the screen-space shadow map as the source (until we have
    // a better solution).
    // An alternative would be DrawProcedural, but that would require further changes in the shader.
    RenderTargetIdentifier screenSpaceOcclusionTexture = GetSurface(colorAttachmentHandle);
    SetRenderTarget(cmd, screenSpaceOcclusionTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, ClearFlag.Color | ClearFlag.Depth, Color.white);
    cmd.Blit(screenSpaceOcclusionTexture, screenSpaceOcclusionTexture, m_ScreenSpaceShadowsMaterial);
    if (renderingData.cameraData.isStereoEnabled)
    {
        // Execute inside a Start/StopMultiEye bracket so the blit is stereo-ized.
        Camera camera = renderingData.cameraData.camera;
        context.StartMultiEye(camera);
        context.ExecuteCommandBuffer(cmd);
        context.StopMultiEye(camera);
    }
    else
    {
        context.ExecuteCommandBuffer(cmd);
    }
    CommandBufferPool.Release(cmd);
}
// Normals prepass: re-renders opaques with a normals override material and
// publishes the target globally as _CameraNormalsTexture.
// NOTE(review): this snippet ends inside the method body; the command buffer
// release continues past the visible range.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get(m_ProfilerTag);
    using (new ProfilingSample(cmd, m_ProfilerTag))
    {
        //cmd.GetTemporaryRT(normalAttachmentHandle.id, descriptor, FilterMode.Point);
        //context.ExecuteCommandBuffer(cmd);
        //ConfigureClear(ClearFlag.Color, Color.black);
        // Flush the profiling begin-marker before issuing draws via the context.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;
        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;
        if (cameraData.isStereoEnabled)
        {
            context.StartMultiEye(camera);
        }
        drawSettings.overrideMaterial = normalsMaterial;
        //m_FilteringSettings.layerMask = 1 << LayerMask.NameToLayer("Character");
        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
        cmd.SetGlobalTexture("_CameraNormalsTexture", normalAttachmentHandle.id);
        //cmd.SetRenderTarget(cameraColorAttachmentHandle.Identifier(), RenderBufferLoadAction.Load, RenderBufferStoreAction.StoreAndResolve, cameraDepthAttachmentHandle.Identifier(), RenderBufferLoadAction.Load, RenderBufferStoreAction.StoreAndResolve);
        //cmd.ReleaseTemporaryRT(normalAttachmentHandle.id);
    }
/// <summary>
/// Begin-XR pass: switches the context into multi-eye rendering for the
/// current camera. Draws issued after this point are stereo-ized by the
/// engine until a matching StopMultiEye is called elsewhere.
/// </summary>
public override void Execute(LightweightForwardRenderer renderer, ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
{
    context.StartMultiEye(renderingData.cameraData.camera);
}
// Voxelized (VX) screen-space shadow pass: dispatches a compute shader that
// resolves main-light shadows per screen tile, then sets shadow keywords.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    int kernel = GetComputeShaderKernel(ref renderingData.shadowData);
    // -1 means no kernel matches the current shadow configuration.
    if (kernel == -1)
    {
        return;
    }
    var shadowLightIndex = renderingData.lightData.mainLightIndex;
    var shadowLight = renderingData.lightData.visibleLights[shadowLightIndex];
    var light = shadowLight.light;
    dirVxShadowMap = light.GetComponent <DirectionalVxShadowMap>();
    CommandBuffer cmd = CommandBufferPool.Get(k_CollectShadowsTag);
    if (mainLightDynamicShadows)
    {
        mainLightShadowCasterPass.SetMainLightShadowReceiverConstantsOnComputeShader(
            cmd, ref renderingData.shadowData, shadowLight, m_ScreenSpaceShadowsComputeShader);
    }
    SetupVxShadowReceiverConstants(
        cmd, kernel, ref m_ScreenSpaceShadowsComputeShader, ref renderingData.cameraData.camera, ref shadowLight);
    // One thread group per TileSize x TileSize screen tile (rounded up).
    int x = (renderingData.cameraData.camera.pixelWidth + TileAdditive) / TileSize;
    int y = (renderingData.cameraData.camera.pixelHeight + TileAdditive) / TileSize;
    cmd.DispatchCompute(m_ScreenSpaceShadowsComputeShader, kernel, x, y, 1);
    // even if the main light doesn't have dynamic shadows,
    // cascades keyword is needed for screen space shadow map texture in opaque rendering pass.
    if (mainLightDynamicShadows == false)
    {
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadows, true);
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadowCascades, true);
    }
    else
    {
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadowCascades, true);
    }
    if (renderingData.cameraData.isStereoEnabled)
    {
        // Execute inside a Start/StopMultiEye bracket so the dispatch is stereo-aware.
        Camera camera = renderingData.cameraData.camera;
        context.StartMultiEye(camera);
        context.ExecuteCommandBuffer(cmd);
        context.StopMultiEye(camera);
    }
    else
    {
        context.ExecuteCommandBuffer(cmd);
    }
    CommandBufferPool.Release(cmd);
}
// HDRP custom pass: re-renders shadow casters from a separate bake camera into
// a dedicated depth target (depthFromCam), overriding view/projection globals
// to match the bake camera. Must be injected at AfterPostProcess.
protected override void Execute(ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult)
{
    if (injectionPoint != CustomPassInjectionPoint.AfterPostProcess)
    {
        // Fix: corrected the typo "shouild" in the user-facing error message.
        Debug.LogError("CustomPassInjectionPoint should be set on AfterPostProcess");
        return;
    }
    // Skip the bake camera itself and scene-view cameras.
    if (render && hdCamera.camera != bakeCamera && hdCamera.camera.cameraType != CameraType.SceneView)
    {
        // Cull shadow casters from the bake camera's point of view.
        bakeCamera.TryGetCullingParameters(out var cullingParams);
        cullingParams.cullingOptions = CullingOptions.ShadowCasters;
        cullingResult = renderContext.Cull(ref cullingParams);
        var result = new RendererListDesc(shaderTags, cullingResult, bakeCamera)
        {
            rendererConfiguration = PerObjectData.None,
            renderQueueRange = GetRenderQueueRange(RenderQueueType.AllOpaque),
            sortingCriteria = SortingCriteria.BackToFront,
            excludeObjectMotionVectors = false,
            layerMask = -1,
            overrideMaterial = depthMaterial,
            overrideMaterialPassIndex = depthMaterial.FindPass("ForwardOnly"),
        };
        // Flush pending commands, then leave multi-eye so the bake renders mono.
        renderContext.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        renderContext.StopMultiEye(hdCamera.camera);
        // Build view/projection for the bake camera (flip Z for Unity's
        // right-to-left-handed view-space convention) and publish the globals
        // the depth material expects.
        var p = GL.GetGPUProjectionMatrix(bakeCamera.projectionMatrix, true);
        Matrix4x4 scaleMatrix = Matrix4x4.identity;
        scaleMatrix.m22 = -1.0f;
        var v = scaleMatrix * bakeCamera.transform.localToWorldMatrix.inverse;
        var vp = p * v;
        cmd.SetGlobalMatrix("_ViewMatrix", v);
        cmd.SetGlobalMatrix("_InvViewMatrix", v.inverse);
        cmd.SetGlobalMatrix("_ProjMatrix", p);
        cmd.SetGlobalMatrix("_InvProjMatrix", p.inverse);
        cmd.SetGlobalMatrix("_ViewProjMatrix", vp);
        cmd.SetGlobalMatrix("_InvViewProjMatrix", vp.inverse);
        cmd.SetGlobalMatrix("_CameraViewProjMatrix", vp);
        cmd.SetGlobalVector("_WorldSpaceCameraPos", Vector3.zero);
        cmd.SetGlobalVector("_ShadowClipPlanes", Vector3.zero);
        CoreUtils.SetRenderTarget(cmd, depthFromCam, ClearFlag.All);
        HDUtils.DrawRendererList(renderContext, cmd, RendererList.Create(result));
        // Restore multi-eye so the remaining HDRP passes render both eyes.
        renderContext.StartMultiEye(hdCamera.camera);
        renderContext.ExecuteCommandBuffer(cmd);
        cmd.Clear();
    }
}
/// <inheritdoc/>
public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (renderer == null)
        throw new ArgumentNullException("renderer");

    // Enter engine-side stereo rendering; subsequent draws are stereo-ized.
    Camera activeCamera = renderingData.cameraData.camera;
    context.StartMultiEye(activeCamera);
}
/// <inheritdoc/>
// Screen-space shadow resolve: blits the shadow material over a temporary
// color target and publishes it via colorAttachmentHandle.
public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (m_ScreenSpaceShadowsMaterial == null)
    {
        Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_ScreenSpaceShadowsMaterial, GetType().Name);
        return;
    }
    if (renderer == null)
    {
        throw new ArgumentNullException("renderer");
    }
    // No main light means no shadows to resolve.
    if (renderingData.lightData.mainLightIndex == -1)
    {
        return;
    }
    CommandBuffer cmd = CommandBufferPool.Get(k_CollectShadowsTag);
    cmd.GetTemporaryRT(colorAttachmentHandle.id, descriptor, FilterMode.Bilinear);
    // Note: The source isn't actually 'used', but there's an engine peculiarity (bug) that
    // doesn't like null sources when trying to determine a stereo-ized blit. So for proper
    // stereo functionality, we use the screen-space shadow map as the source (until we have
    // a better solution).
    // An alternative would be DrawProcedural, but that would require further changes in the shader.
    RenderTargetIdentifier screenSpaceOcclusionTexture = colorAttachmentHandle.Identifier();
    SetRenderTarget(cmd, screenSpaceOcclusionTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, ClearFlag.Color | ClearFlag.Depth, Color.white, descriptor.dimension);
    // This blit is troublesome. When MSAA is enabled it will render a fullscreen quad + store resolved MSAA + extra blit
    // This consumes about 10MB of extra unnecessary bandwidth on boat attack.
    // In order to avoid it we can do a cmd.DrawMesh instead, however because LWRP doesn't setup camera matrices itself,
    // we would need to call an extra SetupCameraProperties here just to setup those matrices which is also troublesome.
    // We need get rid of SetupCameraProperties and setup camera matrices in LWRP ASAP.
    cmd.Blit(screenSpaceOcclusionTexture, screenSpaceOcclusionTexture, m_ScreenSpaceShadowsMaterial);
    if (renderingData.cameraData.isStereoEnabled)
    {
        // Execute inside a Start/StopMultiEye bracket so the blit is stereo-ized.
        Camera camera = renderingData.cameraData.camera;
        context.StartMultiEye(camera);
        context.ExecuteCommandBuffer(cmd);
        context.StopMultiEye(camera);
    }
    else
    {
        context.ExecuteCommandBuffer(cmd);
    }
    CommandBufferPool.Release(cmd);
}
// Crest ocean shadow-copy custom pass: executes the ocean system's shadow-map
// copy command buffer outside of stereo rendering, then restores state.
protected override void Execute(ScriptableRenderContext context, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult)
{
    if (OceanRenderer.Instance == null || OceanRenderer.Instance._lodDataShadow == null)
    {
        return;
    }
    var camera = hdCamera.camera;
    // Custom passes execute for every camera. We only support one camera for now.
    if (!ReferenceEquals(camera.transform, OceanRenderer.Instance.Viewpoint))
    {
        return;
    }
    if (context == null)
    {
        throw new System.ArgumentNullException("context");
    }
    // TODO: bail when not executing for main light or when no main light exists?
    // if (renderingData.lightData.mainLightIndex == -1) return;
    var commandBuffer = OceanRenderer.Instance._lodDataShadow.BufCopyShadowMap;
    if (commandBuffer == null)
    {
        return;
    }
    // Target is not multi-eye so stop mult-eye rendering for this command buffer. Breaks registered shadow
    // inputs without this.
    if (camera.stereoEnabled)
    {
        context.StopMultiEye(camera);
    }
    context.ExecuteCommandBuffer(commandBuffer);
    // Even if we do not call StopMultiEye, it is necessary to call StartMultiEye otherwise one eye no longer
    // renders.
    if (camera.stereoEnabled)
    {
        context.StartMultiEye(camera);
    }
    else
    {
        // Restore matrices otherwise remaining render will have incorrect matrices. Each pass is responsible
        // for restoring matrices if required.
        commandBuffer.Clear();
        commandBuffer.SetViewProjectionMatrices(camera.worldToCameraMatrix, camera.projectionMatrix);
        context.ExecuteCommandBuffer(commandBuffer);
    }
}
// Forward-render orchestration pass (legacy LWRP): sets shader constants,
// renders opaques, optional opaque-only post FX and depth/color copies, then
// transparents, post-processing or final blit, with stereo bracketing.
public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
{
    Camera camera = renderingData.cameraData.camera;
    bool dynamicBatching = renderingData.supportsDynamicBatching;
    SetupShaderConstants(ref context, ref renderingData.cameraData, ref renderingData.lightData, ref renderingData.shadowData);
    RendererConfiguration rendererConfiguration = GetRendererConfiguration(renderingData.lightData.totalAdditionalLightsCount);
    if (renderingData.cameraData.isStereoEnabled)
    {
        context.StartMultiEye(camera);
    }
    RenderOpaques(ref context, ref cullResults, ref renderingData.cameraData, rendererConfiguration, dynamicBatching);
    // Opaque-only post effects must run before transparents are drawn.
    if (renderingData.cameraData.postProcessEnabled && renderingData.cameraData.postProcessLayer.HasOpaqueOnlyEffects(renderer.postProcessRenderContext))
    {
        OpaquePostProcessSubPass(ref context, ref renderingData.cameraData);
    }
    // -1 means no depth attachment was requested for this frame.
    if (depthAttachmentHandle != -1)
    {
        CopyDepthSubPass(ref context, ref renderingData.cameraData);
    }
    if (renderingData.cameraData.requiresOpaqueTexture)
    {
        CopyColorSubpass(ref context, ref renderingData.cameraData);
    }
    RenderTransparents(ref context, ref cullResults, ref renderingData.cameraData, rendererConfiguration, dynamicBatching);
    if (renderingData.cameraData.postProcessEnabled)
    {
        PostProcessPass(ref context, ref renderingData.cameraData);
    }
    else if (!renderingData.cameraData.isOffscreenRender && colorAttachmentHandle != -1)
    {
        // No post FX: blit the intermediate color target to the backbuffer.
        FinalBlitPass(ref context, ref renderingData.cameraData);
    }
    if (renderingData.cameraData.isStereoEnabled)
    {
        context.StopMultiEye(camera);
        context.StereoEndRender(camera);
    }
}
// Water FX pass: renders transparent water-effect geometry into a half-
// resolution temporary color target (m_WaterFX) for later compositing.
public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get(k_RenderWaterFXTag);
    // Half-resolution target is sufficient for the water effects.
    RenderTextureDescriptor descriptor = ScriptableRenderer.CreateRenderTextureDescriptor(ref renderingData.cameraData);
    descriptor.width = (int)(descriptor.width * 0.5f);
    descriptor.height = (int)(descriptor.height * 0.5f);
    descriptor.colorFormat = RenderTextureFormat.Default;
    using (new ProfilingSample(cmd, k_RenderWaterFXTag))
    {
        cmd.GetTemporaryRT(m_WaterFX.id, descriptor, FilterMode.Bilinear);
        // Clear to a neutral value the water shader treats as "no effect".
        SetRenderTarget(
            cmd,
            m_WaterFX.Identifier(),
            RenderBufferLoadAction.DontCare,
            RenderBufferStoreAction.Store,
            ClearFlag.Color,
            new Color(0.0f, 0.5f, 0.5f, 0.5f),
            descriptor.dimension);
        // Submit target setup before DrawRenderers (which bypasses cmd).
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var drawSettings = CreateDrawingSettings(renderingData.cameraData.camera, SortingCriteria.CommonTransparent, PerObjectData.None, renderingData.supportsDynamicBatching);
        var filteringSettings = transparentFilterSettings;
        if (renderingData.cameraData.isStereoEnabled)
        {
            // Stereo-ize the transparent draw between Start/StopMultiEye.
            Camera camera = renderingData.cameraData.camera;
            context.StartMultiEye(camera);
            context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filteringSettings);
            context.StopMultiEye(camera);
        }
        else
        {
            context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filteringSettings);
        }
    }
    // Flush the profiling end-marker recorded by ProfilingSample.Dispose.
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
/// <inheritdoc/>
// Screen-space shadow collect: blits the main-light shadow resolve material
// over a temporary white-cleared target.
public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (renderer == null)
    {
        throw new ArgumentNullException("renderer");
    }
    // No main light means no shadows to collect.
    if (renderingData.lightData.mainLightIndex == -1)
    {
        return;
    }
    CommandBuffer cmd = CommandBufferPool.Get(k_CollectShadowsTag);
    cmd.GetTemporaryRT(colorAttachmentHandle.id, descriptor, FilterMode.Bilinear);
    VisibleLight shadowLight = renderingData.lightData.visibleLights[renderingData.lightData.mainLightIndex];
    SetShadowCollectPassKeywords(cmd, ref shadowLight, ref renderingData.shadowData);
    // Note: The source isn't actually 'used', but there's an engine peculiarity (bug) that
    // doesn't like null sources when trying to determine a stereo-ized blit. So for proper
    // stereo functionality, we use the screen-space shadow map as the source (until we have
    // a better solution).
    // An alternative would be DrawProcedural, but that would require further changes in the shader.
    RenderTargetIdentifier screenSpaceOcclusionTexture = colorAttachmentHandle.Identifier();
    SetRenderTarget(cmd, screenSpaceOcclusionTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, ClearFlag.Color | ClearFlag.Depth, Color.white, descriptor.dimension);
    cmd.Blit(screenSpaceOcclusionTexture, screenSpaceOcclusionTexture, renderer.GetMaterial(MaterialHandle.ScreenSpaceShadow));
    if (renderingData.cameraData.isStereoEnabled)
    {
        // Execute inside a Start/StopMultiEye bracket so the blit is stereo-ized.
        Camera camera = renderingData.cameraData.camera;
        context.StartMultiEye(camera);
        context.ExecuteCommandBuffer(cmd);
        context.StopMultiEye(camera);
    }
    else
    {
        context.ExecuteCommandBuffer(cmd);
    }
    CommandBufferPool.Release(cmd);
}
/// <inheritdoc/>
// Depth prepass: draws opaque geometry into a temporary depth-only target so
// later passes can sample camera depth.
public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (renderer == null)
    {
        throw new ArgumentNullException("renderer");
    }
    CommandBuffer cmd = CommandBufferPool.Get(k_DepthPrepassTag);
    using (new ProfilingSample(cmd, k_DepthPrepassTag))
    {
        cmd.GetTemporaryRT(depthAttachmentHandle.id, descriptor, FilterMode.Point);
        SetRenderTarget(
            cmd,
            depthAttachmentHandle.Identifier(),
            RenderBufferLoadAction.DontCare,
            RenderBufferStoreAction.Store,
            ClearFlag.Depth,
            Color.black,
            descriptor.dimension);
        // Submit target setup before DrawRenderers (which bypasses cmd).
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawingSettings(renderingData.cameraData.camera, sortFlags, PerObjectData.None, renderingData.supportsDynamicBatching);
        var filteringSettings = opaqueFilterSettings;
        if (renderingData.cameraData.isStereoEnabled)
        {
            // Stereo-ize the opaque draw between Start/StopMultiEye.
            Camera camera = renderingData.cameraData.camera;
            context.StartMultiEye(camera);
            context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filteringSettings);
            context.StopMultiEye(camera);
        }
        else
        {
            context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filteringSettings);
        }
    }
    // Flush the profiling end-marker recorded by ProfilingSample.Dispose.
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Here you can implement the rendering logic.
// Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
// https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
// You don't have to call ScriptableRenderContext.submit, the render pipeline will call it at specific points in the pipeline.
// NOTE(review): this snippet ends inside the method body; the command buffer
// execution/release continues past the visible range.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    CommandBuffer cmd = CommandBufferPool.Get(m_ProfilerTag);
    using (new ProfilingSample(cmd, m_ProfilerTag))
    {
        //context.ExecuteCommandBuffer(cmd);
        //cmd.Clear();
        var cam = renderingData.cameraData;
        // Downscaled dimensions used for the SDF bake below.
        var w = cam.camera.pixelWidth / _host.Downscale;
        var h = cam.camera.pixelHeight / _host.Downscale;
        var rtd = renderingData.cameraData.cameraTargetDescriptor;
        rtd.depthBufferBits = 0;
        //rtd.graphicsFormat = UnityEngine.Experimental.Rendering.GraphicsFormat.
        //cmd.GetTemporaryRT(123, rtd);
        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;
        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;
        if (cameraData.isStereoEnabled)
        {
            context.StartMultiEye(camera);
        }
        // NOTE(review): CreateEngineMaterial is called every Execute — this
        // looks like a per-frame material allocation (leak); consider caching
        // the material in a field. Confirm against the enclosing class.
        var mat = CoreUtils.CreateEngineMaterial("SDF");
        drawSettings.overrideMaterialPassIndex = mat.FindPass("CamSeed");
        drawSettings.overrideMaterial = mat;
        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
        cmd.BakeSDF(TargetRT, SdfRT, w, h);
        cmd.SetGlobalTexture("_MainTex", SdfRT);
        //SDF.BakeCommandBuffer()
        //cmd.SetGlobalTexture("_CameraDepthNormalsTexture", depthAttachmentHandle.id);
    }
/// <inheritdoc/>
// NOTE(review): this snippet ends inside the method body; the command buffer
// release continues past the visible range.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    // NOTE: Do NOT mix ProfilingScope with named CommandBuffers i.e. CommandBufferPool.Get("name").
    // Currently there's an issue which results in mismatched markers.
    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, m_ProfilingSampler))
    {
        // Flush the profiling begin-marker before issuing draws via the context.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;
        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;
        if (cameraData.isStereoEnabled)
        {
            context.StartMultiEye(camera, eyeIndex);
        }
        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
    }
// Here you can implement the rendering logic.
// Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
// https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
// You don't have to call ScriptableRenderContext.Submit; the render pipeline calls it at specific points in the pipeline.
// NOTE(review): this snippet ends inside the method body; the command buffer
// release continues past the visible range.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    // Get a command buffer from the pool.
    CommandBuffer cmd = CommandBufferPool.Get(m_ProfilerTag);
    using (new ProfilingScope(cmd, new ProfilingSampler(m_ProfilerTag)))
    {
        // Execute the command buffer (flushes the profiling begin-marker).
        context.ExecuteCommandBuffer(cmd);
        // Clear the buffered commands.
        cmd.Clear();
        // Camera's opaque sorting flags.
        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        // Create the drawing settings.
        var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
        // No per-object data is needed for this pass.
        drawSettings.perObjectData = PerObjectData.None;
        // Check whether a VR (stereo) device is active.
        ref CameraData cameraData = ref renderingData.cameraData;
        Camera camera = cameraData.camera;
        if (cameraData.isStereoEnabled)
        {
            context.StartMultiEye(camera);
        }
        // Set the override material.
        drawSettings.overrideMaterial = depthNormalsMaterial;
        // Draw the renderers.
        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
        // Publish the result as a global texture.
        cmd.SetGlobalTexture("_CameraDepthNormalsTexture", depthAttachmentHandle.id);
    }
// Main entry point for our scriptable render loop: culls, sets up per-camera
// state, draws opaques + skybox + transparents, and submits — once per camera.
public static void Render(ScriptableRenderContext context, IEnumerable<Camera> cameras, bool useIntermediateBlitPath)
{
    bool stereoEnabled = XRSettings.isDeviceActive;

    foreach (var camera in cameras)
    {
        // Stereo-aware culling parameters perform a single cull covering both eyes.
        ScriptableCullingParameters cullingParams;
        if (!CullResults.GetCullingParameters(camera, stereoEnabled, out cullingParams))
            continue;

        var cullResults = new CullResults();
        CullResults.Cull(ref cullingParams, context, ref cullResults);

        // Bind render target, view/projection matrices and other per-camera built-in
        // shader variables. With stereo on, this also configures stereo matrices,
        // viewports and XR device render targets.
        context.SetupCameraProperties(camera, stereoEnabled);

        // Draws issued between Start/StopMultiEye are stereo-ized by the engine.
        if (stereoEnabled)
            context.StartMultiEye(camera);

        var intermediateRTID = new RenderTargetIdentifier(BuiltinRenderTextureType.CurrentActive);
        bool isIntermediateRTTexArray = false;
        if (useIntermediateBlitPath)
            ConfigureAndBindIntermediateRenderTarget(context, camera, stereoEnabled, out intermediateRTID, out isIntermediateRTTexArray);

        // Clear the depth buffer only (color is left untouched).
        var clearCmd = CommandBufferPool.Get();
        clearCmd.ClearRenderTarget(true, false, Color.black);
        context.ExecuteCommandBuffer(clearCmd);
        CommandBufferPool.Release(clearCmd);

        // Push the global lighting shader variables for this camera's cull results.
        SetupLightShaderVariables(cullResults.visibleLights, context);

        // Opaque geometry via the BasicPass shader pass.
        var drawSettings = new DrawRendererSettings(camera, new ShaderPassName("BasicPass"))
        {
            sorting = { flags = SortFlags.CommonOpaque }
        };
        var filterSettings = new FilterRenderersSettings(true)
        {
            renderQueueRange = RenderQueueRange.opaque
        };
        context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, filterSettings);

        context.DrawSkybox(camera);

        // Transparent geometry reuses the same settings with transparent sorting/queue.
        drawSettings.sorting.flags = SortFlags.CommonTransparent;
        filterSettings.renderQueueRange = RenderQueueRange.transparent;
        context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, filterSettings);

        if (useIntermediateBlitPath)
            BlitFromIntermediateToCameraTarget(context, intermediateRTID, isIntermediateRTTexArray);

        if (stereoEnabled)
        {
            context.StopMultiEye(camera);
            // StereoEndRender resets the camera to its pre-stereo settings and
            // invokes XR based events/callbacks.
            context.StereoEndRender(camera);
        }

        context.Submit();
    }
}
/// <inheritdoc/>
/// <summary>
/// OIT depth prepass: binds the existing depth attachment (no clear), selects the
/// depth MSAA shader keywords for the current sample count, and draws the OIT set
/// with alpha-test enabled.
/// </summary>
/// <param name="renderer">Owning renderer; must not be null.</param>
/// <param name="context">Render context used to issue draws and execute command buffers.</param>
/// <param name="renderingData">Per-frame rendering state.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="renderer"/> is null.</exception>
public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (renderer == null)
    {
        throw new ArgumentNullException("renderer");
    }

    CommandBuffer buffer = CommandBufferPool.Get(k_OITDepthPrepassTag);
    using (new ProfilingSample(buffer, k_OITDepthPrepassTag))
    {
        // The depth RT is allocated elsewhere; this pass only re-binds it,
        // loading existing contents and clearing nothing.
        SetRenderTarget(
            buffer,
            depthAttachmentHandle.Identifier(),
            RenderBufferLoadAction.Load,
            RenderBufferStoreAction.Store,
            ClearFlag.None,
            Color.black,
            descriptor.dimension);

        // Select exactly one depth-MSAA keyword set based on the sample count
        // (no MSAA / 2x / 4x; sample counts other than 4 with MSAA on use the 2x path).
        bool msaaEnabled = descriptor.msaaSamples > 1;
        bool msaa4 = descriptor.msaaSamples == 4;
        CoreUtils.SetKeyword(buffer, ShaderKeywordStrings.DepthNoMsaa, !msaaEnabled);
        CoreUtils.SetKeyword(buffer, ShaderKeywordStrings.DepthMsaa2, msaaEnabled && !msaa4);
        CoreUtils.SetKeyword(buffer, ShaderKeywordStrings.DepthMsaa4, msaaEnabled && msaa4);

        // Alpha-tested fragments must clip during this prepass.
        CoreUtils.SetKeyword(buffer, "_ALPHATEST_ON", true);

        // Flush the setup so the RT bind and keywords apply before drawing.
        context.ExecuteCommandBuffer(buffer);
        buffer.Clear();

        var drawSettings = CreateDrawRendererSettings(renderingData.cameraData.camera, SortFlags.None, RendererConfiguration.None, renderingData.supportsDynamicBatching);

        if (renderingData.cameraData.isStereoEnabled)
        {
            Camera stereoCamera = renderingData.cameraData.camera;
            context.StartMultiEye(stereoCamera);
            context.DrawRenderers(renderingData.cullResults.visibleRenderers, ref drawSettings, oitFilterSettings);
            context.StopMultiEye(stereoCamera);
        }
        else
        {
            context.DrawRenderers(renderingData.cullResults.visibleRenderers, ref drawSettings, oitFilterSettings);
        }

        // Queue the keyword reset; it is flushed by the final execute below.
        CoreUtils.SetKeyword(buffer, "_ALPHATEST_ON", false);
    }

    context.ExecuteCommandBuffer(buffer);
    CommandBufferPool.Release(buffer);
}
// Begins stereo (multi-eye) rendering for the current camera; subsequent draws
// issued on this context are stereo-ized by the engine until StopMultiEye is called.
public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
{
    context.StartMultiEye(renderingData.cameraData.camera);
}
/// <summary>
/// Prepares the render target for forward rendering of one camera: starts stereo
/// rendering if enabled, binds either an intermediate RT (allocating a temporary
/// one when the camera has no texture) or the current active target, and clears it
/// according to the camera's clear flags.
/// </summary>
/// <param name="camera">Camera being rendered.</param>
/// <param name="context">Render context the setup command buffer is executed on.</param>
/// <param name="stereoEnabled">True when an XR device is active for this camera.</param>
private void BeginForwardRendering(Camera camera, ref ScriptableRenderContext context, bool stereoEnabled)
{
    if (stereoEnabled)
    {
        context.StartMultiEye(camera);
    }

    m_RenderToIntermediateTarget = GetRenderToIntermediateTarget(camera);

    var cmd = CommandBufferPool.Get("SetCameraRenderTarget");
    if (m_RenderToIntermediateTarget)
    {
        // NOTE(review): this reads camera.activeTexture (the RT currently being
        // rendered) rather than camera.targetTexture — confirm this is intentional.
        if (camera.activeTexture == null)
        {
            m_IntermediateTextureArray = false;
            if (stereoEnabled)
            {
                // XR path: allocate the eye texture with the device-provided descriptor.
                RenderTextureDescriptor xrDesc = XRSettings.eyeTextureDesc;
                xrDesc.depthBufferBits = kCameraDepthBufferBits;
                xrDesc.colorFormat = RenderTextureFormat.ARGB32;
                xrDesc.msaaSamples = m_Asset.MSAASampleCount;

                // Single-pass stereo renders into a Tex2DArray (one slice per eye).
                m_IntermediateTextureArray = (xrDesc.dimension == TextureDimension.Tex2DArray);

                cmd.GetTemporaryRT(m_CameraRTProperty, xrDesc, FilterMode.Bilinear);
            }
            else
            {
                cmd.GetTemporaryRT(m_CameraRTProperty, Screen.width, Screen.height, kCameraDepthBufferBits,
                    FilterMode.Bilinear, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default,
                    m_Asset.MSAASampleCount);
            }

            if (m_IntermediateTextureArray)
            {
                // depthSlice of -1 binds all slices of the texture array.
                cmd.SetRenderTarget(m_CameraRTID, 0, CubemapFace.Unknown, -1);
            }
            else
            {
                cmd.SetRenderTarget(m_CameraRTID);
            }
        }
        else
        {
            cmd.SetRenderTarget(new RenderTargetIdentifier(camera.activeTexture));
        }
    }
    else
    {
        cmd.SetRenderTarget(BuiltinRenderTextureType.CurrentActive);
    }

    // Clear RenderTarget to avoid tile initialization on mobile GPUs
    // https://community.arm.com/graphics/b/blog/posts/mali-performance-2-how-to-correctly-handle-framebuffers
    if (camera.clearFlags != CameraClearFlags.Nothing)
    {
        // Inside this guard clearFlags != Nothing always holds, so depth is always
        // cleared (the original recomputed the same condition redundantly).
        bool clearColor = (camera.clearFlags == CameraClearFlags.Color);
        cmd.ClearRenderTarget(true, clearColor, camera.backgroundColor);
    }

    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
/// <inheritdoc/>
/// <summary>
/// Renders the depth-normals pass. When a depth prepass already ran, the existing
/// depth buffer is reused (ZWrite off, ZTest Equal); otherwise this pass writes
/// depth itself (ZWrite on, ZTest LEqual) and enables alpha-test clipping.
/// </summary>
/// <param name="renderer">Owning renderer; must not be null.</param>
/// <param name="context">Render context used to issue draws and execute command buffers.</param>
/// <param name="renderingData">Per-frame rendering state.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="renderer"/> is null.</exception>
public override void Execute(ScriptableRenderer renderer, ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (renderer == null)
    {
        throw new ArgumentNullException("renderer");
    }

    CommandBuffer cmd = CommandBufferPool.Get(k_DepthNormalsTag);
    using (new ProfilingSample(cmd, k_DepthNormalsTag))
    {
        cmd.GetTemporaryRT(depthNormalsHandle.id, descriptor, FilterMode.Bilinear);

        if (isDepthPrepassEnabled)
        {
            // Depth already laid down by the prepass: load it, clear only color.
            SetRenderTarget(
                cmd,
                depthNormalsHandle.Identifier(),
                RenderBufferLoadAction.DontCare,
                RenderBufferStoreAction.Store,
                depthAttachmentHandle.Identifier(),
                RenderBufferLoadAction.Load,
                RenderBufferStoreAction.DontCare,
                ClearFlag.Color,
                Color.black,
                TextureDimension.Tex2D);
            cmd.DisableShaderKeyword("_ALPHATEST_ON");
        }
        else
        {
            // No prepass: this pass owns depth, so clear both color and depth.
            SetRenderTarget(
                cmd,
                depthNormalsHandle.Identifier(),
                RenderBufferLoadAction.DontCare,
                RenderBufferStoreAction.Store,
                ClearFlag.Color | ClearFlag.Depth,
                Color.black,
                TextureDimension.Tex2D);
            cmd.EnableShaderKeyword("_ALPHATEST_ON");
        }

        cmd.SetGlobalInt("_DepthNormalsZWrite", isDepthPrepassEnabled ? 0 : 1);
        cmd.SetGlobalInt("_DepthNormalsZTest", (int)(isDepthPrepassEnabled ? ZTest.Equal : ZTest.LEqual));

        // BUGFIX: flush AFTER queuing the ZWrite/ZTest globals. The original executed
        // the buffer before SetGlobalInt, so those globals were only flushed by the
        // final ExecuteCommandBuffer below — i.e. after DrawRenderers — leaving the
        // draws running with stale values from the previous frame.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();

        var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawSettings = CreateDrawRendererSettings(renderingData.cameraData.camera, sortFlags, RendererConfiguration.None, renderingData.supportsDynamicBatching);

        if (renderingData.cameraData.isStereoEnabled)
        {
            Camera camera = renderingData.cameraData.camera;
            context.StartMultiEye(camera);
            context.DrawRenderers(renderingData.cullResults.visibleRenderers, ref drawSettings, opaqueFilterSettings);
            context.StopMultiEye(camera);
        }
        else
        {
            context.DrawRenderers(renderingData.cullResults.visibleRenderers, ref drawSettings, opaqueFilterSettings);
        }
    }

    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}