// Creates the occlusion-culling sample screen: sets up the occlusion buffer,
// a secondary top-down debug camera, the scene query, a debug renderer, and
// profiler timing formats.
public OcclusionCullingScreen(IServiceLocator services) : base(services)
{
  _sceneNodes = new List<SceneNode>();

  // Occlusion buffer with default settings; progressive shadow caster culling
  // is enabled explicitly.
  OcclusionBuffer = new OcclusionBuffer(GraphicsService);
  OcclusionBuffer.ProgressiveShadowCasterCulling = true;

  EnableCulling = true;

  // Secondary camera that renders a top-down view of the scene:
  // 90° FOV, aspect ratio 1, near 1, far 512, placed 120 units above
  // (-10, 0, -10) and looking straight down.
  var projection = new PerspectiveProjection();
  projection.SetFieldOfView(MathHelper.ToRadians(90), 1, 1, 512);
  var topDownCamera = new Camera(projection);
  _topDownCameraNode = new CameraNode(topDownCamera)
  {
    PoseWorld = new Pose(new Vector3F(-10, 120, -10))
  };
  _topDownCameraNode.LookAt(new Vector3F(-10, 0, -10), Vector3F.UnitZ);

  _sceneQuery = new CustomSceneQuery();
  _debugRenderer = new DebugRenderer(GraphicsService, null, null);

  // The DigitalRune Profiler is used to measure execution times (reported in ms).
  Profiler.SetFormat("Occlusion.Render", 1e3f, "[ms]");
  Profiler.SetFormat("Occlusion.Query", 1e3f, "[ms]");
}
// Returns true if the camera position lies underwater in any WaterNode
// found among the query's renderable nodes; false otherwise.
// Note: entries in RenderableNodes may be null; the 'as' cast handles that.
private static bool IsCameraUnderwater(CustomSceneQuery query, CameraNode cameraNode)
{
  var position = cameraNode.PoseWorld.Position;
  var nodes = query.RenderableNodes;
  for (int i = 0; i < nodes.Count; i++)
  {
    var water = nodes[i] as WaterNode;
    if (water == null)
      continue;

    if (water.IsUnderwater(position))
      return true;
  }

  return false;
}
// Renders the scene for two cameras side-by-side (split screen).
// Camera A renders into a temporary half-width render target; camera B renders
// directly into the right half of the final render target. At the end, the
// camera-A image is copied into the left half of the final target.
protected override void OnRender(RenderContext context)
{
  // This screen expects two cameras.
  if (ActiveCameraNode == null || ActiveCameraNodeB == null)
  {
    return;
  }

  var renderTargetPool = GraphicsService.RenderTargetPool;
  var graphicsDevice = GraphicsService.GraphicsDevice;
  var originalRenderTarget = context.RenderTarget;
  var fullViewport = context.Viewport;

  // Get a render target for the first camera. Use half the width because we split
  // the screen horizontally.
  var format = new RenderTargetFormat(context.RenderTarget)
  {
    Width = fullViewport.Width / 2
  };
  var renderTargetA = renderTargetPool.Obtain2D(format);

  // Scene and LOD settings shared by both cameras.
  context.Scene = Scene;
  context.LodHysteresis = 0.5f;
  context.LodBias = 1.0f;
  context.LodBlendingEnabled = true;

  // One iteration per camera.
  for (int i = 0; i < 2; i++)
  {
    if (i == 0)
    {
      // The first camera renders into renderTargetA (full half-width viewport).
      context.CameraNode = ActiveCameraNode;
      context.Viewport = new Viewport(0, 0, fullViewport.Width / 2, fullViewport.Height);
      context.RenderTarget = renderTargetA;
    }
    else
    {
      // The second camera renders into the right half of the final render target.
      context.CameraNode = ActiveCameraNodeB;
      context.Viewport = new Viewport(fullViewport.X + fullViewport.Width / 2, fullViewport.Y, fullViewport.Width / 2, fullViewport.Height);
      context.RenderTarget = originalRenderTarget;
    }

    // LOD distances are evaluated relative to the current camera.
    context.LodCameraNode = context.CameraNode;

    // Get all scene nodes which overlap the camera frustum.
    CustomSceneQuery sceneQuery = Scene.Query<CustomSceneQuery>(context.CameraNode, context);

    // Render the scene nodes of the sceneQuery.
    RenderScene(sceneQuery, context, true, true, true, true);

    // ----- Copy image of first camera.
    if (i == 1)
    {
      // Copy the image of the first camera from the temporary render target
      // into the left half of the final render target (the back buffer).
      context.Viewport = fullViewport;
      graphicsDevice.Viewport = fullViewport;
      SpriteBatch.Begin(SpriteSortMode.Immediate, BlendState.Opaque, SamplerState.PointClamp, DepthStencilState.None, RasterizerState.CullNone);
      SpriteBatch.Draw(
        renderTargetA,
        new Rectangle(0, 0, fullViewport.Width / 2, fullViewport.Height),
        Color.White);
      SpriteBatch.End();

      // Return the temporary render target to the pool.
      renderTargetPool.Recycle(renderTargetA);
    }
  }

  // Clean-up: reset the render context so no stale references remain.
  context.Scene = null;
  context.CameraNode = null;
  context.LodCameraNode = null;
  context.RenderPass = null;
}
// Renders the graphics screen. - This method is called in GraphicsManager.Render().
// Executes the full deferred rendering pipeline for the active camera:
// cloud maps -> G-buffer -> shadows -> light buffer -> material pass ->
// occlusion queries -> sky/fog -> forward alpha-blended pass -> post-processing ->
// lens flares -> optional Z-buffer restore -> debug output.
// The statement order is load-bearing: later passes read the intermediate
// render targets produced by earlier ones.
protected override void OnRender(RenderContext context)
{
  // Abort if no active camera is set.
  if (ActiveCameraNode == null)
  {
    return;
  }

  var renderTargetPool = GraphicsService.RenderTargetPool;
  var graphicsDevice = GraphicsService.GraphicsDevice;

  // Remember the final output target/viewport; intermediate passes change them.
  var screenRenderTarget = context.RenderTarget;
  var viewport = context.Viewport;

  // All intermediate render targets have the size of the target viewport.
  int width = context.Viewport.Width;
  int height = context.Viewport.Height;
  context.Viewport = new Viewport(0, 0, width, height);

  // Our scene and the camera must be set in the render context. This info is
  // required by many renderers.
  context.Scene = Scene;
  context.CameraNode = ActiveCameraNode;

  // LOD (level of detail) settings are also specified in the context.
  context.LodCameraNode = ActiveCameraNode;
  context.LodHysteresis = 0.5f;
  context.LodBias = EnableLod ? 1.0f : 0.0f;
  context.LodBlendingEnabled = false;

  // Get all scene nodes which overlap the camera frustum.
  CustomSceneQuery sceneQuery = Scene.Query<CustomSceneQuery>(ActiveCameraNode, context);

  // Generate cloud maps.
  // (Note: Only necessary if LayeredCloudMaps are used. If the cloud maps are
  // static and the settings do not change, it is not necessary to generate the
  // cloud maps in every frame. But in this example we use animated cloud maps.)
  _cloudMapRenderer.Render(sceneQuery.SkyNodes, context);

  // ----- G-Buffer Pass
  // The GBufferRenderer creates context.GBuffer0 and context.GBuffer1.
  _gBufferRenderer.Render(sceneQuery.RenderableNodes, sceneQuery.DecalNodes, context);

  // ----- Shadow Pass
  // The ShadowMapRenderer renders the shadow maps which are stored in the light nodes.
  context.RenderPass = "******";
  _shadowMapRenderer.Render(sceneQuery.Lights, context);
  context.RenderPass = null;

  // The ShadowMaskRenderer renders the shadows and stores them in one or more render
  // targets ("shadows masks").
  _shadowMaskRenderer.Render(sceneQuery.Lights, context);

  // In this render pipeline we do not need most shadow maps anymore and can
  // recycle them. The exception is the DirectionalLight shadow map which
  // might still be needed for forward rendering of alpha-blended objects.
  foreach (var node in sceneQuery.Lights)
  {
    var lightNode = (LightNode)node;
    if (lightNode.Shadow != null && !(lightNode.Light is DirectionalLight))
    {
      renderTargetPool.Recycle(lightNode.Shadow.ShadowMap);
      lightNode.Shadow.ShadowMap = null;
    }
  }

  // ----- Light Buffer Pass
  // The LightBufferRenderer creates context.LightBuffer0 (diffuse light) and
  // context.LightBuffer1 (specular light).
  LightBufferRenderer.Render(sceneQuery.Lights, context);

  // ----- Material Pass
  // In the material pass we render all meshes and decals into a single full-screen
  // render target. The shaders combine the material properties (diffuse texture, etc.)
  // with the light buffer info.
  context.RenderTarget = renderTargetPool.Obtain2D(new RenderTargetFormat(width, height, false, SurfaceFormat.HdrBlendable, DepthFormat.Depth24Stencil8));
  graphicsDevice.SetRenderTarget(context.RenderTarget);
  context.Viewport = graphicsDevice.Viewport;
  graphicsDevice.Clear(Color.Black);
  graphicsDevice.DepthStencilState = DepthStencilState.Default;
  graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
  graphicsDevice.BlendState = BlendState.Opaque;
  context.RenderPass = "******";
  _meshRenderer.Render(sceneQuery.RenderableNodes, context);
  _decalRenderer.Render(sceneQuery.DecalNodes, context);
  context.RenderPass = null;

  // The meshes rendered in the last step might use additional floating-point
  // textures (e.g. the light buffers) in the different graphics texture stages.
  // We reset the texture stages (setting all GraphicsDevice.Textures to null),
  // otherwise XNA might throw exceptions.
  graphicsDevice.ResetTextures();

  // ----- Occlusion Queries
  _lensFlareRenderer.UpdateOcclusion(sceneQuery.LensFlareNodes, context);

  // ----- Sky
  _skyRenderer.Render(sceneQuery.SkyNodes, context);

  // ----- Fog
  _fogRenderer.Render(sceneQuery.FogNodes, context);

  // ----- Forward Rendering of Alpha-Blended Meshes and Particles
  graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
  graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
  graphicsDevice.BlendState = BlendState.AlphaBlend;
  context.RenderPass = "******";
  AlphaBlendSceneRenderer.Render(sceneQuery.RenderableNodes, context, RenderOrder.BackToFront);
  context.RenderPass = null;
  graphicsDevice.ResetTextures();

  // The shadow maps could be used by some shaders of the alpha-blended
  // objects - but now, we can recycle all shadow maps.
  foreach (var node in sceneQuery.Lights)
  {
    var lightNode = (LightNode)node;
    if (lightNode.Shadow != null)
    {
      renderTargetPool.Recycle(lightNode.Shadow.ShadowMap);
      lightNode.Shadow.ShadowMap = null;
    }
  }

  // ----- Post Processors
  // The post-processors modify the scene image and the result is written into
  // the final render target - which is usually the back buffer (but this could
  // also be another off-screen render target used in another graphics screen).
  context.SourceTexture = context.RenderTarget;
  context.RenderTarget = screenRenderTarget;
  context.Viewport = viewport;
  PostProcessors.Process(context);
  renderTargetPool.Recycle((RenderTarget2D)context.SourceTexture);
  context.SourceTexture = null;

  // ----- Lens Flares
  _lensFlareRenderer.Render(sceneQuery.LensFlareNodes, context);

  // ----- Optional: Restore the Z-Buffer
  // Currently, the hardware depth buffer is not initialized with useful data because
  // every time we change the render target, XNA deletes the depth buffer. If we want
  // the debug rendering to use correct depth buffer, we can restore the depth buffer
  // using the RebuildZBufferRenderer. If we remove this step, then the DebugRenderer
  // graphics will overlay the whole 3D scene.
  _rebuildZBufferRenderer.Render(context, true);

  // ----- Debug Output
  // Render debug info added by game objects.
  DebugRenderer.Render(context);

  // ----- Draw Reticle
  if (DrawReticle)
  {
    _spriteBatch.Begin(SpriteSortMode.Immediate, BlendState.AlphaBlend);
    _spriteBatch.Draw(
      _reticle,
      new Vector2(viewport.Width / 2 - _reticle.Width / 2, viewport.Height / 2 - _reticle.Height / 2),
      Color.Black);
    _spriteBatch.End();
  }

  // Render intermediate render targets for debugging.
  // We do not use the public DebugRenderer here because the public DebugRenderer
  // might not be cleared every frame (the game logic can choose how it wants to
  // use the public renderer).
  if (VisualizeIntermediateRenderTargets)
  {
    _internalDebugRenderer.DrawTexture(context.GBuffer0, new Rectangle(0, 0, 200, 200));
    _internalDebugRenderer.DrawTexture(context.GBuffer1, new Rectangle(200, 0, 200, 200));
    _internalDebugRenderer.DrawTexture(context.LightBuffer0, new Rectangle(400, 0, 200, 200));
    _internalDebugRenderer.DrawTexture(context.LightBuffer1, new Rectangle(600, 0, 200, 200));
    for (int i = 0; i < _shadowMaskRenderer.ShadowMasks.Count; i++)
    {
      var shadowMask = _shadowMaskRenderer.ShadowMasks[i];
      if (shadowMask != null)
      {
        _internalDebugRenderer.DrawTexture(shadowMask, new Rectangle((i) * 200, 200, 200, 200));
      }
    }
    _internalDebugRenderer.Render(context);
    _internalDebugRenderer.Clear();
  }

  // ----- Clean-up
  // It is very important to give every intermediate render target back to the
  // render target pool!
  renderTargetPool.Recycle(context.GBuffer0);
  context.GBuffer0 = null;
  renderTargetPool.Recycle(context.GBuffer1);
  context.GBuffer1 = null;
  renderTargetPool.Recycle((RenderTarget2D)context.Data[RenderContextKeys.DepthBufferHalf]);
  context.Data.Remove(RenderContextKeys.DepthBufferHalf);
  renderTargetPool.Recycle(context.LightBuffer0);
  context.LightBuffer0 = null;
  renderTargetPool.Recycle(context.LightBuffer1);
  context.LightBuffer1 = null;
  _shadowMaskRenderer.RecycleShadowMasks();

  // Reset the render context so no stale references remain for the next frame.
  context.Scene = null;
  context.CameraNode = null;
  context.LodHysteresis = 0;
  context.LodCameraNode = null;
  context.RenderPass = null;
}
// Renders the graphics screen. - This method is called in GraphicsManager.Render().
// First runs all preprocessing renderers (off-screen targets: cloud maps, ocean
// waves, scene captures, planar reflections), then renders the scene itself via
// RenderScene. Preprocessing must run before the main scene rendering because
// the scene pass samples the textures produced here.
protected override void OnRender(RenderContext context)
{
  // Abort if no active camera is set.
  if (ActiveCameraNode == null)
  {
    return;
  }

  // Our scene and the camera must be set in the render context. This info is
  // required by many renderers.
  context.Scene = Scene;
  context.CameraNode = ActiveCameraNode;

  // LOD (level of detail) settings are also specified in the context.
  context.LodCameraNode = ActiveCameraNode;
  context.LodHysteresis = 0.5f;
  context.LodBias = EnableLod ? 1.0f : 0.0f;
  context.LodBlendingEnabled = false;

  // ----- Preprocessing
  // For some scene nodes we have to update some off-screen render targets before the
  // actual scene is rendered.
  //
  // We only have to do this for the scene nodes which are visible
  // by the camera frustum:
  PreprocessingSceneQuery preprocessingQuery = Scene.Query<PreprocessingSceneQuery>(context.CameraNode, context);

  // Generate cloud maps.
  // Only necessary if LayeredCloudMaps are used. If the cloud maps are static
  // and the settings do not change, it is not necessary to generate the
  // cloud maps in every frame. But in the SkySample we use animated cloud maps.
  // The CloudMapRenderer can be called several times per frame, it will only
  // do the work once per frame.
  // See also SkySample.
  _cloudMapRenderer.Render(preprocessingQuery.CloudLayerNodes, context);

  // Compute ocean waves.
  // Only necessary if WaterNodes with OceanWaves are used.
  _waterWavesRenderer.Render(preprocessingQuery.WaterNodes, context);

  // Perform render-to-texture operations.
  // Only necessary if SceneCaptureNodes are used.
  // See also SceneCapture2DSample.
  _sceneCaptureRenderer.Render(preprocessingQuery.SceneCaptureNodes, context);

  // Render reflections.
  // Only necessary if PlanarReflectionNodes are used.
  // See also PlanarReflectionSample.
  _planarReflectionRenderer.Render(preprocessingQuery.PlanarReflectionNodes, context);

  // ----- Scene Rendering
  // Get all scene nodes which overlap the camera frustum.
  CustomSceneQuery sceneQuery = Scene.Query<CustomSceneQuery>(context.CameraNode, context);

  // Render the scene nodes of the sceneQuery
  // (with post-processing, lens flares, debug rendering and reticle).
  RenderScene(sceneQuery, context, true, true, true, true);

  // ----- Clean-up
  // Reset the render context so no stale references remain.
  context.Scene = null;
  context.CameraNode = null;
  context.LodCameraNode = null;
  context.LodHysteresis = 0;
}
//--------------------------------------------------------------
#region Creation & Cleanup
//--------------------------------------------------------------

// Creates the deferred graphics screen: wires up all scene-node renderers,
// the shadow/G-buffer/light-buffer renderers, the post-processor chain
// (HDR + underwater effect), and the debug renderer. Content (reticle texture,
// GUI sprite font) is loaded from the content managers supplied via services.
public DeferredGraphicsScreen(IServiceLocator services) : base(services.GetInstance<IGraphicsService>())
{
  var contentManager = services.GetInstance<ContentManager>();

  SpriteBatch = new SpriteBatch(GraphicsService.GraphicsDevice);

  // Let's create the necessary scene node renderers:
  // The current sample contains MeshNodes (opaque and transparent), DecalNodes
  // and ParticleSystemNodes (transparent).
  MeshRenderer = new MeshRenderer();
  _decalRenderer = new DecalRenderer(GraphicsService);
  _billboardRenderer = new BillboardRenderer(GraphicsService, 2048)
  {
    EnableSoftParticles = true,

    // If you have an extreme amount of particles that cover the entire screen,
    // you can turn on offscreen rendering to improve performance.
    //EnableOffscreenRendering = true,
  };

  // The _alphaBlendSceneRenderer combines all renderers for transparent
  // (= alpha blended) objects.
  AlphaBlendSceneRenderer = new SceneRenderer();
  AlphaBlendSceneRenderer.Renderers.Add(MeshRenderer);
  AlphaBlendSceneRenderer.Renderers.Add(_billboardRenderer);
  AlphaBlendSceneRenderer.Renderers.Add(new WaterRenderer(GraphicsService));

#if !TRIAL
  // The FogSphereSample is not included in the trial version.
  AlphaBlendSceneRenderer.Renderers.Add(new FogSphereRenderer(GraphicsService));
#endif

  // Renderer for cloud maps. (Only necessary if LayeredCloudMaps are used.)
  _cloudMapRenderer = new CloudMapRenderer(GraphicsService);

  // Renderer for SceneCaptureNodes. See also SceneCapture2DSample.
  // In the constructor we specify a method is called in SceneCaptureRenderer.Render()
  // when the scene must be rendered for the SceneCaptureNodes.
  _sceneCaptureRenderer = new SceneCaptureRenderer(context =>
  {
    // Get scene nodes which are visible by the current camera.
    CustomSceneQuery sceneQuery = Scene.Query<CustomSceneQuery>(context.CameraNode, context);
    // Render scene (with post-processing, with lens flares, no debug rendering, no reticle).
    RenderScene(sceneQuery, context, true, true, false, false);
  });

  // Renderer for PlanarReflectionNodes. See also PlanarReflectionSample.
  // In the constructor we specify a method is called in PlanarReflectionRenderer.Render()
  // to create the reflection images.
  _planarReflectionRenderer = new PlanarReflectionRenderer(context =>
  {
    // Get scene nodes which are visible by the current camera.
    CustomSceneQuery sceneQuery = Scene.Query<CustomSceneQuery>(context.CameraNode, context);

    // Planar reflections are often for WaterNodes. These nodes should not be rendered
    // into their own reflection map. But when the water surface is displaced by waves,
    // some waves could be visible in the reflection.
    // Simple solution: Do not render any water nodes into the reflection map.
    // (Entries are nulled instead of removed; consumers must tolerate null entries.)
    for (int i = 0; i < sceneQuery.RenderableNodes.Count; i++)
    {
      if (sceneQuery.RenderableNodes[i] is WaterNode)
      {
        sceneQuery.RenderableNodes[i] = null;
      }
    }

    // Render scene (no post-processing, no lens flares, no debug rendering, no reticle).
    RenderScene(sceneQuery, context, false, false, false, false);
  });

  // Renderer for ocean waves. (Only necessary if WaterNodes with OceanWaves are used.)
  _waterWavesRenderer = new WaterWavesRenderer(GraphicsService);

  // Shadows
  _shadowMapRenderer = new ShadowMapRenderer(MeshRenderer);
  _shadowMaskRenderer = new ShadowMaskRenderer(GraphicsService, 2);

  // Renderers which create the intermediate render targets:
  // Those 2 renderers are implemented in this sample. Those functions could
  // be implemented directly in this class but we have created separate classes
  // to make the code more readable.
  _gBufferRenderer = new GBufferRenderer(GraphicsService, MeshRenderer, _decalRenderer);
  LightBufferRenderer = new LightBufferRenderer(GraphicsService);

  // Other specialized renderers:
  _lensFlareRenderer = new LensFlareRenderer(GraphicsService, SpriteBatch);
  _skyRenderer = new SkyRenderer(GraphicsService);
  _fogRenderer = new FogRenderer(GraphicsService);
  _internalDebugRenderer = new DebugRenderer(GraphicsService, SpriteBatch, null);
  _rebuildZBufferRenderer = new RebuildZBufferRenderer(GraphicsService);

  Scene = new Scene();

  // This screen needs a HDR filter to map high dynamic range values back to
  // low dynamic range (LDR).
  PostProcessors = new PostProcessorChain(GraphicsService);
  PostProcessors.Add(new HdrFilter(GraphicsService)
  {
    EnableBlueShift = true,
    BlueShiftCenter = 0.00007f,
    BlueShiftRange = 0.5f,
    BlueShiftColor = new Vector3F(0, 0, 2f),
    MinExposure = 0,
    MaxExposure = 10,
    BloomIntensity = 1,
    BloomThreshold = 0.6f,
  });
  _underwaterPostProcessor = new UnderwaterPostProcessor(GraphicsService, contentManager);
  PostProcessors.Add(_underwaterPostProcessor);

  // Use 2D texture for reticle.
  _reticle = contentManager.Load<Texture2D>("Reticle");

  // Use the sprite font of the GUI.
  var uiContentManager = services.GetInstance<ContentManager>("UIContent");
  var spriteFont = uiContentManager.Load<SpriteFont>("Default");
  DebugRenderer = new DebugRenderer(GraphicsService, SpriteBatch, spriteFont)
  {
    DefaultColor = new Color(0, 0, 0),
    DefaultTextPosition = new Vector2F(10),
  };

  EnableLod = true;
}
// Renders the scene for two cameras side-by-side (split screen), running the
// full deferred pipeline (G-buffer, shadows, lights, material pass, forward
// alpha-blended pass, post-processing) once per camera. Camera A renders into
// a temporary half-width render target; camera B renders into the right half
// of the final render target; the camera-A image is copied into the left half
// at the end.
protected override void OnRender(RenderContext context)
{
  // This screen expects two cameras.
  if (ActiveCameraNodeA == null || ActiveCameraNodeB == null)
  {
    return;
  }

  var renderTargetPool = GraphicsService.RenderTargetPool;
  var graphicsDevice = GraphicsService.GraphicsDevice;
  var originalRenderTarget = context.RenderTarget;
  var fullViewport = context.Viewport;

  // Get a render target for the first camera. Use half the width because we split
  // the screen horizontally.
  var format = new RenderTargetFormat(context.RenderTarget)
  {
    Width = fullViewport.Width / 2
  };
  var renderTargetA = renderTargetPool.Obtain2D(format);

  // Scene and LOD settings shared by both cameras.
  context.Scene = Scene;
  context.LodHysteresis = 0.5f;
  context.LodBias = 1.0f;
  context.LodBlendingEnabled = true;

  // One full pipeline pass per camera.
  for (int i = 0; i < 2; i++)
  {
    Viewport halfViewport;
    RenderTarget2D currentRenderTarget;
    if (i == 0)
    {
      // The first camera renders into renderTargetA.
      context.CameraNode = ActiveCameraNodeA;
      halfViewport = new Viewport(0, 0, fullViewport.Width / 2, fullViewport.Height);
      currentRenderTarget = renderTargetA;
    }
    else
    {
      // The second camera renders into the right half of the final render target.
      context.CameraNode = ActiveCameraNodeB;
      halfViewport = new Viewport(fullViewport.X + fullViewport.Width / 2, fullViewport.Y, fullViewport.Width / 2, fullViewport.Height);
      currentRenderTarget = originalRenderTarget;
    }

    // LOD distances are evaluated relative to the current camera.
    context.LodCameraNode = context.CameraNode;

    // Get all scene nodes which overlap the camera frustum.
    CustomSceneQuery sceneQuery = Scene.Query<CustomSceneQuery>(context.CameraNode, context);

    // Cloud maps need to be updated only once (they are camera-independent).
    if (i == 0)
    {
      _cloudMapRenderer.Render(sceneQuery.SkyNodes, context);
    }

    // ----- G-Buffer Pass
    _gBufferRenderer.Render(sceneQuery.RenderableNodes, sceneQuery.DecalNodes, context);

    // ----- Shadow Pass
    context.RenderPass = "******";
    _shadowMapRenderer.Render(sceneQuery.Lights, context);
    context.RenderPass = null;

    // The shadow masks are created for the current camera's half viewport.
    context.Viewport = halfViewport;
    _shadowMaskRenderer.Render(sceneQuery.Lights, context);

    // Recycle shadow maps.
    foreach (var node in sceneQuery.Lights)
    {
      var lightNode = (LightNode)node;
      if (lightNode.Shadow != null)
      {
        renderTargetPool.Recycle(lightNode.Shadow.ShadowMap);
        lightNode.Shadow.ShadowMap = null;
      }
    }

    // ----- Light Buffer Pass
    _lightBufferRenderer.Render(sceneQuery.Lights, context);

    // ----- Material Pass
    // Render all meshes and decals into a single half-viewport-sized HDR target.
    context.RenderTarget = renderTargetPool.Obtain2D(new RenderTargetFormat(
      context.Viewport.Width, context.Viewport.Height, false, SurfaceFormat.HdrBlendable, DepthFormat.Depth24Stencil8));
    graphicsDevice.SetRenderTarget(context.RenderTarget);
    context.Viewport = graphicsDevice.Viewport;
    graphicsDevice.Clear(Color.Black);
    graphicsDevice.DepthStencilState = DepthStencilState.Default;
    graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
    graphicsDevice.BlendState = BlendState.Opaque;
    context.RenderPass = "******";
    _meshRenderer.Render(sceneQuery.RenderableNodes, context);
    _decalRenderer.Render(sceneQuery.DecalNodes, context);
    context.RenderPass = null;

    // Reset texture stages so XNA does not complain about bound FP textures.
    graphicsDevice.ResetTextures();

    // ----- Occlusion Queries
    _lensFlareRenderer.UpdateOcclusion(sceneQuery.LensFlareNodes, context);

    // ----- Sky
    _skyRenderer.Render(sceneQuery.SkyNodes, context);

    // ----- Fog
    _fogRenderer.Render(sceneQuery.FogNodes, context);

    // ----- Forward Rendering of Alpha-Blended Meshes and Particles
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
    graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
    graphicsDevice.BlendState = BlendState.AlphaBlend;
    context.RenderPass = "******";
    _transparentSceneRenderer.Render(sceneQuery.RenderableNodes, context, RenderOrder.BackToFront);
    context.RenderPass = null;
    graphicsDevice.ResetTextures();

    // ----- Lens Flares
    _lensFlareRenderer.Render(sceneQuery.LensFlareNodes, context);

    // ----- Post Processors
    // The post-processed image is written into the current camera's target
    // (renderTargetA for camera A, the final target for camera B).
    context.SourceTexture = context.RenderTarget;
    context.RenderTarget = currentRenderTarget;
    context.Viewport = halfViewport;
    PostProcessors.Process(context);
    renderTargetPool.Recycle((RenderTarget2D)context.SourceTexture);
    context.SourceTexture = null;

    // ----- Optional: Restore the Z-Buffer
    _rebuildZBufferRenderer.Render(context, true);

    // ----- Debug Output
    DebugRenderer.Render(context);

    // ----- Draw Reticle (centered in the current half viewport)
    if (DrawReticle)
    {
      _spriteBatch.Begin(SpriteSortMode.Immediate, BlendState.AlphaBlend);
      _spriteBatch.Draw(
        _reticle,
        new Vector2(halfViewport.Width / 2 - _reticle.Width / 2, halfViewport.Height / 2 - _reticle.Height / 2),
        Color.Black);
      _spriteBatch.End();
    }

    // ----- Clean-up
    // Return every intermediate render target of this camera pass to the pool.
    renderTargetPool.Recycle(context.GBuffer0);
    context.GBuffer0 = null;
    renderTargetPool.Recycle(context.GBuffer1);
    context.GBuffer1 = null;
    renderTargetPool.Recycle((RenderTarget2D)context.Data[RenderContextKeys.DepthBufferHalf]);
    context.Data.Remove(RenderContextKeys.DepthBufferHalf);
    renderTargetPool.Recycle(context.LightBuffer0);
    context.LightBuffer0 = null;
    renderTargetPool.Recycle(context.LightBuffer1);
    context.LightBuffer1 = null;
    _shadowMaskRenderer.RecycleShadowMasks();

    // ----- Copy image of first camera.
    if (i == 1)
    {
      // Copy the image of the first camera from the temporary render target
      // into the left half of the final render target (the back buffer).
      context.Viewport = fullViewport;
      graphicsDevice.Viewport = fullViewport;
      _spriteBatch.Begin(SpriteSortMode.Immediate, BlendState.Opaque, SamplerState.PointClamp, DepthStencilState.None, RasterizerState.CullNone);
      _spriteBatch.Draw(
        renderTargetA,
        new Rectangle(0, 0, fullViewport.Width / 2, fullViewport.Height),
        Color.White);
      _spriteBatch.End();

      renderTargetPool.Recycle(renderTargetA);
    }
  }

  // Reset the render context so no stale references remain.
  context.Scene = null;
  context.CameraNode = null;
  context.LodCameraNode = null;
  context.RenderPass = null;
}