public SceneCaptureCubeSample(Microsoft.Xna.Framework.Game game)
  : base(game)
{
  SampleFramework.IsMouseVisible = false;

  // Create a graphics screen. This screen has to call the SceneCaptureRenderer
  // to handle the SceneCaptureNodes!
  _graphicsScreen = new DeferredGraphicsScreen(Services) { DrawReticle = true };
  GraphicsService.Screens.Insert(0, _graphicsScreen);
  GameObjectService.Objects.Add(new DeferredGraphicsOptionsObject(Services));

  Services.Register(typeof(DebugRenderer), null, _graphicsScreen.DebugRenderer);
  Services.Register(typeof(IScene), null, _graphicsScreen.Scene);

  // Add gravity and damping to the physics Simulation.
  Simulation.ForceEffects.Add(new Gravity());
  Simulation.ForceEffects.Add(new Damping());

  // Add a custom game object which controls the camera.
  var cameraGameObject = new CameraObject(Services);
  GameObjectService.Objects.Add(cameraGameObject);
  _graphicsScreen.ActiveCameraNode = cameraGameObject.CameraNode;

  // More standard objects.
  GameObjectService.Objects.Add(new GrabObject(Services));
  GameObjectService.Objects.Add(new ObjectCreatorObject(Services));
  //GameObjectService.Objects.Add(new StaticSkyObject(Services));
  GameObjectService.Objects.Add(new DynamicSkyObject(Services, true, false, true));
  GameObjectService.Objects.Add(new GroundObject(Services));
  GameObjectService.Objects.Add(new DudeObject(Services));
  GameObjectService.Objects.Add(new DynamicObject(Services, 1));
  GameObjectService.Objects.Add(new DynamicObject(Services, 2));
  GameObjectService.Objects.Add(new DynamicObject(Services, 5));
  GameObjectService.Objects.Add(new DynamicObject(Services, 6));
  GameObjectService.Objects.Add(new DynamicObject(Services, 7));
  GameObjectService.Objects.Add(new FogObject(Services) { AttachToCamera = true });
  GameObjectService.Objects.Add(new LavaBallsObject(Services));

  // Add a few palm trees.
  Random random = new Random(12345);
  for (int i = 0; i < 10; i++)
  {
    Vector3F position = new Vector3F(random.NextFloat(-3, -8), 0, random.NextFloat(0, -5));
    Matrix33F orientation = Matrix33F.CreateRotationY(random.NextFloat(0, ConstantsF.TwoPi));
    float scale = random.NextFloat(0.5f, 1.2f);
    GameObjectService.Objects.Add(
      new StaticObject(Services, "PalmTree/palm_tree", scale, new Pose(position, orientation)));
  }

  // Load the "Bubble" mesh and place it at a fixed position in the scene.
  var modelNode = ContentManager.Load<ModelNode>("Bubble/Bubble");
  var meshNode = modelNode.GetDescendants().OfType<MeshNode>().First().Clone();
  meshNode.PoseWorld = new Pose(new Vector3F(0, 1, 0));
  _graphicsScreen.Scene.Children.Add(meshNode);

  // Surface of the mesh should reflect the scene in real-time. Reflections are
  // created using environment mapping: The scene is rendered into a cube map,
  // which is then applied to the mesh.
  // To render the scene into a cube map, we need to define a CameraNode and a
  // SceneCaptureNode: The CameraNode defines the point from where the scene is
  // captured. The SceneCaptureNode defines where and in which format the captured
  // image is needed.

  // Attach a camera to the center of the mesh.
  var projection = new PerspectiveProjection();
  projection.SetFieldOfView(ConstantsF.PiOver2, 1, 0.1f, 20);
  var captureCameraNode = new CameraNode(new Camera(projection));
  meshNode.Children = new SceneNodeCollection { captureCameraNode };

  // Attach a SceneCaptureNode with a cube map render target to the mesh.
  var renderToTexture = new RenderToTexture
  {
    Texture = new RenderTargetCube(
      GraphicsService.GraphicsDevice,
      256,
      true,
      SurfaceFormat.Color,
      DepthFormat.None),
  };
  var sceneCaptureNode = new SceneCaptureNode(renderToTexture)
  {
    Shape = meshNode.Shape,
    CameraNode = captureCameraNode,
  };
  meshNode.Children.Add(sceneCaptureNode);

  // The bubble model uses a special effect and is rendered in the "AlphaBlend"
  // render pass. Let's modify the effect parameters to use the created cube map
  // as the reflection map of the bubble.
  var effectBinding = meshNode.Mesh.Materials[0]["AlphaBlend"];
  effectBinding.Set("ReflectionStrength", 0.5f);
  effectBinding.Set("RefractionStrength", 0.0f);
  effectBinding.Set("FresnelBias", 1.0f);
  effectBinding.Set("BlendMode", 1.0f);
  effectBinding.Set("Alpha", 1.0f);
  effectBinding.Set("CustomEnvironmentMap", (TextureCube)renderToTexture.Texture);
}
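
// Optional illustration (not part of the original sample): the "AlphaBlend" parameters set
// above can also be changed per frame with the same EffectBinding.Set API, e.g. to fade the
// reflection in and out. This is only a minimal sketch; _bubbleMaterialBinding is a
// hypothetical field that would have to store the binding obtained in the constructor.
private void AnimateReflection(GameTime gameTime)
{
  // Oscillate the reflection strength between 0.0 and 0.5.
  float t = (float)gameTime.TotalGameTime.TotalSeconds;
  _bubbleMaterialBinding.Set("ReflectionStrength", 0.25f + 0.25f * (float)Math.Sin(t));
}
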
/// <summary>
/// Renders the environment maps for the image-based lights.
/// </summary>
/// <remarks>
/// This method uses the current DeferredGraphicsScreen to render new environment maps at
/// runtime. The DeferredGraphicsScreen has a SceneCaptureRenderer which we can use to
/// capture environment maps of the current scene.
/// To capture new environment maps, the flag _updateEnvironmentMaps must be set to true.
/// When this flag is set, SceneCaptureNodes are added to the scene. The next time the
/// graphics screen calls the SceneCaptureRenderer, the new environment maps are captured.
/// The flag _updateEnvironmentMaps remains true until the new environment maps are available.
/// This method checks the SceneCaptureNode.LastFrame property to determine whether new
/// environment maps have been computed. Usually, the environment maps will be available in
/// the next frame. (However, the XNA Game class can skip graphics rendering if the game is
/// running slowly. In that case we have to wait more than one frame.)
/// While the environment maps are being rendered, the image-based lights are disabled to
/// capture only the scene with ambient and directional lights. Dynamic objects are also
/// disabled to capture only the static scene.
/// </remarks>
private void UpdateEnvironmentMaps()
{
  if (!_updateEnvironmentMaps)
    return;

  // One-time initializations:
  if (_sceneCaptureNodes[0] == null)
  {
    // Create cube maps and scene capture nodes.
    // (Note: A cube map size of 256 is enough for surfaces with a specular power
    // in the range [0, 200000].)
    for (int i = 0; i < _sceneCaptureNodes.Length; i++)
    {
      var renderTargetCube = new RenderTargetCube(
        GraphicsService.GraphicsDevice,
        256,
        true,
        SurfaceFormat.Color,
        DepthFormat.None);

      var renderToTexture = new RenderToTexture { Texture = renderTargetCube };

      var projection = new PerspectiveProjection();
      projection.SetFieldOfView(ConstantsF.PiOver2, 1, 1, 100);

      _sceneCaptureNodes[i] = new SceneCaptureNode(renderToTexture)
      {
        CameraNode = new CameraNode(new Camera(projection))
        {
          PoseWorld = _lightNodes[i].PoseWorld,
        },
      };

      _imageBasedLights[i].Texture = renderTargetCube;
    }

    // We use a ColorEncoder to encode an HDR image in a normal Color texture.
    _colorEncoder = new ColorEncoder(GraphicsService)
    {
      SourceEncoding = ColorEncoding.Rgb,
      TargetEncoding = ColorEncoding.Rgbm,
    };

    // The SceneCaptureRenderer has a render callback which defines what is rendered
    // into the scene capture render targets.
    _graphicsScreen.SceneCaptureRenderer.RenderCallback = context =>
    {
      var graphicsDevice = GraphicsService.GraphicsDevice;
      var renderTargetPool = GraphicsService.RenderTargetPool;

      // Get the scene nodes which are visible to the current camera.
      CustomSceneQuery sceneQuery = context.Scene.Query<CustomSceneQuery>(context.CameraNode, context);

      // The final image has to be rendered into this render target.
      var ldrTarget = context.RenderTarget;

      // Use an intermediate HDR render target with the same resolution as the final target.
      var format = new RenderTargetFormat(ldrTarget)
      {
        SurfaceFormat = SurfaceFormat.HdrBlendable,
        DepthStencilFormat = DepthFormat.Depth24Stencil8
      };
      var hdrTarget = renderTargetPool.Obtain2D(format);

      graphicsDevice.SetRenderTarget(hdrTarget);
      context.RenderTarget = hdrTarget;

      // Render scene (without post-processing, without lens flares, no debug rendering, no reticle).
      _graphicsScreen.RenderScene(sceneQuery, context, false, false, false, false);

      // Convert the HDR image to an RGBM image.
      context.SourceTexture = hdrTarget;
      context.RenderTarget = ldrTarget;
      _colorEncoder.Process(context);
      context.SourceTexture = null;

      // Clean up.
      renderTargetPool.Recycle(hdrTarget);
      context.RenderTarget = ldrTarget;
    };
  }

  if (_sceneCaptureNodes[0].Parent == null)
  {
    // Add the scene capture nodes to the scene.
    for (int i = 0; i < _sceneCaptureNodes.Length; i++)
      _graphicsScreen.Scene.Children.Add(_sceneCaptureNodes[i]);

    // Remember the old time stamp of the nodes.
    _oldEnvironmentMapTimeStamp = _sceneCaptureNodes[0].LastFrame;

    // Disable all lights except ambient and directional lights.
    // We do not capture the image-based lights or any other lights (e.g. point lights)
    // in the cube map.
    foreach (var lightNode in _graphicsScreen.Scene.GetDescendants().OfType<LightNode>())
      lightNode.IsEnabled = (lightNode.Light is AmbientLight) || (lightNode.Light is DirectionalLight);

    // Disable dynamic objects.
    foreach (var node in _graphicsScreen.Scene.GetDescendants())
      if (node is MeshNode || node is LodGroupNode)
        if (!node.IsStatic)
          node.IsEnabled = false;
  }
  else
  {
    // The scene capture nodes are part of the scene. Check if they have been updated.
    if (_sceneCaptureNodes[0].LastFrame != _oldEnvironmentMapTimeStamp)
    {
      // We have new environment maps. Restore the normal scene.
      for (int i = 0; i < _sceneCaptureNodes.Length; i++)
        _graphicsScreen.Scene.Children.Remove(_sceneCaptureNodes[i]);

      _updateEnvironmentMaps = false;

      foreach (var lightNode in _graphicsScreen.Scene.GetDescendants().OfType<LightNode>())
        lightNode.IsEnabled = true;

      foreach (var node in _graphicsScreen.Scene.GetDescendants())
        if (node is MeshNode || node is LodGroupNode)
          if (!node.IsStatic)
            node.IsEnabled = true;
    }
  }
}
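
// A minimal usage sketch (not from the original sample; the key binding and the call site are
// assumptions): _updateEnvironmentMaps is typically set from the per-frame update, and
// UpdateEnvironmentMaps() is called every frame so it can react once the flag is set and
// restore the normal scene after the new maps have been captured.
public override void Update(GameTime gameTime)
{
  // Request new environment maps, e.g. when a (hypothetical) key is pressed.
  if (InputService.IsPressed(Keys.Space, false))
    _updateEnvironmentMaps = true;

  UpdateEnvironmentMaps();

  base.Update(gameTime);
}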