public OcclusionCullingScreen(IServiceLocator services)
  : base(services)
{
  _sceneNodes = new List<SceneNode>();

  // Create new occlusion buffer with default settings.
  OcclusionBuffer = new OcclusionBuffer(GraphicsService);
  OcclusionBuffer.ProgressiveShadowCasterCulling = true;

  EnableCulling = true;

  // Create a second camera for rendering a top-down view of the scene.
  var topDownPerspective = new PerspectiveProjection();
  topDownPerspective.SetFieldOfView(MathHelper.ToRadians(90), 1, 1, 512);
  _topDownCameraNode = new CameraNode(new Camera(topDownPerspective));
  _topDownCameraNode.PoseWorld = new Pose(new Vector3F(-10, 120, -10));
  _topDownCameraNode.LookAt(new Vector3F(-10, 0, -10), Vector3F.UnitZ);

  _sceneQuery = new CustomSceneQuery();
  _debugRenderer = new DebugRenderer(GraphicsService, null);

  // The DigitalRune Profiler is used to measure execution times.
  Profiler.SetFormat("Occlusion.Render", 1e3f, "[ms]");
  Profiler.SetFormat("Occlusion.Query", 1e3f, "[ms]");
}
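The constructor above only sets up the occlusion buffer; the culling itself happens once per frame. The following sketch outlines how such a frame might look. It is illustrative only: the occluder list and the exact Render/Query signatures are assumptions based on the fields and profiler format strings initialized above, not a verbatim excerpt of the sample.

// Hypothetical per-frame culling step (method name and call signatures assumed):
private void CullSceneNodes(IList<SceneNode> occluders, RenderContext context)
{
  if (!EnableCulling)
    return;

  // Render all occluders into the occlusion buffer.
  Profiler.Start("Occlusion.Render");
  OcclusionBuffer.Render(occluders, context);
  Profiler.Stop("Occlusion.Render");

  // Test the scene nodes gathered by the scene query against the buffer.
  Profiler.Start("Occlusion.Query");
  OcclusionBuffer.Query(_sceneNodes, context);
  Profiler.Stop("Occlusion.Query");
}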
//--------------------------------------------------------------

/// <summary>
/// Initializes a new instance of the <see cref="PlanarReflectionNode" /> class.
/// </summary>
/// <param name="renderToTexture">The render texture target.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="renderToTexture"/> is <see langword="null"/>.
/// </exception>
public PlanarReflectionNode(RenderToTexture renderToTexture)
  : base(renderToTexture)
{
  CameraNode = new CameraNode(new Camera(new PerspectiveProjection()));
  FieldOfViewScale = 1;
  _normalLocal = new Vector3F(0, 0, 1);
}
public MyGraphicsScreen(IGraphicsService graphicsService)
  : base(graphicsService)
{
  _meshRenderer = new MeshRenderer();

  var contentManager = ServiceLocator.Current.GetInstance<ContentManager>();
  var spriteFont = contentManager.Load<SpriteFont>("SpriteFont1");
  _debugRenderer = new DebugRenderer(graphicsService, spriteFont);

  Scene = new Scene();

  // Add a camera with a perspective projection.
  var projection = new PerspectiveProjection();
  projection.SetFieldOfView(
    ConstantsF.PiOver4,
    graphicsService.GraphicsDevice.Viewport.AspectRatio,
    0.1f,
    100.0f);
  CameraNode = new CameraNode(new Camera(projection))
  {
    Name = "CameraPerspective",
    PoseWorld = Pose.FromMatrix(
      Matrix44F.CreateLookAt(
        new Vector3F(10, 5, 10),
        new Vector3F(0, 1, 0),
        new Vector3F(0, 1, 0)).Inverse),
  };
  Scene.Children.Add(CameraNode);
}
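The same camera pose can also be set up without building and inverting a look-at matrix. The sketch below assumes the LookAt helper used in the occlusion-culling snippet above (position set first, then LookAt(target, up) keeps that position); it is an alternative for illustration, not part of the original sample.

// Alternative pose setup (assumes the LookAt(target, up) helper shown earlier):
CameraNode.PoseWorld = new Pose(new Vector3F(10, 5, 10));
CameraNode.LookAt(new Vector3F(0, 1, 0), new Vector3F(0, 1, 0));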
/// <overloads>
/// <summary>
/// Gets a scissor rectangle that encloses the specified object.
/// </summary>
/// </overloads>
///
/// <summary>
/// Gets a scissor rectangle that encloses the specified sphere.
/// </summary>
/// <param name="cameraNode">The camera node.</param>
/// <param name="viewport">The viewport.</param>
/// <param name="positionWorld">The sphere center in world space.</param>
/// <param name="radius">The sphere radius.</param>
/// <returns>The scissor rectangle.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="cameraNode"/> is <see langword="null"/>.
/// </exception>
public static Rectangle GetScissorRectangle(CameraNode cameraNode, Viewport viewport,
  Vector3F positionWorld, float radius)
{
  var rectangle = GetViewportRectangle(cameraNode, viewport, positionWorld, radius);
  rectangle.X += viewport.X;
  rectangle.Y += viewport.Y;
  return rectangle;
}
/// <summary>
/// Gets a scissor rectangle that encloses the specified geometric object.
/// </summary>
/// <param name="cameraNode">The camera node.</param>
/// <param name="viewport">The viewport.</param>
/// <param name="geometricObject">The geometric object.</param>
/// <returns>The scissor rectangle.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="cameraNode"/> or <paramref name="geometricObject"/> is
/// <see langword="null"/>.
/// </exception>
public static Rectangle GetScissorRectangle(CameraNode cameraNode, Viewport viewport,
  IGeometricObject geometricObject)
{
  var rectangle = GetViewportRectangle(cameraNode, viewport, geometricObject);
  rectangle.X += viewport.X;
  rectangle.Y += viewport.Y;
  return rectangle;
}
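A typical use of these scissor rectangles is to restrict rasterization to the screen-space bounds of a light before drawing it. The following sketch is hypothetical: the helper name, the light parameters and the placement of the methods in GraphicsHelper (suggested by the unit tests below) are assumptions; only GraphicsDevice.ScissorRectangle and RasterizerState.ScissorTestEnable are standard XNA/MonoGame API.

// Illustrative caller (hypothetical helper, not part of the library):
public static void DrawLightWithScissorTest(GraphicsDevice graphicsDevice,
  CameraNode cameraNode, Vector3F lightPositionWorld, float lightRange)
{
  // Restrict rasterization to the screen-space bounds of the light's sphere of influence.
  var viewport = graphicsDevice.Viewport;
  graphicsDevice.ScissorRectangle =
    GraphicsHelper.GetScissorRectangle(cameraNode, viewport, lightPositionWorld, lightRange);

  // Enable the scissor test. (A real renderer would cache the RasterizerState
  // instead of allocating one per draw call.)
  graphicsDevice.RasterizerState = new RasterizerState { ScissorTestEnable = true };

  // ... issue the draw calls for the light here ...
}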
//--------------------------------------------------------------

/// <summary>
/// Initializes a new instance of the <see cref="CubeMapShadowMapRenderer"/> class.
/// </summary>
/// <param name="renderCallback">
/// The method which renders the scene into the shadow map. Must not be <see langword="null"/>.
/// See <see cref="RenderCallback"/> for more information.
/// </param>
public CubeMapShadowMapRenderer(Func<RenderContext, bool> renderCallback)
{
  if (renderCallback == null)
    throw new ArgumentNullException("renderCallback");

  RenderCallback = renderCallback;
  _perspectiveCameraNode = new CameraNode(new Camera(new PerspectiveProjection()));
}
//--------------------------------------------------------------

/// <summary>
/// Initializes a new instance of the <see cref="CascadedShadowMapRenderer"/> class.
/// </summary>
/// <param name="renderCallback">
/// The method which renders the scene into the shadow map. Must not be <see langword="null"/>.
/// See <see cref="RenderCallback"/> for more information.
/// </param>
public CascadedShadowMapRenderer(Func<RenderContext, bool> renderCallback)
{
  if (renderCallback == null)
    throw new ArgumentNullException("renderCallback");

  RenderCallback = renderCallback;
  _splitVolume = new PerspectiveViewVolume();
  _orthographicCameraNode = new CameraNode(new Camera(new OrthographicProjection()));
}
public CharacterControllerSample(Microsoft.Xna.Framework.Game game)
  : base(game)
{
  SampleFramework.IsMouseVisible = false;
  GraphicsScreen.ClearBackground = true;
  GraphicsScreen.BackgroundColor = Color.CornflowerBlue;

  // Create a camera.
  var projection = new PerspectiveProjection();
  projection.SetFieldOfView(
    ConstantsF.PiOver4,
    GraphicsService.GraphicsDevice.Viewport.AspectRatio,
    0.1f,
    1000.0f);
  _cameraNode = new CameraNode(new Camera(projection));
  GraphicsScreen.CameraNode = _cameraNode;

  // We use one collision domain that computes collision info for all game objects.
  _domain = new CollisionDomain(new CollisionDetection());

  // Create collision objects for a test level.
  CharacterControllerLevel.Load(_domain);

  // Add collision filter:
  // The _domain contains a lot of collision objects for obstacles in the level.
  // We do not need to compute contacts between these static obstacles. To avoid
  // this, the CharacterControllerLevel puts all level collision objects into
  // the collision group 1. We add a broad phase collision filter which filters out
  // collision checks between objects of collision group 1.
  _domain.BroadPhase.Filter = new DelegatePairFilter<CollisionObject>(
    pair =>
    {
      if (pair.First.CollisionGroup == 1 && pair.Second.CollisionGroup == 1)
        return false;

      return true;
    });

  // Create character controller.
  _character = new CharacterController(_domain);
  _character.Position = new Vector3F(0, 0, 1);

  // Create the trigger volume.
  _triggerVolume = new CollisionObject(
    new GeometricObject(new SphereShape(3), new Pose(new Vector3F(-5, 0, 5))))
  {
    // We do not want to compute detailed contact information (contact points, contact
    // normal vectors, etc.). We are only interested in whether the object touches another
    // object or not. Therefore, we set the collision object type to "trigger". Trigger
    // objects are better for performance than normal collision objects. Additionally, the
    // character controller should be able to walk through the trigger volume. The character
    // controller treats objects as solids if it finds contact information (contact positions
    // with contact normal vectors).
    Type = CollisionObjectType.Trigger
  };
  _domain.CollisionObjects.Add(_triggerVolume);
}
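To react to the trigger volume, the sample presumably checks for contact once per frame. The sketch below illustrates the idea only: the HaveContact overload and the character's CollisionObject property are assumptions and may not match the actual sample or library API.

// Hypothetical per-frame trigger check (member names assumed):
private void CheckTriggerVolume()
{
  // Because _triggerVolume is of type Trigger, the domain only tracks whether it
  // touches something; no contact details are computed and the character controller
  // can walk straight through it.
  bool characterIsInTrigger = _domain.HaveContact(_triggerVolume, _character.CollisionObject);
  if (characterIsInTrigger)
  {
    // React to the event, e.g. display a message or open a door.
  }
}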
public SampleGraphicsScreen(IServiceLocator services)
  : base(services.GetInstance<IGraphicsService>())
{
  _sampleFramework = services.GetInstance<SampleFramework>();

  Name = "SampleScreen";
  ClearBackground = false;
  BackgroundColor = new Color(220, 220, 220);
  DrawReticle = false;
  UseFixedWidthFont = false;

  // Use 2D texture for reticle.
  var contentManager = services.GetInstance<ContentManager>();
  _reticle = contentManager.Load<Texture2D>("Reticle");

  // Get the sprite fonts used in the UI theme.
  var uiContentManager = services.GetInstance<ContentManager>("UIContent");
  _defaultFont = uiContentManager.Load<SpriteFont>("UI Themes/BlendBlue/Default");
  _fixedWidthFont = uiContentManager.Load<SpriteFont>("UI Themes/BlendBlue/Console");

  // Set up 2D camera such that (0, 0) is upper, left corner of screen and
  // (screenWidth, screenHeight) is lower, right corner of screen.
  var graphicsDevice = GraphicsService.GraphicsDevice;
  int screenWidth = graphicsDevice.PresentationParameters.BackBufferWidth;
  int screenHeight = graphicsDevice.PresentationParameters.BackBufferHeight;
  var projection = new OrthographicProjection
  {
    Near = 0,
    Far = 2000,
    Left = 0,
    Right = screenWidth,
    Top = 0,
    Bottom = screenHeight,
  };
  var camera = new Camera(projection);
  _cameraNode2D = new CameraNode(camera)
  {
    PoseWorld = new Pose(new Vector3F(0, 0, 1000)),
  };

  // Initialize renderers.
  _spriteBatch = new SpriteBatch(graphicsDevice);
  _meshRenderer = new MeshRenderer();
  _billboardRenderer = new BillboardRenderer(GraphicsService, 2048);
  DebugRenderer2D = new DebugRenderer(GraphicsService, _defaultFont)
  {
    SpriteFont = _defaultFont,
    DefaultColor = new Color(0, 0, 0),
    DefaultTextPosition = new Vector2F(10)
  };
  DebugRenderer = new DebugRenderer(GraphicsService, _defaultFont)
  {
    SpriteFont = _defaultFont,
    DefaultColor = new Color(0, 0, 0),
    DefaultTextPosition = new Vector2F(10)
  };

  Scene = new Scene();
}
public void GetScreenSizeWithOrthographic()
{
  // Camera
  var projection = new OrthographicProjection();
  projection.SetOffCenter(0, 4, 0, 2);
  var camera = new Camera(projection);
  var cameraNode = new CameraNode(camera);
  cameraNode.PoseWorld = new Pose(
    new Vector3F(123, 456, -789),
    Matrix33F.CreateRotation(new Vector3F(1, -2, 3), MathHelper.ToRadians(75)));

  // 2:1 viewport
  var viewport = new Viewport(10, 10, 200, 100);

  // Test object
  var shape = new SphereShape();
  var geometricObject = new GeometricObject(shape);

  // Empty sphere at camera position.
  shape.Radius = 0;
  geometricObject.Pose = cameraNode.PoseWorld;
  Vector2F screenSize = GraphicsHelper.GetScreenSize(cameraNode, viewport, geometricObject);
  Assert.AreEqual(0, screenSize.X);
  Assert.AreEqual(0, screenSize.Y);

  // Empty sphere centered at near plane.
  shape.Radius = 0;
  geometricObject.Pose = cameraNode.PoseWorld * new Pose(new Vector3F(0.123f, -0.543f, -1));
  screenSize = GraphicsHelper.GetScreenSize(cameraNode, viewport, geometricObject);
  Assert.AreEqual(0, screenSize.X);
  Assert.AreEqual(0, screenSize.Y);

  // Create a sphere which has a bounding sphere of ~1 unit diameter:
  // Since the bounding sphere is based on the AABB, we need to make the
  // actual sphere a bit smaller. (The AABB of a sphere with radius r has a
  // diagonal of 2 * r * sqrt(3), so r = 1 / (2 * sqrt(3)) gives a bounding
  // sphere with a diameter of ~1.)
  shape.Radius = 1 / (2 * (float)Math.Sqrt(3)) + Numeric.EpsilonF;

  // Sphere at camera position.
  geometricObject.Pose = cameraNode.PoseWorld;
  screenSize = GraphicsHelper.GetScreenSize(cameraNode, viewport, geometricObject);
  Assert.IsTrue(Numeric.AreEqual(screenSize.X, 50.0f, 10f));
  Assert.IsTrue(Numeric.AreEqual(screenSize.Y, 50.0f, 10f));

  // Sphere at near plane.
  geometricObject.Pose = cameraNode.PoseWorld * new Pose(new Vector3F(0.123f, -0.543f, -1));
  screenSize = GraphicsHelper.GetScreenSize(cameraNode, viewport, geometricObject);
  Assert.IsTrue(Numeric.AreEqual(screenSize.X, 50.0f, 10f));
  Assert.IsTrue(Numeric.AreEqual(screenSize.Y, 50.0f, 10f));

  // Double distance --> same size (orthographic projection).
  geometricObject.Pose = cameraNode.PoseWorld * new Pose(new Vector3F(0.123f, -0.543f, -2));
  screenSize = GraphicsHelper.GetScreenSize(cameraNode, viewport, geometricObject);
  Assert.IsTrue(Numeric.AreEqual(screenSize.X, 50.0f, 10f));
  Assert.IsTrue(Numeric.AreEqual(screenSize.Y, 50.0f, 10f));
}
public SplitScreenSample(Microsoft.Xna.Framework.Game game)
  : base(game)
{
  SampleFramework.IsMouseVisible = false;

  _graphicsScreen = new SplitScreen(Services);
  _graphicsScreen.DrawReticle = true;
  GraphicsService.Screens.Insert(0, _graphicsScreen);

  Services.Register(typeof(DebugRenderer), null, _graphicsScreen.DebugRenderer);
  Services.Register(typeof(IScene), null, _graphicsScreen.Scene);

  // Add gravity and damping to the physics Simulation.
  Simulation.ForceEffects.Add(new Gravity());
  Simulation.ForceEffects.Add(new Damping());

  // Add a custom game object which controls the camera of player A.
  var cameraGameObject = new CameraObject(Services);
  GameObjectService.Objects.Add(cameraGameObject);
  _graphicsScreen.ActiveCameraNode = cameraGameObject.CameraNode;

  var projection = (PerspectiveProjection)cameraGameObject.CameraNode.Camera.Projection;
  projection.SetFieldOfView(
    projection.FieldOfViewY,
    GraphicsService.GraphicsDevice.Viewport.AspectRatio / 2,
    projection.Near,
    projection.Far);
  cameraGameObject.CameraNode.Camera = new Camera(projection);

  // A second camera for player B.
  _cameraNodeB = new CameraNode(cameraGameObject.CameraNode.Camera);
  _graphicsScreen.ActiveCameraNodeB = _cameraNodeB;

  GameObjectService.Objects.Add(new GrabObject(Services));
  GameObjectService.Objects.Add(new GroundObject(Services));
  GameObjectService.Objects.Add(new DudeObject(Services));
  GameObjectService.Objects.Add(new ObjectCreatorObject(Services));
  GameObjectService.Objects.Add(new LavaBallsObject(Services));
  GameObjectService.Objects.Add(new FogObject(Services));
  GameObjectService.Objects.Add(new StaticObject(Services, "Barrier/Barrier", 0.9f, new Pose(new Vector3F(0, 0, -2))));
  GameObjectService.Objects.Add(new StaticObject(Services, "Barrier/Cylinder", 0.9f, new Pose(new Vector3F(3, 0, 0), QuaternionF.CreateRotationY(MathHelper.ToRadians(-20)))));
  GameObjectService.Objects.Add(new StaticSkyObject(Services));

  // Add a few palm trees.
  Random random = new Random(12345);
  for (int i = 0; i < 10; i++)
  {
    Vector3F position = new Vector3F(random.NextFloat(-3, -8), 0, random.NextFloat(0, -5));
    Matrix33F orientation = Matrix33F.CreateRotationY(random.NextFloat(0, ConstantsF.TwoPi));
    float scale = random.NextFloat(0.5f, 1.2f);
    GameObjectService.Objects.Add(new StaticObject(Services, "PalmTree/palm_tree", scale, new Pose(position, orientation)));
  }
}
public CameraComponent(GameScreen sc)
{
  PerspectiveProjection projection = new PerspectiveProjection();
  projection.SetFieldOfView(
    MathHelper.PiOver4,
    sc._graphicsService.GraphicsDevice.Viewport.AspectRatio,
    0.1f,
    1000.0f);

  _cameraNode = new CameraNode(new Camera(projection));

  _position = new Vector3F(50f, 120.0f, 50f);
  _yaw = MathHelper.ToRadians(45.0f);
}
//--------------------------------------------------------------
#region Methods
//--------------------------------------------------------------

protected override void OnLoad()
{
  var graphicsService = _services.GetInstance<IGraphicsService>();

  // Define camera projection.
  var projection = new PerspectiveProjection();
  projection.SetFieldOfView(
    ConstantsF.PiOver4,
    graphicsService.GraphicsDevice.Viewport.AspectRatio,
    0.1f,
    1000.0f);

  // Create a camera node.
  CameraNode = new CameraNode(new Camera(projection));
}
// OnLoad() is called when the GameObject is added to the IGameObjectService.
protected override void OnLoad()
{
  // Create a camera node.
  CameraNode = new CameraNode(new Camera(new OrthographicProjection()))
  {
    Name = "PlayerCamera"
  };

  // Add to scene.
  // (This is usually optional. Since cameras do not have a visual representation,
  // it makes no difference if the camera is actually part of the scene graph or
  // not. - Except when other scene nodes are attached to the camera. In this case
  // the camera needs to be in the scene.)
  var scene = _services.GetInstance<IScene>();
  if (scene != null)
    scene.Children.Add(CameraNode);

  ResetPose();
  ResetProjection();
}
// OnLoad() is called when the GameObject is added to the IGameObjectService.
protected override void OnLoad()
{
  // Create a camera node.
  CameraNode = new CameraNode(new Camera(new PerspectiveProjection()))
  {
    Name = "PlayerCamera"
  };

  // Add to scene.
  // (This is usually optional. Since cameras do not have a visual representation,
  // it makes no difference if the camera is actually part of the scene graph or
  // not. - Except when other scene nodes are attached to the camera. In this case
  // the camera needs to be in the scene.)
  var scene = _services.GetInstance<IScene>();
  if (scene != null)
    scene.Children.Add(CameraNode);

  ResetPose();
  ResetProjection();

  // Add GUI controls to the Options window.
  var sampleFramework = _services.GetInstance<SampleFramework>();
  var optionsPanel = sampleFramework.AddOptions("Game Objects");
  var panel = SampleHelper.AddGroupBox(optionsPanel, "CameraObject");
  SampleHelper.AddSlider(
    panel,
    "Camera far distance",
    "F0",
    1,
    5000,
    _farDistance,
    value =>
    {
      _farDistance = value;
      ResetProjection();
    });
}
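The slider above stores the new far distance and calls ResetProjection(), whose body is not shown in this snippet. A possible implementation is sketched below; the field-of-view and near-plane values are assumptions carried over from the other camera setups in this collection.

// Possible ResetProjection() implementation (a sketch; the original body is not shown):
private void ResetProjection()
{
  var graphicsService = _services.GetInstance<IGraphicsService>();
  var projection = (PerspectiveProjection)CameraNode.Camera.Projection;

  // Rebuild the perspective projection using the current viewport aspect ratio
  // and the user-controlled far distance.
  projection.SetFieldOfView(
    ConstantsF.PiOver4,
    graphicsService.GraphicsDevice.Viewport.AspectRatio,
    0.1f,
    _farDistance);
}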
public CameraObject()
{
  base.Name = "Camera";

  _inputService = ServiceLocator.Current.GetInstance<IInputService>();
  var graphicsService = ServiceLocator.Current.GetInstance<IGraphicsService>();
  var gameObjectManager = ServiceLocator.Current.GetInstance<IGameObjectService>();
  var screen = ((BasicScreen)graphicsService.Screens["Default"]);
  //_debugRenderer = screen.DebugRenderer;

  PerspectiveProjection projection = new PerspectiveProjection();
  projection.SetFieldOfView(
    Microsoft.Xna.Framework.MathHelper.PiOver4,
    graphicsService.GraphicsDevice.Viewport.AspectRatio,
    0.1f,
    1000.0f);

  _cameraNode = new CameraNode(new Camera(projection));
  screen.Scene.Children.Add(_cameraNode);
  screen.ActiveCamera = _cameraNode;

  ResetCamera();

  View = _cameraNode.View.ToXna();
  Projection = _cameraNode.Camera.Projection.ToXna();
}
/// <summary>
/// Gets the rectangle that encloses the specified geometric object in the viewport.
/// </summary>
/// <param name="cameraNode">The camera node.</param>
/// <param name="viewport">The viewport.</param>
/// <param name="geometricObject">The geometric object.</param>
/// <returns>The rectangle that encloses <paramref name="geometricObject"/>.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="cameraNode"/> or <paramref name="geometricObject"/> is
/// <see langword="null"/>.
/// </exception>
internal static Rectangle GetViewportRectangle(CameraNode cameraNode, Viewport viewport,
  IGeometricObject geometricObject)
{
  if (cameraNode == null)
    throw new ArgumentNullException("cameraNode");
  if (geometricObject == null)
    throw new ArgumentNullException("geometricObject");

  // For a uniformly scaled sphere we can use the specialized overload for spheres.
  var sphereShape = geometricObject.Shape as SphereShape;
  if (sphereShape != null)
  {
    Vector3F scale = geometricObject.Scale;
    if (scale.X == scale.Y && scale.Y == scale.Z)
    {
      return GetViewportRectangle(cameraNode, viewport, geometricObject.Pose.Position,
        scale.X * sphereShape.Radius);
    }
  }

  // Relative bounds: (left, top, right, bottom).
  Vector4F bounds = GetBounds(cameraNode, geometricObject);

  // Rectangle in viewport.
  int left = (int)(bounds.X * viewport.Width);            // implicit floor()
  int top = (int)(bounds.Y * viewport.Height);            // implicit floor()
  int right = (int)Math.Ceiling(bounds.Z * viewport.Width);
  int bottom = (int)Math.Ceiling(bounds.W * viewport.Height);
  return new Rectangle(left, top, right - left, bottom - top);
}
public void GetScreenSizeException2()
{
  var cameraNode = new CameraNode(new Camera(new PerspectiveProjection()));
  var viewport = new Viewport(10, 10, 200, 100);

  GraphicsHelper.GetScreenSize(cameraNode, viewport, null);
}
/// <overloads>
/// <summary>
/// Gets a rectangle that encloses the specified object in the viewport.
/// </summary>
/// </overloads>
///
/// <summary>
/// Gets the rectangle that encloses the specified sphere in the viewport.
/// </summary>
/// <param name="cameraNode">The camera node.</param>
/// <param name="viewport">The viewport.</param>
/// <param name="positionWorld">The sphere center in world space.</param>
/// <param name="radius">The sphere radius.</param>
/// <returns>The rectangle that encloses the sphere.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="cameraNode"/> is <see langword="null"/>.
/// </exception>
internal static Rectangle GetViewportRectangle(CameraNode cameraNode, Viewport viewport,
  Vector3F positionWorld, float radius)
{
  if (cameraNode == null)
    throw new ArgumentNullException("cameraNode");

  // Relative bounds: (left, top, right, bottom).
  Vector4F bounds = GetBounds(cameraNode, positionWorld, radius);

  // Rectangle in viewport.
  int left = (int)(bounds.X * viewport.Width);            // implicit floor()
  int top = (int)(bounds.Y * viewport.Height);            // implicit floor()
  int right = (int)Math.Ceiling(bounds.Z * viewport.Width);
  int bottom = (int)Math.Ceiling(bounds.W * viewport.Height);
  return new Rectangle(left, top, right - left, bottom - top);
}
/// <overloads>
/// <summary>
/// Gets the bounds of the specified object relative to the viewport.
/// </summary>
/// </overloads>
///
/// <summary>
/// Gets the bounds of the specified geometric object relative to the viewport.
/// </summary>
/// <param name="cameraNode">The camera node.</param>
/// <param name="geometricObject">The geometric object.</param>
/// <returns>
/// The bounds (left, top, right, bottom) where each entry is in the range [0, 1].
/// </returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="cameraNode"/> or <paramref name="geometricObject"/> is
/// <see langword="null"/>.
/// </exception>
internal static Vector4F GetBounds(CameraNode cameraNode, IGeometricObject geometricObject)
{
  // Notes:
  // Do not call this GetBounds() method for spheres. Use the other overload for spheres.
  //
  // At first this problem seems trivial: We only have to get the support points of the
  // geometric object's shape in the directions of the frustum plane normal vectors and
  // then project these points onto the near plane... But this does not work, as a simple
  // top-down sketch shows: Each eye ray has its own direction, and therefore its own
  // plane normal and its own support direction!

  if (cameraNode == null)
    throw new ArgumentNullException("cameraNode");
  if (geometricObject == null)
    throw new ArgumentNullException("geometricObject");

  Debug.Assert(!(geometricObject.Shape is SphereShape), "Call a different GetBounds() overload for spheres!");

  // Projection properties.
  var camera = cameraNode.Camera;
  var projection = camera.Projection;
  float near = projection.Near;
  float left = projection.Left;
  float right = projection.Right;
  float width = projection.Width;
  float top = projection.Top;
  float bottom = projection.Bottom;
  float height = projection.Height;

  // Get AABB in view space.
  Pose localToViewPose = cameraNode.PoseWorld.Inverse * geometricObject.Pose;
  Aabb aabb = geometricObject.Shape.GetAabb(geometricObject.Scale, localToViewPose);

  // Is the AABB in front of the near plane (= totally clipped)?
  if (aabb.Minimum.Z >= -near)
    return new Vector4F(0);

  // Does the AABB contain the origin?
  if (GeometryHelper.HaveContact(aabb, Vector3F.Zero))
    return new Vector4F(0, 0, 1, 1);

  // Project the AABB far face to the near plane.
  Vector2F min;
  min.X = aabb.Minimum.X / -aabb.Minimum.Z * near;
  min.Y = aabb.Minimum.Y / -aabb.Minimum.Z * near;

  Vector2F max;
  max.X = aabb.Maximum.X / -aabb.Minimum.Z * near;
  max.Y = aabb.Maximum.Y / -aabb.Minimum.Z * near;

  // If the AABB z extent overlaps the origin, some results are invalid.
  if (aabb.Maximum.Z > -Numeric.EpsilonF)
  {
    if (aabb.Minimum.X < 0)
      min.X = left;
    if (aabb.Maximum.X > 0)
      max.X = right;
    if (aabb.Minimum.Y < 0)
      min.Y = bottom;
    if (aabb.Maximum.Y > 0)
      max.Y = top;
  }
  else
  {
    // The AABB near face is also in front of the camera. Project the AABB near face
    // to the near plane as well and take the most extreme values.
    min.X = Math.Min(min.X, aabb.Minimum.X / -aabb.Maximum.Z * near);
    min.Y = Math.Min(min.Y, aabb.Minimum.Y / -aabb.Maximum.Z * near);
    max.X = Math.Max(max.X, aabb.Maximum.X / -aabb.Maximum.Z * near);
    max.Y = Math.Max(max.Y, aabb.Maximum.Y / -aabb.Maximum.Z * near);
  }

  Vector4F bounds;
  bounds.X = (min.X - left) / width;
  bounds.Y = (top - max.Y) / height;
  bounds.Z = (max.X - left) / width;
  bounds.W = (top - min.Y) / height;

  bounds.X = MathHelper.Clamp(bounds.X, 0, 1);
  bounds.Y = MathHelper.Clamp(bounds.Y, 0, 1);
  bounds.Z = MathHelper.Clamp(bounds.Z, bounds.X, 1);
  bounds.W = MathHelper.Clamp(bounds.W, bounds.Y, 1);

  return bounds;
}
/// <summary>
/// Estimates the size of an object in pixels.
/// </summary>
/// <param name="cameraNode">The camera node.</param>
/// <param name="viewport">The viewport.</param>
/// <param name="geometricObject">The geometric object.</param>
/// <returns>
/// The estimated width and height of <paramref name="geometricObject"/> in pixels.
/// </returns>
/// <remarks>
/// The method assumes that the object is fully visible by the camera, i.e. it does not perform
/// frustum culling. It estimates the size of <paramref name="geometricObject"/> based on its
/// bounding shape.
/// </remarks>
/// <exception cref="ArgumentNullException">
/// <paramref name="cameraNode"/> or <paramref name="geometricObject"/> is
/// <see langword="null"/>.
/// </exception>
internal static Vector2F GetScreenSize(CameraNode cameraNode, Viewport viewport, IGeometricObject geometricObject)
{
  // This implementation is just for reference. (It is preferable to optimize
  // and inline the code when needed.)

  if (cameraNode == null)
    throw new ArgumentNullException("cameraNode");
  if (geometricObject == null)
    throw new ArgumentNullException("geometricObject");

  // Use bounding sphere of AABB in world space.
  var aabb = geometricObject.Aabb;
  float diameter = aabb.Extent.Length;
  float width = diameter;
  float height = diameter;

  Matrix44F proj = cameraNode.Camera.Projection;
  bool isOrthographic = (proj.M33 != 0);

  // ----- xScale, yScale:
  // Orthographic Projection:
  //   proj.M00 = 2 / (right - left)
  //   proj.M11 = 2 / (top - bottom)
  //
  // Perspective Projection:
  //   proj.M00 = 2 * zNear / (right - left) = 1 / tan(fovX/2)
  //   proj.M11 = 2 * zNear / (top - bottom) = 1 / tan(fovY/2)
  float xScale = Math.Abs(proj.M00);
  float yScale = Math.Abs(proj.M11);

  // Screen size [px].
  Vector2F screenSize;

  if (isOrthographic)
  {
    // ----- Orthographic Projection
    // sizeX = viewportWidth * width / (right - left)
    //       = viewportWidth * width * xScale / 2
    screenSize.X = viewport.Width * width * xScale / 2;

    // sizeY = viewportHeight * height / (top - bottom)
    //       = viewportHeight * height * yScale / 2
    screenSize.Y = viewport.Height * height * yScale / 2;
  }
  else
  {
    // ----- Perspective Projection
    // Camera properties.
    Pose cameraPose = cameraNode.PoseWorld;
    Vector3F cameraPosition = cameraPose.Position;
    Matrix33F cameraOrientation = cameraPose.Orientation;
    Vector3F cameraForward = -cameraOrientation.GetColumn(2);

    // Get planar distance from camera to object by projecting the distance
    // vector onto the look direction.
    Vector3F cameraToObject = aabb.Center - cameraPosition;
    float distance = Vector3F.Dot(cameraToObject, cameraForward);

    // Assume that object is in front of camera (no frustum culling).
    distance = Math.Abs(distance);

    // Avoid division by zero.
    if (distance < Numeric.EpsilonF)
      distance = Numeric.EpsilonF;

    // sizeX = viewportWidth * width / (objectDistance * 2 * tan(fovX/2))
    //       = viewportWidth * width * zNear / (objectDistance * (right - left))
    //       = viewportWidth * width * xScale / (2 * objectDistance)
    screenSize.X = viewport.Width * width * xScale / (2 * distance);

    // sizeY = viewportHeight * height / (objectDistance * 2 * tan(fovY/2))
    //       = viewportHeight * height * zNear / (objectDistance * (top - bottom))
    //       = viewportHeight * height * yScale / (2 * objectDistance)
    screenSize.Y = viewport.Height * height * yScale / (2 * distance);
  }

  return screenSize;
}
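A natural consumer of this estimate is level-of-detail or culling logic that ignores objects below a pixel threshold. The helper below is hypothetical (name, threshold semantics and placement in the same class are assumptions) and only illustrates how the estimate might be used.

// Illustrative use of the screen-size estimate (hypothetical helper):
internal static bool IsSmallerThan(CameraNode cameraNode, Viewport viewport,
  IGeometricObject geometricObject, float minPixels)
{
  // Estimate the on-screen footprint of the object and compare it with a pixel
  // threshold, e.g. to skip small details or switch to a cheaper LOD.
  Vector2F screenSize = GetScreenSize(cameraNode, viewport, geometricObject);
  return screenSize.X < minPixels && screenSize.Y < minPixels;
}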
/// <summary>
/// Gets the bounds of the specified sphere relative to the viewport.
/// </summary>
/// <param name="cameraNode">The camera node.</param>
/// <param name="positionWorld">The sphere center in world space.</param>
/// <param name="radius">The sphere radius.</param>
/// <returns>
/// The bounds (left, top, right, bottom) where each entry is in the range [0, 1].
/// </returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="cameraNode"/> is <see langword="null"/>.
/// </exception>
internal static Vector4F GetBounds(CameraNode cameraNode, Vector3F positionWorld, float radius)
{
  var camera = cameraNode.Camera;
  var projection = camera.Projection;
  float near = projection.Near;
  float left = projection.Left;
  float width = projection.Width;
  float top = projection.Top;
  float height = projection.Height;

  Vector3F l = cameraNode.PoseWorld.ToLocalPosition(positionWorld);
  float r = radius;

  // Default bounds (left, top, right, bottom).
  var bounds = new Vector4F(0, 0, 1, 1);

  // ----- Solve for N = (x, 0, z).
  // We look for a plane through the camera origin with unit normal N = (nx, 0, nz)
  // that is tangent to the sphere: N . l = r and nx^2 + nz^2 = 1. Substituting
  // nz = (r - nx * lx) / lz into the unit-length constraint gives a quadratic in nx
  // with a = lx^2 + lz^2, b = -2 * r * lx, c = r^2 - lz^2.

  // Discriminant already divided by 4:
  float d = (r * r * l.X * l.X - (l.X * l.X + l.Z * l.Z) * (r * r - l.Z * l.Z));
  if (d > 0)
  {
    // Camera is outside the sphere.

    float rootD = (float)Math.Sqrt(d);

    // Now check two possible solutions (+/- rootD):
    float nx1 = (r * l.X + rootD) / (l.X * l.X + l.Z * l.Z);
    float nx2 = (r * l.X - rootD) / (l.X * l.X + l.Z * l.Z);

    float nz1 = (r - nx1 * l.X) / l.Z;
    float nz2 = (r - nx2 * l.X) / l.Z;

    // Compute tangent position (px, 0, pz) on the sphere.
    float pz1 = (l.X * l.X + l.Z * l.Z - r * r) / (l.Z - (nz1 / nx1) * l.X);
    float pz2 = (l.X * l.X + l.Z * l.Z - r * r) / (l.Z - (nz2 / nx2) * l.X);

    if (pz1 < 0)
    {
      // Plane (nx1, 0, nz1) is within camera frustum.

      float px = -pz1 * nz1 / nx1;

      float x = nz1 * near / nx1;           // x coordinate on the near plane.
      float boundsX = (x - left) / width;   // Value relative to viewport. (0 = left, 1 = right)

      // Shrink the scissor rectangle on the left or on the right side.
      if (px < l.X)
        bounds.X = Math.Max(bounds.X, boundsX);
      else
        bounds.Z = Math.Min(bounds.Z, boundsX);
    }

    if (pz2 < 0)
    {
      float px = -pz2 * nz2 / nx2;

      float x = nz2 * near / nx2;
      float scissorX = (x - left) / width;

      if (px < l.X)
        bounds.X = Math.Max(bounds.X, scissorX);
      else
        bounds.Z = Math.Min(bounds.Z, scissorX);
    }
  }

  // ----- Solve for N = (0, y, z).
  d = (r * r * l.Y * l.Y - (l.Y * l.Y + l.Z * l.Z) * (r * r - l.Z * l.Z));
  if (d > 0)
  {
    // Camera is outside the sphere.

    float rootD = (float)Math.Sqrt(d);

    float ny1 = (r * l.Y + rootD) / (l.Y * l.Y + l.Z * l.Z);
    float ny2 = (r * l.Y - rootD) / (l.Y * l.Y + l.Z * l.Z);

    float nz1 = (r - ny1 * l.Y) / l.Z;
    float nz2 = (r - ny2 * l.Y) / l.Z;

    float pz1 = (l.Y * l.Y + l.Z * l.Z - r * r) / (l.Z - (nz1 / ny1) * l.Y);
    float pz2 = (l.Y * l.Y + l.Z * l.Z - r * r) / (l.Z - (nz2 / ny2) * l.Y);

    if (pz1 < 0)
    {
      float py = -pz1 * nz1 / ny1;

      float y = nz1 * near / ny1;
      float scissorY = -(y - top) / height;

      if (py > l.Y)
        bounds.Y = Math.Max(bounds.Y, scissorY);
      else
        bounds.W = Math.Min(bounds.W, scissorY);
    }

    if (pz2 < 0)
    {
      float py = -pz2 * nz2 / ny2;

      float y = nz2 * near / ny2;
      float scissorY = -(y - top) / height;

      if (py > l.Y)
        bounds.Y = Math.Max(bounds.Y, scissorY);
      else
        bounds.W = Math.Min(bounds.W, scissorY);
    }
  }

  bounds.X = MathHelper.Clamp(bounds.X, 0, 1);
  bounds.Y = MathHelper.Clamp(bounds.Y, 0, 1);
  bounds.Z = MathHelper.Clamp(bounds.Z, bounds.X, 1);
  bounds.W = MathHelper.Clamp(bounds.W, bounds.Y, 1);

  return bounds;
}
protected override void OnUnload()
{
  CameraNode.Dispose(false);
  CameraNode = null;
}
protected override void OnUnload()
{
  var graphicsService = ServiceLocator.Current.GetInstance<IGraphicsService>();
  var screen = ((BasicScreen)graphicsService.Screens["Default"]);

  if (screen.ActiveCamera == _cameraNode)
  {
    screen.ActiveCamera = null;
  }

  _cameraNode.Parent.Children.Remove(_cameraNode);
  _cameraNode.Dispose();
  _cameraNode = null;
}
public static float GetViewNormalizedDistance(SceneNode sceneNode, CameraNode cameraNode)
{
  Debug.Assert(
    sceneNode.ScaleWorld.X > 0 && sceneNode.ScaleWorld.Y > 0 && sceneNode.ScaleWorld.Z > 0,
    "Assuming that all scale factors are positive.");

  Pose cameraPose = cameraNode.PoseWorld;
  Vector3F cameraToObject = sceneNode.PoseWorld.Position - cameraPose.Position;

  // Get planar distance by projecting the distance vector onto the look direction.
  // This is stable for sideways movement but unstable for camera rotations.
  //Vector3F cameraForward = -cameraPose.Orientation.GetColumn(2);
  //float distance = Math.Abs(Vector3F.Dot(cameraToObject, cameraForward));

  // Get normal (radial) distance (stable for camera rotations, unstable for sideways movement).
  float distance = cameraToObject.Length;

  // Make distance independent of current FOV and scale.
  distance = GetViewNormalizedDistance(distance, cameraNode.Camera.Projection);
  distance /= sceneNode.ScaleWorld.LargestComponent;

  return distance;
}
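Because the returned distance is normalized with respect to the camera field of view and the node scale, the same thresholds can be reused for different cameras and object sizes, which is what makes it convenient for LOD selection. The helper below is a hypothetical illustration; the lodDistances array and the level convention are assumptions.

// Illustrative LOD selection based on the view-normalized distance (hypothetical helper):
public static int SelectLodLevel(SceneNode sceneNode, CameraNode cameraNode, float[] lodDistances)
{
  float distance = GetViewNormalizedDistance(sceneNode, cameraNode);

  // Walk the sorted distance thresholds; each threshold crossed means one coarser LOD.
  int level = 0;
  while (level < lodDistances.Length && distance > lodDistances[level])
    level++;

  return level;   // 0 = highest detail, lodDistances.Length = lowest detail.
}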
private void InitializeAudio()
{
  // The camera defines the position of the audio listener.
  _listener = new AudioListener();
  _cameraNode = GraphicsScreen.CameraNode;

  // Set a distance scale that is suitable for our demo.
  SoundEffect.DistanceScale = 10;

  // ----- Load sounds, create instances and emitters.
  _hitSound = ContentManager.Load<SoundEffect>("Audio/Hit");
  for (int i = 0; i < _hitSoundInstances.Length; i++)
  {
    _hitSoundInstances[i] = _hitSound.CreateInstance();
    // Change pitch. Our instance sounds better this way.
    _hitSoundInstances[i].Pitch = -1;
    _hitEmitters[i] = new AudioEmitter();
  }

  _scratchSound = ContentManager.Load<SoundEffect>("Audio/Scratch");
  _scratchSoundInstance = _scratchSound.CreateInstance();
  _scratchEmitter = new AudioEmitter();

  _rollSound = ContentManager.Load<SoundEffect>("Audio/Roll");
  _rollSoundInstance = _rollSound.CreateInstance();
  _rollEmitter = new AudioEmitter();

  // The hit sounds are instant sounds. The scratch and rolling sounds are looped.
  _scratchSoundInstance.IsLooped = true;
  _rollSoundInstance.IsLooped = true;
}
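Since the camera defines the listener, the listener has to be refreshed from the camera pose every frame before the 3D effects are applied. The sketch below is illustrative: UpdateListener is a hypothetical helper, and the forward/up vectors are taken from the pose orientation columns in the same way as in the GetScreenSize snippet above.

// Possible per-frame listener update (hypothetical helper):
private void UpdateListener()
{
  Pose cameraPose = _cameraNode.PoseWorld;
  Vector3F position = cameraPose.Position;
  Vector3F forward = -cameraPose.Orientation.GetColumn(2);  // Camera looks along -z.
  Vector3F up = cameraPose.Orientation.GetColumn(1);

  // Convert component-wise to XNA vectors to avoid assuming any interop extensions.
  _listener.Position = new Vector3(position.X, position.Y, position.Z);
  _listener.Forward = new Vector3(forward.X, forward.Y, forward.Z);
  _listener.Up = new Vector3(up.X, up.Y, up.Z);

  // Re-apply the 3D parameters for the looped instances.
  _scratchSoundInstance.Apply3D(_listener, _scratchEmitter);
  _rollSoundInstance.Apply3D(_listener, _rollEmitter);
}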
//--------------------------------------------------------------
#region Creation & Cleanup
//--------------------------------------------------------------

/// <summary>
/// Initializes a new instance of the <see cref="VarianceShadowMapRenderer"/> class.
/// </summary>
/// <param name="render">
/// The method which renders the scene into the shadow map. Must not be <see langword="null"/>.
/// See <see cref="RenderCallback"/> for more information.
/// </param>
public VarianceShadowMapRenderer(Func<RenderContext, bool> render)
{
  if (render == null)
    throw new ArgumentNullException("render");

  RenderCallback = render;
  _cameraVolume = new PerspectiveViewVolume();
  _orthographicCameraNode = new CameraNode(new Camera(new OrthographicProjection()));
}
private static bool IsCameraUnderwater(CustomSceneQuery query, CameraNode cameraNode)
{
  var cameraPosition = cameraNode.PoseWorld.Position;
  foreach (var node in query.RenderableNodes)
  {
    var waterNode = node as WaterNode;
    if (waterNode != null && waterNode.IsUnderwater(cameraPosition))
      return true;
  }

  return false;
}
/// <summary>
/// Initializes a new instance of the <see cref="OcclusionBuffer"/> class with the specified
/// buffer size.
/// </summary>
/// <param name="graphicsService">The graphics service.</param>
/// <param name="width">The width of the occlusion buffer.</param>
/// <param name="height">The height of the occlusion buffer.</param>
/// <param name="bufferSize">
/// The size of the internal triangle buffer (= max number of occluder triangles that can be
/// rendered in a single draw call). Needs to be large enough to store the most complex
/// occluder.
/// </param>
/// <exception cref="ArgumentNullException">
/// <paramref name="graphicsService"/> is <see langword="null"/>.
/// </exception>
public OcclusionBuffer(IGraphicsService graphicsService, int width, int height, int bufferSize)
{
  if (graphicsService == null)
    throw new ArgumentNullException("graphicsService");

  // For simplicity only accept power-of-two formats.
  if (!MathHelper.IsPowerOf2(width) || !MathHelper.IsPowerOf2(height))
    throw new ArgumentException("Width and height of occlusion buffer expected to be a power of two.");

  // The current texture atlas layout assumes that width ≥ height.
  if (width < height)
    throw new ArgumentException("Width expected to be greater than or equal to the height of the occlusion buffer.");

  var graphicsDevice = graphicsService.GraphicsDevice;
  if (bufferSize < 1)
    throw new ArgumentOutOfRangeException("bufferSize", "The buffer size must be greater than 0.");
  if (bufferSize >= graphicsDevice.GetMaxPrimitivesPerCall())
    throw new ArgumentOutOfRangeException("bufferSize", "The buffer size exceeds the max number of primitives supported by the current graphics device.");

  // ----- RenderBatch handles occluders.
  // bufferSize is the max number of triangles per draw call.
  // Vertex buffer size:
  // - In the worst case n triangles need n * 3 vertices.
  // - The max size is limited to 65536 because 16-bit indices are used.
  var vertices = new Vector3F[Math.Min(bufferSize * 3, ushort.MaxValue + 1)];
  // Index buffer size: number of triangles * 3
  var indices = new ushort[bufferSize * 3];
  _renderBatch = new RenderBatch<Vector3F, ushort>(
    graphicsDevice,
    VertexPosition.VertexDeclaration,
    vertices,
    true,
    indices,
    true);

  _effect = graphicsService.Content.Load<Effect>("DigitalRune/OcclusionCulling");
  _parameterClampAabbMinimum = _effect.Parameters["ClampAabbMinimum"];
  _parameterClampAabbMaximum = _effect.Parameters["ClampAabbMaximum"];
  _parameterCameraViewProj = _effect.Parameters["CameraViewProj"];
  _parameterCameraNear = _effect.Parameters["CameraNear"];
  _parameterCameraFar = _effect.Parameters["CameraFar"];
  _parameterCameraPosition = _effect.Parameters["CameraPosition"];
  _parameterNormalizationFactor = _effect.Parameters["NormalizationFactor"];
  _parameterLightViewProj = _effect.Parameters["LightViewProj"];
  _parameterLightToCamera = _effect.Parameters["LightToCamera"];
  _parameterHzbSize = _effect.Parameters["HzbSize"];
  _parameterTargetSize = _effect.Parameters["TargetSize"];
  _parameterAtlasSize = _effect.Parameters["AtlasSize"];
  _parameterTexelOffset = _effect.Parameters["TexelOffset"];
  _parameterHalfTexelOffset = _effect.Parameters["HalfTexelOffset"];
  _parameterMaxLevel = _effect.Parameters["MaxLevel"];
  _parameterHzbTexture = _effect.Parameters["HzbTexture"];
  _parameterLightHzbTexture = _effect.Parameters["LightHzb"];
  _parameterDebugLevel = _effect.Parameters["DebugLevel"];
  _parameterDebugMinimum = _effect.Parameters["DebugMinimum"];
  _parameterDebugMaximum = _effect.Parameters["DebugMaximum"];
  _techniqueOccluder = _effect.Techniques["Occluder"];
  _techniqueDownsample = _effect.Techniques["Downsample"];
  _techniqueCopy = _effect.Techniques["Copy"];
  _techniqueQuery = _effect.Techniques["Query"];
  _techniqueVisualize = _effect.Techniques["Visualize"];

  _occlusionProxies = new List<IOcclusionProxy>();
  _sceneNodes = new List<SceneNode>();

  // Store delegate methods to avoid garbage.
  _updateOcclusionProxies = UpdateOcclusionProxies;
  _updateOcclusionProxy = UpdateOcclusionProxy;

  _splitVolume = new PerspectiveViewVolume();
  _orthographicCameraNode = new CameraNode(new Camera(new OrthographicProjection()));
  _shadowCasters = new List<SceneNode>();

  /*
  // By default, enable multithreading on multi-core systems.
#if WP7 || UNITY
  // Cannot access Environment.ProcessorCount in phone app. (Security issue.)
  EnableMultithreading = false;
#else
  // Enable multithreading by default if the current system has multiple processors.
  EnableMultithreading = Environment.ProcessorCount > 1;

  // Multithreading works but Parallel.For of Xamarin.Android/iOS is very inefficient.
  if (GlobalSettings.PlatformID == PlatformID.Android || GlobalSettings.PlatformID == PlatformID.iOS)
    EnableMultithreading = false;
#endif
  */

  // Disable multithreading by default. Multithreading causes massive lags in the
  // XNA version, but the MonoGame version is not affected!?
  EnableMultithreading = false;

  // For best performance: Enable progressive shadow caster culling.
  ProgressiveShadowCasterCulling = true;

  Statistics = new OcclusionCullingStatistics();

  InitializeBuffers(graphicsDevice, width, height);
}
public SceneCaptureCubeSample(Microsoft.Xna.Framework.Game game)
  : base(game)
{
  SampleFramework.IsMouseVisible = false;

  // Create a graphics screen. This screen has to call the SceneCaptureRenderer
  // to handle the SceneCaptureNodes!
  _graphicsScreen = new DeferredGraphicsScreen(Services) { DrawReticle = true };
  GraphicsService.Screens.Insert(0, _graphicsScreen);
  GameObjectService.Objects.Add(new DeferredGraphicsOptionsObject(Services));

  Services.Register(typeof(DebugRenderer), null, _graphicsScreen.DebugRenderer);
  Services.Register(typeof(IScene), null, _graphicsScreen.Scene);

  // Add gravity and damping to the physics Simulation.
  Simulation.ForceEffects.Add(new Gravity());
  Simulation.ForceEffects.Add(new Damping());

  // Add a custom game object which controls the camera.
  var cameraGameObject = new CameraObject(Services);
  GameObjectService.Objects.Add(cameraGameObject);
  _graphicsScreen.ActiveCameraNode = cameraGameObject.CameraNode;

  // More standard objects.
  GameObjectService.Objects.Add(new GrabObject(Services));
  GameObjectService.Objects.Add(new ObjectCreatorObject(Services));
  //GameObjectService.Objects.Add(new StaticSkyObject(Services));
  GameObjectService.Objects.Add(new DynamicSkyObject(Services, true, false, true));
  GameObjectService.Objects.Add(new GroundObject(Services));
  GameObjectService.Objects.Add(new DudeObject(Services));
  GameObjectService.Objects.Add(new DynamicObject(Services, 1));
  GameObjectService.Objects.Add(new DynamicObject(Services, 2));
  GameObjectService.Objects.Add(new DynamicObject(Services, 5));
  GameObjectService.Objects.Add(new DynamicObject(Services, 6));
  GameObjectService.Objects.Add(new DynamicObject(Services, 7));
  GameObjectService.Objects.Add(new FogObject(Services) { AttachToCamera = true });
  GameObjectService.Objects.Add(new LavaBallsObject(Services));

  // Add a few palm trees.
  Random random = new Random(12345);
  for (int i = 0; i < 10; i++)
  {
    Vector3F position = new Vector3F(random.NextFloat(-3, -8), 0, random.NextFloat(0, -5));
    Matrix33F orientation = Matrix33F.CreateRotationY(random.NextFloat(0, ConstantsF.TwoPi));
    float scale = random.NextFloat(0.5f, 1.2f);
    GameObjectService.Objects.Add(new StaticObject(Services, "PalmTree/palm_tree", scale, new Pose(position, orientation)));
  }

  // Load the "Bubble" mesh and place it at a fixed position in the scene.
  var modelNode = ContentManager.Load<ModelNode>("Bubble/Bubble");
  var meshNode = modelNode.GetDescendants().OfType<MeshNode>().First().Clone();
  meshNode.PoseWorld = new Pose(new Vector3F(0, 1, 0));
  _graphicsScreen.Scene.Children.Add(meshNode);

  // Surface of the mesh should reflect the scene in real-time. Reflections are
  // created using environment mapping: The scene is rendered into a cube map,
  // which is then applied to the mesh.
  // To render the scene into a cube map, we need to define a CameraNode and a
  // SceneCaptureNode: The CameraNode defines the point from where the scene is
  // captured. The SceneCaptureNode defines where and in which format the captured
  // image is needed.

  // Attach a camera to the center of the mesh.
  var projection = new PerspectiveProjection();
  projection.SetFieldOfView(ConstantsF.PiOver2, 1, 0.1f, 20);
  var captureCameraNode = new CameraNode(new Camera(projection));
  meshNode.Children = new SceneNodeCollection { captureCameraNode };

  // Attach a SceneCaptureNode with a cube map render target to the mesh.
  var renderToTexture = new RenderToTexture
  {
    Texture = new RenderTargetCube(
      GraphicsService.GraphicsDevice,
      256,
      true,
      SurfaceFormat.Color,
      DepthFormat.None),
  };
  var sceneCaptureNode = new SceneCaptureNode(renderToTexture)
  {
    Shape = meshNode.Shape,
    CameraNode = captureCameraNode,
  };
  meshNode.Children.Add(sceneCaptureNode);

  // The bubble model uses a special effect and is rendered in the "AlphaBlend"
  // render pass. Let's modify the effect parameters to use the created cube map
  // as the reflection map of the bubble.
  var effectBinding = meshNode.Mesh.Materials[0]["AlphaBlend"];
  effectBinding.Set("ReflectionStrength", 0.5f);
  effectBinding.Set("RefractionStrength", 0.0f);
  effectBinding.Set("FresnelBias", 1.0f);
  effectBinding.Set("BlendMode", 1.0f);
  effectBinding.Set("Alpha", 1.0f);
  effectBinding.Set("CustomEnvironmentMap", (TextureCube)renderToTexture.Texture);
}