Example No. 1
        private void RenderHiDef(TextureCube texture, Matrix33F orientation, float exposure, RenderContext context)
        {
            var graphicsDevice = context.GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
            graphicsDevice.BlendState        = BlendState.Opaque;

            var       cameraNode = context.CameraNode;
            Matrix44F view       = cameraNode.View;
            Matrix44F projection = cameraNode.Camera.Projection;

            // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
            // cube map and objects or texts in it are mirrored.)
            var mirrorZ = Matrix44F.CreateScale(1, 1, -1);

            _parameterWorldViewProjection.SetValue(
                (Matrix)(projection * view * new Matrix44F(orientation, Vector3F.Zero) * mirrorZ));
            _parameterExposure.SetValue(new Vector4(exposure, exposure, exposure, 1));
            _textureParameter.SetValue(texture);

            if (context.IsHdrEnabled())
            {
                _passLinear.Apply();
            }
            else
            {
                _passGamma.Apply();
            }

            _submesh.Draw();
            savedRenderState.Restore();
        }
Example No. 2
        public void CompositeShapeWithRotatedChildren()
        {
            var s = new CompositeShape();

            s.Children.Add(new GeometricObject(new BoxShape(1, 2, 3), new Vector3F(1.1f, 0.3f, 0.8f), new Pose(new Vector3F(100, 10, 0), RandomHelper.Random.NextQuaternionF())));
            s.Children.Add(new GeometricObject(new ConeShape(1, 2), new Vector3F(1.1f, 0.3f, 0.8f), new Pose(new Vector3F(-10, -10, 0), RandomHelper.Random.NextQuaternionF())));
            float     m0;
            Vector3F  com0;
            Matrix33F i0;

            MassHelper.GetMass(s, new Vector3F(2), 1, true, 0.001f, 10, out m0, out com0, out i0);

            var m = s.GetMesh(0.001f, 6);

            m.Transform(Matrix44F.CreateScale(2));
            float     m1;
            Vector3F  com1;
            Matrix33F i1;

            MassHelper.GetMass(m, out m1, out com1, out i1);

            const float e = 0.01f;

            Assert.IsTrue(Numeric.AreEqual(m0, m1, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com1, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0, i1, e * (1 + i0.Trace)));
        }
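The assertions above compare analytically computed mass properties against values integrated from a triangulated approximation of the shape, using a tolerance of the form e * (1 + reference), which behaves like an absolute tolerance near zero and like a relative tolerance for large values. Below is a minimal standalone sketch of that comparison pattern in plain C#; the names are illustrative and not part of the DigitalRune API.

using System;

static class ToleranceSketch
{
    // |actual - expected| <= e * (1 + |expected|):
    // absolute behavior near zero, relative behavior for large reference values.
    public static bool AreAlmostEqual(float expected, float actual, float e = 0.01f)
    {
        return Math.Abs(actual - expected) <= e * (1 + Math.Abs(expected));
    }

    public static void Main()
    {
        Console.WriteLine(AreAlmostEqual(1000.0f, 1005.0f)); // True  (within ~1%)
        Console.WriteLine(AreAlmostEqual(0.001f, 0.005f));   // True  (dominated by the absolute part)
        Console.WriteLine(AreAlmostEqual(1000.0f, 1100.0f)); // False
    }
}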
Example No. 3
        public void ScaledConvexMass()
        {
            var       s = new ScaledConvexShape(new CapsuleShape(1, 3), new Vector3F(0.9f, -0.8f, 1.2f));
            float     m0;
            Vector3F  com0;
            Matrix33F i0;

            MassHelper.GetMass(s, new Vector3F(1, -2, -3), 1, true, 0.001f, 10, out m0, out com0, out i0);

            var m = s.GetMesh(0.001f, 6);

            m.Transform(Matrix44F.CreateScale(1, -2, -3));
            float     m1;
            Vector3F  com1;
            Matrix33F i1;

            MassHelper.GetMass(m, out m1, out com1, out i1);

            const float e = 0.01f;

            Assert.IsTrue(Numeric.AreEqual(m0, m1, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com1, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0, i1, e * (1 + i0.Trace)));

            // Try a different density.
            float     m2;
            Vector3F  com2;
            Matrix33F i2;

            MassHelper.GetMass(s, new Vector3F(1, -2, -3), 0.7f, true, 0.001f, 10, out m2, out com2, out i2);
            Assert.IsTrue(Numeric.AreEqual(m0 * 0.7f, m2, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com2, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0 * 0.7f, i2, e * (1 + i0.Trace)));
        }
Example No. 4
        /// <summary>
        /// Called when a mesh should be generated for the shape.
        /// </summary>
        /// <param name="absoluteDistanceThreshold">The absolute distance threshold.</param>
        /// <param name="iterationLimit">The iteration limit.</param>
        /// <returns>The triangle mesh for this shape.</returns>
        protected override TriangleMesh OnGetMesh(float absoluteDistanceThreshold, int iterationLimit)
        {
            // Convert absolute error to relative error.
            float maxExtent         = GetAabb(Vector3F.One, Pose.Identity).Extent.LargestComponent;
            float relativeThreshold = !Numeric.IsZero(maxExtent)
                                ? absoluteDistanceThreshold / maxExtent
                                : Numeric.EpsilonF;

            // Get meshes of children and add them to mesh in parent space.
            TriangleMesh mesh = new TriangleMesh();
            int          numberOfGeometries = Children.Count;

            for (int childIndex = 0; childIndex < numberOfGeometries; childIndex++)
            {
                IGeometricObject geometricObject = Children[childIndex];

                // Get child mesh.
                var childMesh = geometricObject.Shape.GetMesh(relativeThreshold, iterationLimit);

                // Transform child mesh into local space of this parent shape.
                childMesh.Transform(geometricObject.Pose.ToMatrix44F() * Matrix44F.CreateScale(geometricObject.Scale));

                // Add to parent mesh.
                mesh.Add(childMesh, false);
            }

            return mesh;
        }
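OnGetMesh above converts the absolute distance threshold into a relative one by dividing by the largest AABB extent, with an epsilon fallback for degenerate shapes. A minimal standalone sketch of that conversion (plain C#, illustrative names only, not DigitalRune code):

using System;

static class ThresholdSketch
{
    // Converts an absolute surface-distance error into a relative one by
    // dividing by the largest AABB extent; falls back to a small epsilon for
    // degenerate (zero-extent) shapes.
    public static float ToRelativeThreshold(float absoluteThreshold, float maxExtent)
    {
        const float epsilon = 1e-5f;
        return maxExtent > epsilon ? absoluteThreshold / maxExtent : epsilon;
    }

    public static void Main()
    {
        Console.WriteLine(ToRelativeThreshold(0.001f, 2.0f));   // 0.0005
        Console.WriteLine(ToRelativeThreshold(0.001f, 0.0f));   // 1E-05 (fallback)
    }
}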
Example No. 5
        public void IsValidTest()
        {
            var m = Matrix44F.CreateTranslation(1, 2, 3) * Matrix44F.CreateRotationY(0.1f) * Matrix44F.CreateScale(-2, 3, 4);

            Assert.IsTrue(SrtTransform.IsValid(m));

            // Concatenating two SRTs creates a skew.
            m = Matrix44F.CreateRotationZ(0.1f) * Matrix44F.CreateScale(-2, 3, 4) * m;
            Assert.IsFalse(SrtTransform.IsValid(m));
        }
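The comment above relies on the fact that a matrix can be written as a rotation combined with an axis-aligned scale only if its columns are orthogonal (equivalently, the product of its transpose with itself is diagonal); concatenating two rotation/non-uniform-scale products generally breaks this and introduces shear. The following is a small 2-D sketch in plain C# (no DigitalRune types) that makes the orthogonality check explicit; it is an illustration, not the actual SrtTransform.IsValid implementation.

using System;

static class SkewSketch
{
    static double[,] Mul(double[,] a, double[,] b)
    {
        return new double[,]
        {
            { a[0, 0] * b[0, 0] + a[0, 1] * b[1, 0], a[0, 0] * b[0, 1] + a[0, 1] * b[1, 1] },
            { a[1, 0] * b[0, 0] + a[1, 1] * b[1, 0], a[1, 0] * b[0, 1] + a[1, 1] * b[1, 1] },
        };
    }

    static double[,] Rotation(double angle)
    {
        return new double[,]
        {
            { Math.Cos(angle), -Math.Sin(angle) },
            { Math.Sin(angle),  Math.Cos(angle) },
        };
    }

    static double[,] Scale(double sx, double sy)
    {
        return new double[,] { { sx, 0 }, { 0, sy } };
    }

    // Off-diagonal entry of transpose(M) * M = dot product of the two columns.
    // Zero means the columns are orthogonal, i.e. M is rotation * axis-aligned scale.
    static double ColumnDot(double[,] m)
    {
        return m[0, 0] * m[0, 1] + m[1, 0] * m[1, 1];
    }

    public static void Main()
    {
        var m = Mul(Rotation(0.1), Scale(-2, 3));           // one rotation * scale
        Console.WriteLine(ColumnDot(m));                    // ~0 --> no skew, valid SRT

        var m2 = Mul(Mul(Rotation(0.2), Scale(2, 5)), m);   // concatenate a second one
        Console.WriteLine(ColumnDot(m2));                   // clearly nonzero --> skew
    }
}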
Example No. 6
        /// <summary>
        /// Called when a mesh should be generated for the shape.
        /// </summary>
        /// <param name="absoluteDistanceThreshold">The absolute distance threshold.</param>
        /// <param name="iterationLimit">The iteration limit.</param>
        /// <returns>The triangle mesh for this shape.</returns>
        protected override TriangleMesh OnGetMesh(float absoluteDistanceThreshold, int iterationLimit)
        {
            // Convert absolute error to relative error.
            Vector3F extents           = GetAabb(Vector3F.One, Pose.Identity).Extent;
            float    maxExtent         = extents.LargestComponent;
            float    relativeThreshold = !Numeric.IsZero(maxExtent)
                                ? absoluteDistanceThreshold / maxExtent
                                : Numeric.EpsilonF;

            // Get child mesh.
            TriangleMesh mesh = _child.Shape.GetMesh(relativeThreshold, iterationLimit);

            // Transform child mesh into local space of this parent shape.
            mesh.Transform(_child.Pose.ToMatrix44F() * Matrix44F.CreateScale(_child.Scale));
            return mesh;
        }
Example No. 7
        private void CreateRigidBody()
        {
            var triangleMesh = new TriangleMesh();

            foreach (var meshNode in _modelNode.GetSubtree().OfType <MeshNode>())
            {
                // Extract the triangle mesh from the DigitalRune Graphics Mesh instance.
                var subTriangleMesh = new TriangleMesh();
                foreach (var submesh in meshNode.Mesh.Submeshes)
                {
                    submesh.ToTriangleMesh(subTriangleMesh);
                }

                // Apply the transformation of the mesh node.
                subTriangleMesh.Transform(meshNode.PoseWorld * Matrix44F.CreateScale(meshNode.ScaleWorld));

                // Combine into final triangle mesh.
                triangleMesh.Add(subTriangleMesh);
            }

            // Create a collision shape that uses the mesh.
            var triangleMeshShape = new TriangleMeshShape(triangleMesh);

            // Optional: Assign a spatial partitioning scheme to the triangle mesh. (A spatial partition
            // adds an additional memory overhead, but it improves collision detection speed tremendously!)
            triangleMeshShape.Partition = new CompressedAabbTree
            {
                // The tree is automatically built using a mixed top-down/bottom-up approach. Bottom-up
                // building is slower but produces better trees. If the tree building takes too long,
                // we can lower the BottomUpBuildThreshold (default is 128).
                BottomUpBuildThreshold = 0,
            };

            _rigidBody = new RigidBody(triangleMeshShape, new MassFrame(), null)
            {
                Pose       = _pose,
                Scale      = _scale,
                MotionType = MotionType.Static
            };

            // Add rigid body to physics simulation and model to scene.
            var simulation = _services.GetInstance <Simulation>();

            simulation.RigidBodies.Add(_rigidBody);
        }
Example No. 8
        public void UpdateClipSubmesh(IGraphicsService graphicsService, LightNode node)
        {
            var clip = node.Clip;

            Debug.Assert(clip != null);

            // We have to update the submesh if it is null or disposed.
            //   Submesh == null                            --> Update
            //   Submesh != null && VertexBuffer.IsDisposed --> Update
            //   Submesh != null && VertexBuffer == null    --> This is the EmptyShape. No update needed.
            if (ClipSubmesh == null || (ClipSubmesh.VertexBuffer != null && ClipSubmesh.VertexBuffer.IsDisposed))
            {
                ShapeMeshCache.GetMesh(graphicsService, clip.Shape, out ClipSubmesh, out ClipMatrix);

                // Add transform of Clip.
                ClipMatrix = clip.Pose * Matrix44F.CreateScale(clip.Scale) * ClipMatrix;
            }
        }
Example No. 9
        public void ConeMass()
        {
            var       s = new ConeShape(1, 2);
            float     m0;
            Vector3F  com0;
            Matrix33F i0;

            MassHelper.GetMass(s, new Vector3F(1, -2, -3), 1, true, 0.001f, 10, out m0, out com0, out i0);

            var m = s.GetMesh(0.001f, 10);

            m.Transform(Matrix44F.CreateScale(1, -2, -3));
            float     m1;
            Vector3F  com1;
            Matrix33F i1;

            MassHelper.GetMass(m, out m1, out com1, out i1);

            const float e = 0.01f;

            Assert.IsTrue(Numeric.AreEqual(m0, m1, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com1, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0, i1, e * (1 + i0.Trace)));

            // Try a different density.
            float     m2;
            Vector3F  com2;
            Matrix33F i2;

            MassHelper.GetMass(s, new Vector3F(1, -2, -3), 0.7f, true, 0.001f, 10, out m2, out com2, out i2);
            Assert.IsTrue(Numeric.AreEqual(m0 * 0.7f, m2, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com2, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0 * 0.7f, i2, e * (1 + i0.Trace)));

            // Try with target mass.
            float     m3;
            Vector3F  com3;
            Matrix33F i3;

            MassHelper.GetMass(s, new Vector3F(1, -2, -3), 23, false, 0.001f, 10, out m3, out com3, out i3);
            Assert.IsTrue(Numeric.AreEqual(23, m3, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com3, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0 * 23 / m0, i3, e * (1 + i0.Trace)));
        }
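The last block above prescribes a target mass of 23 instead of a density and expects the inertia to be rescaled by 23 / m0; likewise, the density of 0.7 earlier simply scales mass and inertia by 0.7. Both follow from the fact that, for fixed geometry, mass and the inertia tensor scale linearly with density. A tiny standalone sketch of that proportionality using the textbook solid-sphere formula (plain C#, not DigitalRune code):

using System;

static class InertiaScalingSketch
{
    // Moment of inertia of a solid sphere about a central axis: I = 2/5 * m * r^2.
    public static double SolidSphereInertia(double mass, double radius)
    {
        return 0.4 * mass * radius * radius;
    }

    public static void Main()
    {
        double r  = 0.5;
        double i0 = SolidSphereInertia(8.0, r);    // mass derived from density
        double i3 = SolidSphereInertia(23.0, r);   // prescribed target mass
        Console.WriteLine(i3 / i0);                // 2.875 = 23 / 8, i.e. targetMass / m0
    }
}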
Example No. 10
        public void FromToMatrixTest()
        {
            var t = new Vector3F(1, 2, 3);
            var r = new QuaternionF(1, 2, 3, 4).Normalized;
            var s = new Vector3F(2, 7, 9);
            var m = Matrix44F.CreateTranslation(t) * Matrix44F.CreateRotation(r) * Matrix44F.CreateScale(s);

            var srt = SrtTransform.FromMatrix(m);

            Assert.IsTrue(Vector3F.AreNumericallyEqual(t, srt.Translation));
            Assert.IsTrue(QuaternionF.AreNumericallyEqual(r, srt.Rotation));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(s, srt.Scale));

            // XNA:
            srt = SrtTransform.FromMatrix((Matrix)m);
            Assert.IsTrue(Vector3F.AreNumericallyEqual(t, srt.Translation));
            Assert.IsTrue(QuaternionF.AreNumericallyEqual(r, srt.Rotation));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(s, srt.Scale));

            // With negative scale, the decomposition is not unique (many possible combinations of
            // axis mirroring + rotation).
            t   = new Vector3F(1, 2, 3);
            r   = new QuaternionF(1, 2, 3, 4).Normalized;
            s   = new Vector3F(2, -7, 9);
            m   = Matrix44F.CreateTranslation(t) * Matrix44F.CreateRotation(r) * Matrix44F.CreateScale(s);
            srt = SrtTransform.FromMatrix(m);
            var m2 = (Matrix44F)srt;

            Assert.IsTrue(Matrix44F.AreNumericallyEqual(m, m2));

            m2 = srt.ToMatrix44F();
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(m, m2));

            m2 = srt;
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(m, m2));

            Matrix mXna = srt.ToXna();

            Assert.IsTrue(Matrix44F.AreNumericallyEqual(m, (Matrix44F)mXna));

            mXna = srt;
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(m, (Matrix44F)mXna));
        }
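The second half of the test compares recomposed matrices instead of the individual translation/rotation/scale components because, as the comment notes, a decomposition with negative scale is not unique. A tiny 2-D illustration in plain C# (no DigitalRune types): two different rotation/scale pairs produce numerically the same matrix.

using System;

static class NegativeScaleSketch
{
    public static void Main()
    {
        // Candidate A: identity rotation combined with the scale (-1, -1).
        double[,] a = { { -1, 0 }, { 0, -1 } };

        // Candidate B: rotation by 180 degrees combined with the scale (+1, +1).
        double c = Math.Cos(Math.PI), s = Math.Sin(Math.PI);
        double[,] b = { { c, -s }, { s, c } };

        // Both decompositions produce (numerically) the same matrix.
        Console.WriteLine($"{a[0, 0]:F3} {a[0, 1]:F3} / {a[1, 0]:F3} {a[1, 1]:F3}");
        Console.WriteLine($"{b[0, 0]:F3} {b[0, 1]:F3} / {b[1, 0]:F3} {b[1, 1]:F3}");
    }
}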
Example No. 11
        public ConvexDecompositionSample(Microsoft.Xna.Framework.Game game)
            : base(game)
        {
            SampleFramework.IsMouseVisible = false;
            GraphicsScreen.ClearBackground = true;
            GraphicsScreen.BackgroundColor = Color.CornflowerBlue;
            SetCamera(new Vector3F(3, 3, 3), 0.8f, -0.6f);

            // Load model.
            _modelNode = ContentManager.Load <ModelNode>("Saucer/Saucer").Clone();

            // Combine all meshes of the model into a single TriangleMesh.
            TriangleMesh mesh = new TriangleMesh();

            foreach (var meshNode in _modelNode.GetChildren().OfType <MeshNode>())
            {
                var childMesh = MeshHelper.ToTriangleMesh(meshNode.Mesh);
                childMesh.Transform(meshNode.PoseWorld * Matrix44F.CreateScale(meshNode.ScaleWorld));
                mesh.Add(childMesh);
            }

            // Start convex decomposition on another thread.
            _convexDecomposition = new ConvexDecomposition();
            _convexDecomposition.ProgressChanged        += OnProgressChanged;
            _convexDecomposition.AllowedConcavity        = 0.8f;
            _convexDecomposition.IntermediateVertexLimit = 65536;
            _convexDecomposition.VertexLimit             = 64;

            // 0 gives optimal results but is the slowest. Small positive values improve
            // speed but the result might be less optimal.
            _convexDecomposition.SmallIslandBoost = 0.02f;

            _convexDecomposition.SampleTriangleCenters  = true;
            _convexDecomposition.SampleTriangleVertices = true;

            // Experimental multithreading. Enable at own risk ;-)
            _convexDecomposition.EnableMultithreading = true;

            _convexDecomposition.DecomposeAsync(mesh);
        }
Example No. 12
        public void CompositeShapeWithNonUniformScaling()
        {
            var s = new CompositeShape();

            s.Children.Add(new GeometricObject(new BoxShape(1, 2, 3), new Vector3F(1.1f, 0.3f, 0.8f), new Pose(new Vector3F(100, 10, 0))));
            s.Children.Add(new GeometricObject(new SphereShape(1), new Vector3F(1.1f, 0.3f, 0.8f), new Pose(new Vector3F(-10, -10, 0))));
            float     m0;
            Vector3F  com0;
            Matrix33F i0;

            MassHelper.GetMass(s, new Vector3F(2, 2.1f, 2.8f), 0.7f, true, 0.001f, 10, out m0, out com0, out i0);

            var m = s.GetMesh(0.001f, 6);

            m.Transform(Matrix44F.CreateScale(2, 2.1f, 2.8f));
            float     m1;
            Vector3F  com1;
            Matrix33F i1;

            MassHelper.GetMass(m, out m1, out com1, out i1);

            const float e = 0.01f;

            Assert.IsTrue(Numeric.AreEqual(m0, 0.7f * m1, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com1, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0, 0.7f * i1, e * (1 + i0.Trace)));

            // Try with target mass.
            float     m3;
            Vector3F  com3;
            Matrix33F i3;

            MassHelper.GetMass(s, new Vector3F(2, 2.1f, 2.8f), 23, false, 0.001f, 10, out m3, out com3, out i3);
            Assert.IsTrue(Numeric.AreEqual(23, m3, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com3, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0 * 23 / m0, i3, e * (1 + i0.Trace)));
        }
Example No. 13
    private void CreateMesh(Shape shape, out Submesh submesh, out Matrix44F matrix)
    {
      // Use a special shared submesh for box shapes.
      var boxShape = shape as BoxShape;
      if (boxShape != null)
      {
        if (_boxSubmesh == null)
          _boxSubmesh = MeshHelper.GetBox(_graphicsService);

        submesh = _boxSubmesh;
        matrix = Matrix44F.CreateScale(boxShape.Extent);
        return;
      }

      var transformedShape = shape as TransformedShape;
      boxShape = (transformedShape != null) ? transformedShape.Child.Shape as BoxShape : null;
      if (boxShape != null)
      {
        if (_boxSubmesh == null)
          _boxSubmesh = MeshHelper.GetBox(_graphicsService);

        submesh = _boxSubmesh;
        matrix = transformedShape.Child.Pose
                 * Matrix44F.CreateScale(transformedShape.Child.Scale * boxShape.Extent);
        return;
      }

      // Create the submesh. Return EmptySubmesh if the MeshHelper returns null.
      var newSubmesh = MeshHelper.CreateSubmesh(
        _graphicsService.GraphicsDevice,
        shape.GetMesh(MeshRelativeError, MeshIterationLimit),
        NormalAngleLimit);

      submesh = newSubmesh ?? EmptySubmesh;
      matrix = Matrix44F.Identity;
    }
Example No. 14
        public void TransformedShapeMassWithNonuniformScaling()
        {
            var       s = new TransformedShape(new GeometricObject(new BoxShape(3, 2, 1), new Vector3F(0.7f, 0.8f, 0.9f), new Pose(new Vector3F(-1, 7, 4))));
            float     m0;
            Vector3F  com0;
            Matrix33F i0;

            MassHelper.GetMass(s, new Vector3F(2, 2.1f, 2.8f), 1, true, 0.001f, 10, out m0, out com0, out i0);

            var m = s.GetMesh(0.001f, 6);

            m.Transform(Matrix44F.CreateScale(2, 2.1f, 2.8f));
            float     m1;
            Vector3F  com1;
            Matrix33F i1;

            MassHelper.GetMass(m, out m1, out com1, out i1);

            const float e = 0.01f;

            Assert.IsTrue(Numeric.AreEqual(m0, m1, e * (1 + m0)));
            Assert.IsTrue(Vector3F.AreNumericallyEqual(com0, com1, e * (1 + com0.Length)));
            Assert.IsTrue(Matrix33F.AreNumericallyEqual(i0, i1, e * (1 + i0.Trace)));
        }
Example No. 15
        public MeshFromModelSample(Microsoft.Xna.Framework.Game game)
            : base(game)
        {
            // Add basic force effects.
            Simulation.ForceEffects.Add(new Gravity());
            Simulation.ForceEffects.Add(new Damping());

            // Add a ground plane.
            RigidBody groundPlane = new RigidBody(new PlaneShape(Vector3F.UnitY, 0))
            {
                Name       = "GroundPlane", // Names are not required but helpful for debugging.
                MotionType = MotionType.Static,
            };

            Simulation.RigidBodies.Add(groundPlane);

            // Use content pipeline to load a model.
            var bowlModelNode = ContentManager.Load <ModelNode>("Bowl");

            // Get mesh of the imported model.
            var meshNode = bowlModelNode.GetDescendants().OfType <MeshNode>().First();

            // Extract the triangle mesh from the DigitalRune Graphics Mesh instance.
            // Note: XNA graphics use clockwise winding for triangle front sides and DigitalRune Physics uses
            // counter-clockwise winding for front sides. FromModel() automatically flips the
            // winding order.
            TriangleMesh mesh = MeshHelper.ToTriangleMesh(meshNode.Mesh);

            // Apply the transformation of the mesh node.
            mesh.Transform(meshNode.PoseWorld * Matrix44F.CreateScale(meshNode.ScaleWorld));

            // Note: To convert an XNA Model instance to a triangle mesh you can use:
            //TriangleMesh mesh = TriangleMesh.FromModel(bowlModel);

            // Meshes are usually "one-sided" (objects can pass through the backside of the triangles)!
            // If you need to reverse the triangle winding order, use this:
            // Reverse winding order:
            //for (int i = 0; i < mesh.NumberOfTriangles; i++)
            //{
            //  var dummy = mesh.Indices[i * 3 + 1];
            //  mesh.Indices[i * 3 + 1] = mesh.Indices[i * 3 + 2];
            //  mesh.Indices[i * 3 + 2] = dummy;
            //}

            // Create a collision shape that uses the mesh.
            TriangleMeshShape meshShape = new TriangleMeshShape(mesh);

            // Meshes are usually "one-sided" and objects moving into a backside can move through the
            // mesh. Objects are only stopped if they approach from the front. If IsTwoSided is set,
            // objects are blocked from both sides.
            meshShape.IsTwoSided = true;

            // Optional: Assign a spatial partitioning scheme to the triangle mesh. (A spatial partition
            // adds an additional memory overhead, but it improves collision detection speed tremendously!)
            meshShape.Partition = new AabbTree <int>
            {
                // The tree is automatically built using a mixed top-down/bottom-up approach. Bottom-up
                // building is slower but produces better trees. If the tree building takes too long,
                // we can lower the BottomUpBuildThreshold (default is 128).
                BottomUpBuildThreshold = 0,
            };

            // Optional: The partition will be automatically built when needed. For static meshes it is
            // built only once when it is needed for the first time. Building the AABB tree can take a
            // few seconds for very large meshes.
            // By calling Update() manually we can force the partition to be built right now:
            //meshShape.Partition.Update(false);
            // We could also call this method in a background thread while the level is loading. Or,
            // we can build the triangle mesh and the AABB tree in the XNA content pipeline and avoid the
            // building of the tree at runtime (see Sample 33).

            // Create a static rigid body with the mesh shape.
            // We explicitly specify a mass frame. We can use any mass frame for static bodies (because
            // static bodies are effectively treated as if they have infinite mass). If we do not specify
            // a mass frame in the rigid body constructor, the constructor will automatically compute an
            // approximate mass frame (which can take some time for large meshes).
            var bowlBody = new RigidBody(meshShape, new MassFrame(), null)
            {
                Name       = "Bowl",
                Pose       = new Pose(new Vector3F()),
                MotionType = MotionType.Static
            };

            Simulation.RigidBodies.Add(bowlBody);

            // Add a dynamic sphere.
            Shape     sphereShape = new SphereShape(0.4f);
            RigidBody sphere      = new RigidBody(sphereShape)
            {
                Name = "Sphere",
                Pose = new Pose(new Vector3F(0, 10, 0)),
            };

            Simulation.RigidBodies.Add(sphere);
        }
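Building on the commented-out Update() call above, the following is a minimal sketch of forcing the partition build on a background thread while the level loads. It assumes the shape is not handed to the simulation (or queried) before the returned task has completed; the namespace import for TriangleMeshShape is an assumption.

using System.Threading.Tasks;
using DigitalRune.Geometry.Shapes;   // assumed namespace for TriangleMeshShape

static class PartitionPrebuild
{
    // Builds the AABB tree of a TriangleMeshShape on a background thread.
    // Update(false) builds the partition only if it is not up to date yet.
    public static Task BuildPartitionAsync(TriangleMeshShape meshShape)
    {
        return Task.Run(() => meshShape.Partition.Update(false));
    }
}

Waiting on the returned task before adding the body to the simulation ensures the first collision query does not trigger the (potentially multi-second) tree build.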
Example No. 16
        private static void CacheVertexBuffer(FigureNode node, GraphicsDevice graphicsDevice)
        {
            var figureRenderData = node.Figure.RenderData;
            var nodeRenderData   = (FigureNodeRenderData)node.RenderData;

            Vector3F[] positions = figureRenderData.Vertices.Array;

            #region ----- Cache vertex/index buffer for fill. -----
            var fillIndices = figureRenderData.FillIndices;
            if (fillIndices != null &&
                fillIndices.Count > 0 &&
                !Numeric.IsZero(node.FillAlpha))
            {
                // This code is similar to the code in Fill().

                Matrix44F world   = node.PoseWorld * Matrix44F.CreateScale(node.ScaleWorld);
                Vector3F  color3F = node.FillColor * node.FillAlpha;
                Color     color   = new Color(color3F.X, color3F.Y, color3F.Z, node.FillAlpha);

                int numberOfVertices = figureRenderData.Vertices.Count;
                int numberOfIndices  = figureRenderData.FillIndices.Count;

                VertexPositionColor[] vertices = new VertexPositionColor[numberOfVertices];

                // Copy all vertices.
                for (int i = 0; i < numberOfVertices; i++)
                {
                    vertices[i].Position = (Vector3)world.TransformPosition(positions[i]);
                    vertices[i].Color    = color;
                }

                nodeRenderData.FillVertexBuffer = new VertexBuffer(
                    graphicsDevice,
                    VertexPositionColor.VertexDeclaration,
                    numberOfVertices,
                    BufferUsage.WriteOnly);
                nodeRenderData.FillVertexBuffer.SetData(vertices);

                if (numberOfVertices <= ushort.MaxValue)
                {
                    // Copy all indices from int[] to ushort[].
                    int[]    int32Indices = figureRenderData.FillIndices.Array;
                    ushort[] indices      = new ushort[numberOfIndices];
                    for (int i = 0; i < numberOfIndices; i++)
                    {
                        indices[i] = (ushort)int32Indices[i];
                    }

                    nodeRenderData.FillIndexBuffer = new IndexBuffer(
                        graphicsDevice,
                        IndexElementSize.SixteenBits,
                        numberOfIndices,
                        BufferUsage.WriteOnly);
                    nodeRenderData.FillIndexBuffer.SetData(indices);
                }
                else
                {
                    nodeRenderData.FillIndexBuffer = new IndexBuffer(
                        graphicsDevice,
                        IndexElementSize.ThirtyTwoBits,
                        numberOfIndices,
                        BufferUsage.WriteOnly);
                    // Note: The FillIndices.Array may contain more than numberOfIndices entries! -->
                    // Specify number of indices explicitly!
                    nodeRenderData.FillIndexBuffer.SetData(figureRenderData.FillIndices.Array, 0, numberOfIndices);
                }
            }
            #endregion

            #region ----- Cache vertex/index buffer for stroke. -----

            var strokeIndices = figureRenderData.StrokeIndices;
            if (strokeIndices != null &&
                strokeIndices.Count > 0 &&
                !Numeric.IsZero(node.StrokeThickness) &&
                !Numeric.IsZero(node.StrokeAlpha))
            {
                // This code is similar to the code in Stroke() and in the ctor.

                Matrix44F world = node.PoseWorld * Matrix44F.CreateScale(node.ScaleWorld);

                float       thickness       = node.StrokeThickness;
                Vector3F    color3F         = node.StrokeColor * node.StrokeAlpha;
                HalfVector4 color           = new HalfVector4(color3F.X, color3F.Y, color3F.Z, node.StrokeAlpha);
                Vector4F    dash            = node.StrokeDashPattern * node.StrokeThickness;
                bool        usesDashPattern = (dash.Y + dash.Z) != 0;
                HalfVector4 dashSum         = new HalfVector4(
                    dash.X,
                    dash.X + dash.Y,
                    dash.X + dash.Y + dash.Z,
                    dash.X + dash.Y + dash.Z + dash.W);

                // Convert to vertices.
                float    lastDistance = 0;
                Vector3F lastPosition = new Vector3F(float.NaN);
                Vector3F lastWorld    = new Vector3F();

                HalfVector4 data0 = new HalfVector4(0, 1, thickness, 0);
                HalfVector4 data1 = new HalfVector4(0, 0, thickness, 0);
                HalfVector4 data2 = new HalfVector4(1, 0, thickness, 0);
                HalfVector4 data3 = new HalfVector4(1, 1, thickness, 0);

                int[] figureIndices        = strokeIndices.Array;
                int   numberOfLineSegments = strokeIndices.Count / 2;
                int   numberOfVertices     = numberOfLineSegments * 4;

                StrokeVertex[] vertices = new StrokeVertex[numberOfVertices];
                for (int i = 0; i < numberOfLineSegments; i++)
                {
                    int      startIndex = figureIndices[i * 2 + 0];
                    int      endIndex   = figureIndices[i * 2 + 1];
                    Vector3F start      = positions[startIndex];
                    Vector3F end        = positions[endIndex];

                    bool notConnectedWithLast = start != lastPosition;
                    lastPosition = end;

                    Vector3F startWorld = notConnectedWithLast ? world.TransformPosition(start) : lastWorld;
                    Vector3F endWorld   = world.TransformPosition(end);
                    lastWorld = endWorld;

                    // Compute start/end distances of lines from beginning of line strip
                    // for dash patterns.
                    float startDistance = 0;
                    float endDistance   = 1;
                    if (usesDashPattern)
                    {
                        Debug.Assert(node.DashInWorldSpace, "Cannot cache vertex buffer for figure with screen-space dash patterns.");

                        if (notConnectedWithLast)
                        {
                            lastDistance = 0;
                        }

                        startDistance = lastDistance;
                        endDistance   = startDistance + (endWorld - startWorld).Length;
                        lastDistance  = endDistance;

                        // The shader needs to know that DashInWorldSpace is true. To avoid
                        // effect parameter changes, we store the value in the sign of the distance!
                        startDistance = -startDistance;
                        endDistance   = -endDistance;
                    }

                    Vector4 s = new Vector4(startWorld.X, startWorld.Y, startWorld.Z, startDistance);
                    Vector4 e = new Vector4(endWorld.X, endWorld.Y, endWorld.Z, endDistance);

                    vertices[i * 4 + 0].Start = s;
                    vertices[i * 4 + 0].End   = e;
                    vertices[i * 4 + 0].Data  = data0;
                    vertices[i * 4 + 0].Color = color;
                    vertices[i * 4 + 0].Dash  = dashSum;

                    vertices[i * 4 + 1].Start = s;
                    vertices[i * 4 + 1].End   = e;
                    vertices[i * 4 + 1].Data  = data1;
                    vertices[i * 4 + 1].Color = color;
                    vertices[i * 4 + 1].Dash  = dashSum;

                    vertices[i * 4 + 2].Start = s;
                    vertices[i * 4 + 2].End   = e;
                    vertices[i * 4 + 2].Data  = data2;
                    vertices[i * 4 + 2].Color = color;
                    vertices[i * 4 + 2].Dash  = dashSum;

                    vertices[i * 4 + 3].Start = s;
                    vertices[i * 4 + 3].End   = e;
                    vertices[i * 4 + 3].Data  = data3;
                    vertices[i * 4 + 3].Color = color;
                    vertices[i * 4 + 3].Dash  = dashSum;
                }

                nodeRenderData.StrokeVertexBuffer = new VertexBuffer(
                    graphicsDevice,
                    StrokeVertex.VertexDeclaration,
                    vertices.Length,
                    BufferUsage.WriteOnly);
                nodeRenderData.StrokeVertexBuffer.SetData(vertices);

                // Create stroke indices.
                int numberOfIndices = numberOfLineSegments * 6;
                if (numberOfVertices <= ushort.MaxValue)
                {
                    ushort[] indices = new ushort[numberOfIndices];
                    for (int i = 0; i < numberOfLineSegments; i++)
                    {
                        // Create index buffer for quad (= two triangles, clockwise).
                        //   1--2
                        //   | /|
                        //   |/ |
                        //   0--3
                        indices[i * 6 + 0] = (ushort)(i * 4 + 0);
                        indices[i * 6 + 1] = (ushort)(i * 4 + 1);
                        indices[i * 6 + 2] = (ushort)(i * 4 + 2);
                        indices[i * 6 + 3] = (ushort)(i * 4 + 0);
                        indices[i * 6 + 4] = (ushort)(i * 4 + 2);
                        indices[i * 6 + 5] = (ushort)(i * 4 + 3);
                    }

                    nodeRenderData.StrokeIndexBuffer = new IndexBuffer(
                        graphicsDevice,
                        IndexElementSize.SixteenBits,
                        numberOfIndices,
                        BufferUsage.WriteOnly);
                    nodeRenderData.StrokeIndexBuffer.SetData(indices);
                }
                else
                {
                    int[] indices = new int[numberOfIndices];
                    for (int i = 0; i < numberOfLineSegments; i++)
                    {
                        // Create index buffer for quad (= two triangles, clockwise).
                        //   1--2
                        //   | /|
                        //   |/ |
                        //   0--3
                        indices[i * 6 + 0] = i * 4 + 0;
                        indices[i * 6 + 1] = i * 4 + 1;
                        indices[i * 6 + 2] = i * 4 + 2;
                        indices[i * 6 + 3] = i * 4 + 0;
                        indices[i * 6 + 4] = i * 4 + 2;
                        indices[i * 6 + 5] = i * 4 + 3;
                    }

                    nodeRenderData.StrokeIndexBuffer = new IndexBuffer(
                        graphicsDevice,
                        IndexElementSize.ThirtyTwoBits,
                        numberOfIndices,
                        BufferUsage.WriteOnly);
                    nodeRenderData.StrokeIndexBuffer.SetData(indices);
                }
            }
            #endregion

            nodeRenderData.IsValid = true;
        }
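The dash-pattern code above packs the DashInWorldSpace flag into the sign of the start/end distances so that no extra effect parameter change is needed. A standalone sketch of that sign-packing idea in plain C# (illustrative only; not the shader-side decoding used by DigitalRune):

using System;

static class SignFlagSketch
{
    // Packs a boolean flag into the sign of a non-negative distance.
    public static float Encode(float distance, bool dashInWorldSpace)
    {
        return dashInWorldSpace ? -distance : distance;
    }

    // Recovers the flag and the magnitude. Note: a distance of exactly 0
    // cannot carry the flag in this sketch, because the sign of 0 is lost.
    public static (bool DashInWorldSpace, float Distance) Decode(float encoded)
    {
        return (encoded < 0, Math.Abs(encoded));
    }

    public static void Main()
    {
        float packed = Encode(3.5f, dashInWorldSpace: true);
        var (flag, distance) = Decode(packed);
        Console.WriteLine(flag + " " + distance);   // True 3.5
    }
}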
Example No. 17
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.BlendState        = GraphicsHelper.BlendStateAdd;

            var viewport = graphicsDevice.Viewport;

            _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
            _parameterGBuffer0.SetValue(context.GBuffer0);
            _parameterGBuffer1.SetValue(context.GBuffer1);

            var    cameraNode     = context.CameraNode;
            Pose   cameraPose     = cameraNode.PoseWorld;
            Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            context.CameraNode.LastFrame = frame;

            bool isHdrEnabled = context.IsHdrEnabled();

            // Copy nodes to list and sort them by persistent IDs. This is necessary to avoid popping when
            // light probes overlap.
            _jobs.Clear();
            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var light = lightNode.Light as ImageBasedLight;
                if (light == null || light.Texture == null)
                {
                    continue;
                }

                // Build sort-ID - high values for lights which should be rendered last.
                ulong sortId = 0;

                // Render infinite lights first and others later.
                if (!(light.Shape is InfiniteShape))
                {
                    sortId += ((ulong)1 << 32); // Set high value above 32-bit range.
                }
                // Sort by priority. Lights with higher priority should be rendered last
                // (= over the other lights).
                // Shift priority (signed int) to positive range and add it.
                sortId += (ulong)((long)lightNode.Priority + int.MaxValue + 1);

                // Shift sortId and add light.Id in least significant bits.
                sortId = (sortId << 16) | (ushort)light.Id;

                // Add to list for sorting.
                _jobs.Add(new Job
                {
                    SortId    = sortId,
                    LightNode = lightNode,
                });
            }

            // Sort by ascending sort-ID value.
            _jobs.Sort(Comparer.Instance);

            numberOfNodes = _jobs.Count;
            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = _jobs[i].LightNode;
                var light     = (ImageBasedLight)lightNode.Light;

                // LightNode is visible in current frame.
                lightNode.LastFrame = frame;

                // ReSharper disable CompareOfFloatsByEqualityOperator
                bool enableDiffuse = !(Numeric.IsNaN(light.DiffuseIntensity) || (light.DiffuseIntensity == 0.0f && light.BlendMode == 0.0f));
                // ReSharper restore CompareOfFloatsByEqualityOperator

                bool enableSpecular = !Numeric.IsNaN(light.SpecularIntensity);
                if (!enableDiffuse && !enableSpecular)
                {
                    continue;
                }

                float hdrScale = isHdrEnabled ? light.HdrScale : 1;

                // We use 1x1 mipmap level for diffuse.
                // (2x2 is still okay, 4x4 already looks a bit like a specular reflection.)
                float diffuseIntensity = enableDiffuse ? light.DiffuseIntensity : 0.0f;
                _parameterParameters0.SetValue(new Vector4(
                                                   (Vector3)light.Color * diffuseIntensity * hdrScale, // DiffuseColor
                                                   Math.Max(0, light.Texture.LevelCount - 1)));        // Diffuse mip level.

                // Shader supports only RGBM.
                float rgbmMax;
                if (light.Encoding is RgbmEncoding)
                {
                    rgbmMax = GraphicsHelper.ToGamma(((RgbmEncoding)light.Encoding).Max);
                }
                else if (light.Encoding is SRgbEncoding)
                {
                    // Decoding RGBM with MaxValue 1 is equivalent to decoding sRGB, i.e., only
                    // the gamma-to-linear conversion is performed (assuming that the cube map alpha channel is 1).
                    rgbmMax = 1;
                }
                else
                {
                    throw new NotSupportedException(
                              "ImageBasedLight must use sRGB or RGBM encoding. Other encodings are not yet supported.");
                }

                _parameterParameters1.SetValue(new Vector4(
                                                   (Vector3)light.Color * light.SpecularIntensity * hdrScale, // SpecularColor
                                                   rgbmMax));

                // Bounding box can be a box shape or an infinite shape.
                var boundingBoxShape = lightNode.Shape as BoxShape;

                // Get extent of bounding box. For infinite shapes we simply set a large value.
                var boundingBoxExtent = boundingBoxShape != null
                              ? boundingBoxShape.Extent * lightNode.ScaleWorld
                              : new Vector3F(1e20f);

                // Falloff can only be used for box shapes but not for infinite shapes.
                float falloffRange = (boundingBoxShape != null) ? light.FalloffRange : 0;

                // AABB for localization in local space.
                // Use invalid min and max (min > max) to disable localization.
                Aabb projectionAabb = new Aabb(new Vector3F(1), new Vector3F(-1));
                if (light.EnableLocalizedReflection)
                {
                    if (light.LocalizedReflectionBox.HasValue)
                    {
                        // User defined AABB.
                        projectionAabb          = light.LocalizedReflectionBox.Value;
                        projectionAabb.Minimum *= lightNode.ScaleWorld;
                        projectionAabb.Maximum *= lightNode.ScaleWorld;
                    }
                    else if (boundingBoxShape != null)
                    {
                        // AABB is equal to the bounding box.
                        projectionAabb = new Aabb(-boundingBoxExtent / 2, boundingBoxExtent / 2);
                    }
                }

                _parameterParameters2.SetValue(new Vector4(
                                                   boundingBoxExtent.X / 2,
                                                   boundingBoxExtent.Y / 2,
                                                   boundingBoxExtent.Z / 2,
                                                   falloffRange));

                _parameterParameters3.SetValue(new Vector4(
                                                   projectionAabb.Minimum.X,
                                                   projectionAabb.Minimum.Y,
                                                   projectionAabb.Minimum.Z,
                                                   light.Texture.Size));

                _parameterParameters4.SetValue(new Vector4(
                                                   projectionAabb.Maximum.X,
                                                   projectionAabb.Maximum.Y,
                                                   projectionAabb.Maximum.Z,
                                                   light.BlendMode));

                // Precomputed value for specular reflection lookup.
                const float sqrt3 = 1.7320508075688772935274463415059f;
                _parameterPrecomputedTerm.SetValue((float)Math.Log(light.Texture.Size * sqrt3, 2.0));

                _parameterEnvironmentMap.SetValue(light.Texture);

                // Compute screen space rectangle and FrustumFarCorners.
                var rectangle           = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
                var texCoordTopLeft     = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
                var texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
                GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);

                // Convert frustum far corners from view space to world space.
                for (int j = 0; j < _frustumFarCorners.Length; j++)
                {
                    _frustumFarCorners[j] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_frustumFarCorners[j]);
                }

                _parameterFrustumCorners.SetValue(_frustumFarCorners);

                EffectPass passLight = null;
                if (enableDiffuse && enableSpecular)
                {
                    passLight = _passDiffuseAndSpecularLight;
                }
                else if (enableDiffuse)
                {
                    // TODO: Can we disable writes to LightBuffer1?
                    passLight = _passDiffuseLight;
                }
                else
                {
                    // TODO: Can we disable writes to LightBuffer0?
                    passLight = _passSpecularLight;
                }

                // Simply render fullscreen quad if we do not have a clip shape or a bounding box.
                if (lightNode.Clip == null && boundingBoxShape == null)
                {
                    graphicsDevice.BlendState = BlendState.AlphaBlend;

                    // Transform matrix transforms from world space with camera as origin to
                    // local space. The lightNode.Scale is already in the other parameters and not
                    // used in Transform.
                    var pose = lightNode.PoseWorld;
                    pose.Position -= cameraPose.Position;
                    _parameterTransform.SetValue(pose.Inverse);

                    passLight.Apply();
                    graphicsDevice.DrawFullScreenQuad();
                    continue;
                }

                // ----- Render clip mesh.
                graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
                graphicsDevice.BlendState        = GraphicsHelper.BlendStateNoColorWrite;
                if (lightNode.Clip != null)
                {
                    // Using user-defined clip shape.
                    var data = lightNode.RenderData as LightRenderData;
                    if (data == null)
                    {
                        data = new LightRenderData();
                        lightNode.RenderData = data;
                    }

                    data.UpdateClipSubmesh(context.GraphicsService, lightNode);
                    _parameterTransform.SetValue((Matrix)data.ClipMatrix * viewProjection);
                    _passClip.Apply();
                    data.ClipSubmesh.Draw();

                    graphicsDevice.DepthStencilState = lightNode.InvertClip
                        ? GraphicsHelper.DepthStencilStateStencilEqual0
                        : GraphicsHelper.DepthStencilStateStencilNotEqual0;
                }
                else
                {
                    Debug.Assert(boundingBoxShape != null);

                    // Use box submesh.
                    if (_boxSubmesh == null)
                    {
                        _boxSubmesh = MeshHelper.GetBox(context.GraphicsService);
                    }

                    Matrix44F world = lightNode.PoseWorld
                                      * Matrix44F.CreateScale(lightNode.ScaleLocal * boundingBoxShape.Extent);
                    _parameterTransform.SetValue((Matrix)world * viewProjection);

                    _passClip.Apply();
                    _boxSubmesh.Draw();

                    graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateStencilNotEqual0;
                }

                graphicsDevice.BlendState = BlendState.AlphaBlend;

                {
                    // Transform matrix transforms from world space with camera as origin to
                    // local space. The lightNode.Scale is already in the other parameters and not
                    // used in Transform.
                    var pose = lightNode.PoseWorld;
                    pose.Position -= cameraPose.Position;
                    _parameterTransform.SetValue(pose.Inverse);
                }

                // ----- Render full screen quad.
                passLight.Apply();
                graphicsDevice.DrawQuad(rectangle);
            }

            savedRenderState.Restore();
            _jobs.Clear();
        }
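The sort-ID built in the loop above packs three criteria into one 64-bit key: a flag that puts infinite lights first, the priority shifted into the unsigned range, and the persistent light ID in the lowest 16 bits so the order stays stable and overlapping probes do not "pop". A standalone sketch of this packing in plain C# (illustrative, not the renderer's internal type):

using System;

static class SortIdSketch
{
    public static ulong BuildSortId(bool isInfiniteShape, int priority, ushort lightId)
    {
        ulong sortId = 0;

        // Finite lights get a bit above the 32-bit range --> sorted after infinite ones.
        if (!isInfiniteShape)
            sortId += (ulong)1 << 32;

        // Shift the signed priority into the positive range.
        sortId += (ulong)((long)priority + int.MaxValue + 1);

        // Keep the persistent light ID in the least significant bits.
        return (sortId << 16) | lightId;
    }

    public static void Main()
    {
        ulong infinite = BuildSortId(true,  0, 1);
        ulong lowPrio  = BuildSortId(false, -5, 2);
        ulong highPrio = BuildSortId(false, 10, 3);

        // Ascending order: infinite first, then low priority, then high priority.
        Console.WriteLine(infinite < lowPrio && lowPrio < highPrio);   // True
    }
}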
Example No. 18
        //--------------------------------------------------------------
        #region Creation & Cleanup
        //--------------------------------------------------------------

        public VehicleObject(IServiceLocator services)
        {
            _services = services;
            Name      = "Vehicle";

            _inputService = _services.GetInstance <IInputService>();
            _simulation   = _services.GetInstance <Simulation>();

            // Load models for rendering.
            var contentManager = _services.GetInstance <ContentManager>();

            _vehicleModelNode   = contentManager.Load <ModelNode>("Car/Car").Clone();
            _wheelModelNodes    = new ModelNode[4];
            _wheelModelNodes[0] = contentManager.Load <ModelNode>("Car/Wheel").Clone();
            _wheelModelNodes[1] = _wheelModelNodes[0].Clone();
            _wheelModelNodes[2] = _wheelModelNodes[0].Clone();
            _wheelModelNodes[3] = _wheelModelNodes[0].Clone();

            // Add wheels under the car model node.
            _vehicleModelNode.Children.Add(_wheelModelNodes[0]);
            _vehicleModelNode.Children.Add(_wheelModelNodes[1]);
            _vehicleModelNode.Children.Add(_wheelModelNodes[2]);
            _vehicleModelNode.Children.Add(_wheelModelNodes[3]);

            // ----- Create the chassis of the car.
            // The Vehicle needs a rigid body that represents the chassis. This can be any shape (e.g.
            // a simple BoxShape). In this example we will build a convex polyhedron from the car model.

            // 1. Extract the vertices from the car model.
            // The car model has ~10,000 vertices. It consists of a MeshNode for the glass
            // parts and a MeshNode "Car" for the chassis.
            var meshNode = _vehicleModelNode.GetDescendants()
                           .OfType <MeshNode>()
                           .First(mn => mn.Name == "Car");
            var mesh = MeshHelper.ToTriangleMesh(meshNode.Mesh);

            // Apply the transformation of the mesh node.
            mesh.Transform(meshNode.PoseWorld * Matrix44F.CreateScale(meshNode.ScaleWorld));

            // 2. (Optional) Create simplified convex hull from mesh.
            // We could also skip this step and directly create a convex polyhedron from the mesh using
            //    var chassisShape = new ConvexPolyhedron(mesh.Vertices);
            // However, the convex polyhedron would still have 500-600 vertices.
            // We can reduce the number of vertices by using the GeometryHelper.
            // Create a convex hull for the mesh with max. 64 vertices. Additionally, shrink the hull by 4 cm.
            var convexHull = GeometryHelper.CreateConvexHull(mesh.Vertices, 64, -0.04f);

            // 3. Create convex polyhedron shape using the vertices of the convex hull.
            var chassisShape = new ConvexPolyhedron(convexHull.Vertices.Select(v => v.Position));

            // (Note: Building convex hulls and convex polyhedra is time-consuming. To save loading time
            // we should build the shape in the XNA content pipeline. See other DigitalRune Physics
            // Samples.)

            // The mass properties of the car. We use a mass of 800 kg.
            var mass = MassFrame.FromShapeAndMass(chassisShape, Vector3F.One, 800, 0.1f, 1);

            // Trick: We artificially modify the center of mass of the rigid body. Lowering the center
            // of mass makes the car more stable against rolling in tight curves.
            // We could also modify mass.Inertia for other effects.
            var pose = mass.Pose;

            pose.Position.Y -= 0.5f;  // Lower the center of mass.
            pose.Position.Z  = -0.5f; // The center should be below the driver.
            // (Note: The car model is not exactly centered.)
            mass.Pose = pose;

            // Material for the chassis.
            var material = new UniformMaterial
            {
                Restitution     = 0.1f,
                StaticFriction  = 0.2f,
                DynamicFriction = 0.2f
            };

            var chassis = new RigidBody(chassisShape, mass, material)
            {
                Pose     = new Pose(new Vector3F(0, 2, 0)), // Start position
                UserData = "NoDraw",                        // (Remove this line to render the collision model.)
            };

            // ----- Create the vehicle.
            Vehicle = new Vehicle(_simulation, chassis);

            // Add 4 wheels.
            Vehicle.Wheels.Add(new Wheel { Offset = new Vector3F(-0.9f, 0.6f, -2.0f), Radius = 0.36f, SuspensionRestLength = 0.55f, MinSuspensionLength = 0.25f, Friction = 2 });    // Front left
            Vehicle.Wheels.Add(new Wheel { Offset = new Vector3F(0.9f, 0.6f, -2.0f), Radius = 0.36f, SuspensionRestLength = 0.55f, MinSuspensionLength = 0.25f, Friction = 2 });     // Front right
            Vehicle.Wheels.Add(new Wheel { Offset = new Vector3F(-0.9f, 0.6f, 0.98f), Radius = 0.36f, SuspensionRestLength = 0.55f, MinSuspensionLength = 0.25f, Friction = 1.8f }); // Back left
            Vehicle.Wheels.Add(new Wheel { Offset = new Vector3F(0.9f, 0.6f, 0.98f), Radius = 0.36f, SuspensionRestLength = 0.55f, MinSuspensionLength = 0.25f, Friction = 1.8f });  // Back right

            // Vehicles are disabled by default. This way we can create the vehicle now, and its
            // simulation objects are only added to the simulation when needed.
            Vehicle.Enabled = false;
        }
Example No. 19
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.BlendState        = GraphicsHelper.BlendStateAdd;

            var viewport = graphicsDevice.Viewport;

            _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
            _parameterGBuffer0.SetValue(context.GBuffer0);
            _parameterGBuffer1.SetValue(context.GBuffer1);

            var    cameraNode     = context.CameraNode;
            Pose   cameraPose     = cameraNode.PoseWorld;
            Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            context.CameraNode.LastFrame = frame;

            var isHdrEnabled = context.IsHdrEnabled();

            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var light = lightNode.Light as PointLight;
                if (light == null)
                {
                    continue;
                }

                // LightNode is visible in current frame.
                lightNode.LastFrame = frame;

                float hdrScale = isHdrEnabled ? light.HdrScale : 1;
                _parameterDiffuseColor.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
                _parameterSpecularColor.SetValue((Vector3)light.Color * light.SpecularIntensity * hdrScale);

                Pose lightPose = lightNode.PoseWorld;

                bool hasShadow = (lightNode.Shadow != null && lightNode.Shadow.ShadowMask != null);
                if (hasShadow)
                {
                    switch (lightNode.Shadow.ShadowMaskChannel)
                    {
                    case 0: _parameterShadowMaskChannel.SetValue(new Vector4(1, 0, 0, 0)); break;

                    case 1: _parameterShadowMaskChannel.SetValue(new Vector4(0, 1, 0, 0)); break;

                    case 2: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 1, 0)); break;

                    default: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 0, 1)); break;
                    }

                    _parameterShadowMask.SetValue(lightNode.Shadow.ShadowMask);
                }

                _parameterPosition.SetValue((Vector3)(lightPose.Position - cameraPose.Position));
                _parameterRange.SetValue(light.Range);
                _parameterAttenuation.SetValue(light.Attenuation);

                bool hasTexture = (light.Texture != null);
                if (hasTexture)
                {
                    _parameterTexture.SetValue(light.Texture);

                    // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
                    // cube map and objects or texts in it are mirrored.)
                    var mirrorZ = Matrix44F.CreateScale(1, 1, -1);
                    _parameterTextureMatrix.SetValue((Matrix)(mirrorZ * lightPose.Inverse));
                }

                var rectangle           = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightPose.Position, light.Range);
                var texCoordTopLeft     = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
                var texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
                GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);

                // Convert frustum far corners from view space to world space.
                for (int j = 0; j < _frustumFarCorners.Length; j++)
                {
                    _frustumFarCorners[j] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_frustumFarCorners[j]);
                }

                _parameterFrustumCorners.SetValue(_frustumFarCorners);

                if (lightNode.Clip != null)
                {
                    var data = lightNode.RenderData as LightRenderData;
                    if (data == null)
                    {
                        data = new LightRenderData();
                        lightNode.RenderData = data;
                    }

                    data.UpdateClipSubmesh(context.GraphicsService, lightNode);

                    graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
                    graphicsDevice.BlendState        = GraphicsHelper.BlendStateNoColorWrite;

                    _parameterWorldViewProjection.SetValue((Matrix)data.ClipMatrix * viewProjection);
                    _passClip.Apply();
                    data.ClipSubmesh.Draw();

                    graphicsDevice.DepthStencilState = lightNode.InvertClip
            ? GraphicsHelper.DepthStencilStateStencilEqual0
            : GraphicsHelper.DepthStencilStateStencilNotEqual0;
                    graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;
                }
                else
                {
                    graphicsDevice.DepthStencilState = DepthStencilState.None;
                }

                if (hasShadow)
                {
                    if (hasTexture)
                    {
                        if (light.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passShadowedTexturedAlpha.Apply();
                        }
                        else
                        {
                            _passShadowedTexturedRgb.Apply();
                        }
                    }
                    else
                    {
                        _passShadowed.Apply();
                    }
                }
                else
                {
                    if (hasTexture)
                    {
                        if (light.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passTexturedAlpha.Apply();
                        }
                        else
                        {
                            _passTexturedRgb.Apply();
                        }
                    }
                    else
                    {
                        _passDefault.Apply();
                    }
                }

                graphicsDevice.DrawQuad(rectangle);
            }

            savedRenderState.Restore();
        }
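In the method above, the switch on ShadowMaskChannel builds a one-hot Vector4, which the effect presumably dots with the RGBA shadow mask to select the channel that belongs to this light. A minimal sketch of that mapping as a standalone helper (hypothetical name; not part of the renderer shown here):

        // Hypothetical helper: map a shadow mask channel index (0-3) to a one-hot selector.
        // The shader side can then presumably compute: shadowTerm = dot(shadowMask, selector).
        private static Vector4 GetShadowMaskChannelSelector(int channel)
        {
            switch (channel)
            {
            case 0:  return new Vector4(1, 0, 0, 0);
            case 1:  return new Vector4(0, 1, 0, 0);
            case 2:  return new Vector4(0, 0, 1, 0);
            default: return new Vector4(0, 0, 0, 1);
            }
        }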
        //--------------------------------------------------------------
        #region Methods
        //--------------------------------------------------------------

        /// <summary>
        /// Computes the intersection of <see cref="MeshNode"/>s.
        /// </summary>
        /// <param name="meshNodePairs">
        /// A collection of <see cref="MeshNode"/> pairs. The renderer computes the intersection volume
        /// of each pair.
        /// </param>
        /// <param name="color">The diffuse color used for the intersection.</param>
        /// <param name="alpha">The opacity of the intersection.</param>
        /// <param name="maxConvexity">
        /// The maximum convexity of the submeshes. A convex mesh has a convexity of 1. A concave mesh
        /// has a convexity greater than 1. Convexity is the number of layers required for depth peeling
        /// (= the number of front face layers when looking at the object).
        /// </param>
        /// <param name="context">The render context.</param>
        /// <remarks>
        /// <para>
        /// This method renders an off-screen image (color and depth) of the intersection volume. This
        /// operation destroys the currently set render target and depth/stencil buffer.
        /// </para>
        /// </remarks>
        /// <exception cref="ObjectDisposedException">
        /// The <see cref="IntersectionRenderer"/> has already been disposed.
        /// </exception>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="meshNodePairs"/> or <paramref name="context"/> is
        /// <see langword="null"/>.
        /// </exception>
        /// <exception cref="ArgumentOutOfRangeException">
        /// The convexity must be greater than 0.
        /// </exception>
        /// <exception cref="GraphicsException">
        /// Invalid render context: Graphics service is not set.
        /// </exception>
        /// <exception cref="GraphicsException">
        /// Invalid render context: Wrong graphics service.
        /// </exception>
        /// <exception cref="GraphicsException">
        /// Invalid render context: Scene is not set.
        /// </exception>
        /// <exception cref="GraphicsException">
        /// Invalid render context: Camera node needs to be set in render context.
        /// </exception>
        public void ComputeIntersection(IEnumerable <Pair <MeshNode> > meshNodePairs,
                                        Vector3F color, float alpha, float maxConvexity, RenderContext context)
        {
            if (_isDisposed)
            {
                throw new ObjectDisposedException("IntersectionRenderer has already been disposed.");
            }
            if (meshNodePairs == null)
            {
                throw new ArgumentNullException("meshNodePairs");
            }
            if (maxConvexity < 1)
            {
                throw new ArgumentOutOfRangeException("maxConvexity", "The max convexity must be greater than 0.");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }
            if (context.GraphicsService == null)
            {
                throw new GraphicsException("Invalid render context: Graphics service is not set.");
            }
            if (_graphicsService != context.GraphicsService)
            {
                throw new GraphicsException("Invalid render context: Wrong graphics service.");
            }
            if (context.CameraNode == null)
            {
                throw new GraphicsException("Camera node needs to be set in render context.");
            }
            if (context.Scene == null)
            {
                throw new GraphicsException("A scene needs to be set in the render context.");
            }

            // Create 2 ordered pairs for each unordered pair.
            _pairs.Clear();
            foreach (var pair in meshNodePairs)
            {
                if (pair.First == null || pair.Second == null)
                {
                    continue;
                }

                // Frustum culling.
                if (!context.Scene.HaveContact(pair.First, context.CameraNode))
                {
                    continue;
                }
                if (!context.Scene.HaveContact(pair.Second, context.CameraNode))
                {
                    continue;
                }

                _pairs.Add(new Pair <MeshNode, MeshNode>(pair.First, pair.Second));
                _pairs.Add(new Pair <MeshNode, MeshNode>(pair.Second, pair.First));
            }

            var renderTargetPool = _graphicsService.RenderTargetPool;

            if (_pairs.Count == 0)
            {
                renderTargetPool.Recycle(_intersectionImage);
                _intersectionImage = null;
                return;
            }

            // Color and alpha are applied in RenderIntersection().
            _color = color;
            _alpha = alpha;

            var graphicsDevice = _graphicsService.GraphicsDevice;

            // Save original render states.
            var originalBlendState        = graphicsDevice.BlendState;
            var originalDepthStencilState = graphicsDevice.DepthStencilState;
            var originalRasterizerState   = graphicsDevice.RasterizerState;
            var originalScissorRectangle  = graphicsDevice.ScissorRectangle;

            // Get offscreen render targets.
            var viewport = context.Viewport;

            viewport.X      = 0;
            viewport.Y      = 0;
            viewport.Width  = (int)(viewport.Width / DownsampleFactor);
            viewport.Height = (int)(viewport.Height / DownsampleFactor);
            var renderTargetFormat = new RenderTargetFormat(viewport.Width, viewport.Height, false, SurfaceFormat.Color, DepthFormat.Depth24Stencil8);

            // Try to reuse any existing render targets.
            // (Usually they are recycled in RenderIntersection()).
            var currentScene = _intersectionImage;

            if (currentScene == null || !renderTargetFormat.IsCompatibleWith(currentScene))
            {
                currentScene.SafeDispose();
                currentScene = renderTargetPool.Obtain2D(renderTargetFormat);
            }
            var lastScene = renderTargetPool.Obtain2D(renderTargetFormat);

            // Set shared effect parameters.
            var cameraNode = context.CameraNode;
            var view       = (Matrix)cameraNode.View;
            var projection = cameraNode.Camera.Projection;
            var near       = projection.Near;
            var far        = projection.Far;

            _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));

            // The DepthEpsilon has to be tuned if depth peeling does not work because
            // of numerical problems in the z equality comparisons.
            _parameterCameraParameters.SetValue(new Vector3(near, far - near, 0.0000001f));
            _parameterView.SetValue(view);
            _parameterProjection.SetValue((Matrix)projection);

            var defaultTexture = _graphicsService.GetDefaultTexture2DBlack();

            // Handle all pairs.
            bool isFirstPass = true;

            while (true)
            {
                // Find a mesh node A and all mesh nodes to which it needs to be clipped.
                MeshNode meshNodeA = null;
                _partners.Clear();
                for (int i = 0; i < _pairs.Count; i++)
                {
                    var pair = _pairs[i];

                    if (pair.First == null)
                    {
                        continue;
                    }

                    if (meshNodeA == null)
                    {
                        meshNodeA = pair.First;
                    }

                    if (pair.First == meshNodeA)
                    {
                        _partners.Add(pair.Second);

                        //  Remove this pair.
                        _pairs[i] = new Pair <MeshNode, MeshNode>();
                    }
                }

                // Abort if we have handled all pairs.
                if (meshNodeA == null)
                {
                    break;
                }

                var worldTransformA = (Matrix)(meshNodeA.PoseWorld * Matrix44F.CreateScale(meshNodeA.ScaleWorld));

                if (EnableScissorTest)
                {
                    // Scissor rectangle of A.
                    var scissorA = GraphicsHelper.GetScissorRectangle(context.CameraNode, viewport, meshNodeA);

                    // Union of scissor rectangles of partners.
                    Rectangle partnerRectangle = GraphicsHelper.GetScissorRectangle(context.CameraNode, viewport, _partners[0]);
                    for (int i = 1; i < _partners.Count; i++)
                    {
                        var a = GraphicsHelper.GetScissorRectangle(context.CameraNode, viewport, _partners[i]);
                        partnerRectangle = Rectangle.Union(partnerRectangle, a);
                    }

                    // Use intersection of A and partners.
                    graphicsDevice.ScissorRectangle = Rectangle.Intersect(scissorA, partnerRectangle);

                    // We store the union of all scissor rectangles for use in RenderIntersection().
                    if (isFirstPass)
                    {
                        _totalScissorRectangle = graphicsDevice.ScissorRectangle;
                    }
                    else
                    {
                        _totalScissorRectangle = Rectangle.Union(_totalScissorRectangle, graphicsDevice.ScissorRectangle);
                    }
                }

                // Depth peeling of A.
                for (int layer = 0; layer < maxConvexity; layer++)
                {
                    // Set and clear render target.
                    graphicsDevice.SetRenderTarget(currentScene);
                    graphicsDevice.Clear(new Color(1, 1, 1, 0)); // RGB = "a large depth", A = "empty area"

                    // Render a depth layer of A.
                    graphicsDevice.DepthStencilState = DepthStencilStateWriteLess;
                    graphicsDevice.BlendState        = BlendState.Opaque;
                    graphicsDevice.RasterizerState   = EnableScissorTest ? CullCounterClockwiseScissor : RasterizerState.CullCounterClockwise;
                    _parameterWorld.SetValue(worldTransformA);
                    _parameterTexture.SetValue((layer == 0) ? defaultTexture : lastScene);
                    _passPeel.Apply();
                    foreach (var submesh in meshNodeA.Mesh.Submeshes)
                    {
                        submesh.Draw();
                    }

                    // Render partners to set stencil.
                    graphicsDevice.DepthStencilState = DepthStencilStateOnePassStencilFail;
                    graphicsDevice.BlendState        = BlendStateNoWrite;
                    graphicsDevice.RasterizerState   = EnableScissorTest ? CullNoneScissor : RasterizerState.CullNone;
                    foreach (var partner in _partners)
                    {
                        _parameterWorld.SetValue((Matrix)(partner.PoseWorld * Matrix44F.CreateScale(partner.ScaleWorld)));
                        _passMark.Apply();
                        foreach (var submesh in partner.Mesh.Submeshes)
                        {
                            submesh.Draw();
                        }
                    }

                    // Clear depth buffer. Leave stencil buffer unchanged.
                    graphicsDevice.Clear(ClearOptions.DepthBuffer, new Color(0, 1, 0), 1, 0);

                    // Render A to compute lighting.
                    graphicsDevice.DepthStencilState = DepthStencilStateStencilNotEqual0;
                    graphicsDevice.BlendState        = BlendState.Opaque;
                    graphicsDevice.RasterizerState   = EnableScissorTest ? CullCounterClockwiseScissor :  RasterizerState.CullCounterClockwise;
                    _parameterWorld.SetValue(worldTransformA);
                    _passDraw.Apply();
                    foreach (var submesh in meshNodeA.Mesh.Submeshes)
                    {
                        submesh.Draw();
                    }

                    // Combine last intersection image with current.
                    if (!isFirstPass)
                    {
                        graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
                        graphicsDevice.BlendState        = BlendState.Opaque;
                        graphicsDevice.RasterizerState   = EnableScissorTest ? CullNoneScissor : RasterizerState.CullNone;
                        _parameterTexture.SetValue(lastScene);
                        _passCombine.Apply();
                        graphicsDevice.DrawFullScreenQuad();
                    }

                    isFirstPass = false;

                    // ----- Swap render targets.
                    MathHelper.Swap(ref lastScene, ref currentScene);
                }
            }

            // Store final images for RenderIntersection.
            _intersectionImage = lastScene;

            // Scale scissor rectangle back to full-screen resolution.
            if (DownsampleFactor > 1)
            {
                _totalScissorRectangle.X      = (int)(_totalScissorRectangle.X * DownsampleFactor);
                _totalScissorRectangle.Y      = (int)(_totalScissorRectangle.Y * DownsampleFactor);
                _totalScissorRectangle.Width  = (int)(_totalScissorRectangle.Width * DownsampleFactor);
                _totalScissorRectangle.Height = (int)(_totalScissorRectangle.Height * DownsampleFactor);
            }


            // Restore original render state.
            graphicsDevice.BlendState        = originalBlendState ?? BlendState.Opaque;
            graphicsDevice.DepthStencilState = originalDepthStencilState ?? DepthStencilState.Default;
            graphicsDevice.RasterizerState   = originalRasterizerState ?? RasterizerState.CullCounterClockwise;
            graphicsDevice.ScissorRectangle  = originalScissorRectangle;

            renderTargetPool.Recycle(currentScene);
            _partners.Clear();
            _pairs.Clear();
        }
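A minimal usage sketch for ComputeIntersection, assuming two convex MeshNodes and a companion RenderIntersection method as referenced in the comments above (the variable names, the pair constructor and the RenderIntersection signature are assumptions for illustration):

        // Hypothetical caller: compute the intersection volume of two mesh nodes, then draw it.
        var pairs = new[] { new Pair<MeshNode>(meshNodeA, meshNodeB) };   // Assumed (first, second) constructor.
        intersectionRenderer.ComputeIntersection(
            pairs,
            new Vector3F(1, 0, 0),   // Diffuse color of the intersection volume.
            0.5f,                    // Alpha.
            1,                       // maxConvexity = 1 because both meshes are assumed convex.
            context);

        // The off-screen result is applied to the scene later, presumably via something like:
        // intersectionRenderer.RenderIntersection(context);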
Esempio n. 21
        internal static void GetMass(Shape shape, Vector3F scale, float densityOrMass, bool isDensity, float relativeDistanceThreshold, int iterationLimit,
                                     out float mass, out Vector3F centerOfMass, out Matrix33F inertia)
        {
            if (shape == null)
            {
                throw new ArgumentNullException("shape");
            }
            if (densityOrMass <= 0)
            {
                throw new ArgumentOutOfRangeException("densityOrMass", "The density or mass must be greater than 0.");
            }
            if (relativeDistanceThreshold < 0)
            {
                throw new ArgumentOutOfRangeException("relativeDistanceThreshold", "The relative distance threshold must not be negative.");
            }

            mass         = 0;
            centerOfMass = Vector3F.Zero;
            inertia      = Matrix33F.Zero;

            // Note: We support all shape types of DigitalRune Geometry.
            // To support user-defined shapes we could add an interface IMassSource which can be
            // implemented by shapes. In the else-case below we can check whether the shape implements
            // the interface.
            if (shape is EmptyShape)
            {
                return;
            }
            else if (shape is InfiniteShape)
            {
                mass    = float.PositiveInfinity;
                inertia = Matrix33F.CreateScale(float.PositiveInfinity);
            }
            else if (shape is BoxShape)
            {
                GetMass((BoxShape)shape, scale, densityOrMass, isDensity, out mass, out inertia);
            }
            else if (shape is CapsuleShape)
            {
                GetMass((CapsuleShape)shape, scale, densityOrMass, isDensity, out mass, out inertia);
            }
            else if (shape is ConeShape)
            {
                GetMass((ConeShape)shape, scale, densityOrMass, isDensity, out mass, out centerOfMass, out inertia);
            }
            else if (shape is CylinderShape)
            {
                GetMass((CylinderShape)shape, scale, densityOrMass, isDensity, out mass, out inertia);
            }
            else if (shape is ScaledConvexShape)
            {
                var scaledConvex = (ScaledConvexShape)shape;
                GetMass(scaledConvex.Shape, scale * scaledConvex.Scale, densityOrMass, isDensity, relativeDistanceThreshold, iterationLimit, out mass, out centerOfMass, out inertia);
            }
            else if (shape is SphereShape)
            {
                GetMass((SphereShape)shape, scale, densityOrMass, isDensity, out mass, out inertia);
            }
            else if (shape is TransformedShape)
            {
                var transformed = (TransformedShape)shape;

                // Call GetMass for the contained GeometricObject.
                GetMass(transformed.Child, scale, densityOrMass, isDensity, relativeDistanceThreshold, iterationLimit, out mass, out centerOfMass, out inertia);
            }
            else if (shape is HeightField)
            {
                // Height fields should always be static. Therefore, we can treat them as having
                // infinite or zero mass.
                return;
            }
            else if (shape is CompositeShape)
            {
                var   composite = (CompositeShape)shape;
                float density   = (isDensity) ? densityOrMass : 1;
                foreach (var child in composite.Children)
                {
                    // Call GetMass for the child geometric object.
                    float     childMass;
                    Vector3F  childCenterOfMass;
                    Matrix33F childInertia;
                    GetMass(child, scale, density, true, relativeDistanceThreshold, iterationLimit, out childMass, out childCenterOfMass, out childInertia);

                    // Add child mass to total mass.
                    mass = mass + childMass;

                    // Add child inertia to total inertia and consider the translation.
                    inertia += GetTranslatedMassInertia(childMass, childInertia, childCenterOfMass);

                    // Add weighted centerOfMass.
                    centerOfMass = centerOfMass + childCenterOfMass * childMass;
                }

                // centerOfMass must be divided by the total mass because the child centers of mass
                // were weighted with the child masses.
                centerOfMass /= mass;

                // Make inertia relative to center of mass.
                inertia = GetUntranslatedMassInertia(mass, inertia, centerOfMass);

                if (!isDensity)
                {
                    // So far, we have not computed the correct total mass. We have to adjust the total
                    // mass to be equal to the given target mass.
                    AdjustMass(densityOrMass, ref mass, ref inertia);
                }
            }
            else if (iterationLimit <= 0)
            {
                // We do not have a special formula for this kind of shape and iteration limit is 0 or less.
                // --> Use mass properties of AABB.
                var aabb   = shape.GetAabb(scale, Pose.Identity);
                var extent = aabb.Extent;
                centerOfMass = aabb.Center;
                GetMass(extent, densityOrMass, isDensity, out mass, out inertia);
            }
            else
            {
                // We do not have a special formula for this kind of shape.
                // --> General polyhedron mass from triangle mesh.
                var mesh = shape.GetMesh(relativeDistanceThreshold, iterationLimit);
                mesh.Transform(Matrix44F.CreateScale(scale));
                GetMass(mesh, out mass, out centerOfMass, out inertia);

                // Mass was computed for density = 1. --> Scale mass.
                if (isDensity)
                {
                    var volume     = mesh.GetVolume();
                    var targetMass = volume * densityOrMass;
                    AdjustMass(targetMass, ref mass, ref inertia);
                }
                else
                {
                    AdjustMass(densityOrMass, ref mass, ref inertia);
                }

                if (Numeric.IsLessOrEqual(mass, 0))
                {
                    // If the mass is not valid, we fall back to the AABB mass.
                    // This can happen for non-closed meshes that have a "negative" volume.
                    GetMass(shape, scale, densityOrMass, isDensity, relativeDistanceThreshold, -1, out mass, out centerOfMass, out inertia);
                    return;
                }
            }
        }
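In the CompositeShape branch above, GetTranslatedMassInertia shifts each child's inertia tensor from the child's center of mass to the composite origin, and GetUntranslatedMassInertia shifts the summed tensor back to the combined center of mass. Neither helper is shown in this excerpt; the following is only a sketch of the parallel-axis theorem that such a translation presumably implements:

        // Sketch (assumption): parallel-axis theorem, I' = I_com + m * (dot(d, d) * E - outer(d, d)),
        // where d is the offset between the center of mass and the new reference point.
        private static Matrix33F TranslateInertia(float mass, Matrix33F inertiaCom, Vector3F d)
        {
            float dDotD = Vector3F.Dot(d, d);
            var outerProduct = new Matrix33F(
                d.X * d.X, d.X * d.Y, d.X * d.Z,
                d.Y * d.X, d.Y * d.Y, d.Y * d.Z,
                d.Z * d.X, d.Z * d.Y, d.Z * d.Z);
            return inertiaCom + mass * (dDotD * Matrix33F.Identity - outerProduct);
        }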
Esempio n. 22
        private void RenderHiDef(SkyboxNode node, RenderContext context)
        {
            var graphicsDevice = context.GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
            graphicsDevice.BlendState        = node.EnableAlphaBlending ? BlendState.AlphaBlend : BlendState.Opaque;

            bool sourceIsFloatingPoint = TextureHelper.IsFloatingPointFormat(node.Texture.Format);

            // Set sampler state. (Floating-point textures cannot use linear filtering. (XNA would throw an exception.))
            if (sourceIsFloatingPoint)
            {
                graphicsDevice.SamplerStates[0] = SamplerState.PointClamp;
            }
            else
            {
                graphicsDevice.SamplerStates[0] = SamplerState.LinearClamp;
            }

            var       cameraNode = context.CameraNode;
            Matrix44F view       = cameraNode.View;
            Matrix44F projection = cameraNode.Camera.Projection;

            // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
            // cube map and objects or texts in it are mirrored.)
            var       mirrorZ     = Matrix44F.CreateScale(1, 1, -1);
            Matrix33F orientation = node.PoseWorld.Orientation;

            _parameterWorldViewProjection.SetValue((Matrix)(projection * view * new Matrix44F(orientation, Vector3F.Zero) * mirrorZ));

            Vector4 color = node.EnableAlphaBlending
                      ? new Vector4((Vector3)node.Color * node.Alpha, node.Alpha) // Premultiplied
                      : new Vector4((Vector3)node.Color, 1);                      // Opaque

            _parameterColor.SetValue(color);
            _textureParameter.SetValue(node.Texture);

            if (node.Encoding is RgbEncoding)
            {
                _parameterTextureSize.SetValue(node.Texture.Size);
                if (context.IsHdrEnabled())
                {
                    _passRgbToRgb.Apply();
                }
                else
                {
                    _passRgbToSRgb.Apply();
                }
            }
            else if (node.Encoding is SRgbEncoding)
            {
                if (!sourceIsFloatingPoint)
                {
                    if (context.IsHdrEnabled())
                    {
                        _passSRgbToRgb.Apply();
                    }
                    else
                    {
                        _passSRgbToSRgb.Apply();
                    }
                }
                else
                {
                    throw new GraphicsException("sRGB encoded skybox cube maps must not use a floating point format.");
                }
            }
            else if (node.Encoding is RgbmEncoding)
            {
                float max = GraphicsHelper.ToGamma(((RgbmEncoding)node.Encoding).Max);
                _parameterRgbmMaxValue.SetValue(max);

                if (context.IsHdrEnabled())
                {
                    _passRgbmToRgb.Apply();
                }
                else
                {
                    _passRgbmToSRgb.Apply();
                }
            }
            else
            {
                throw new NotSupportedException("The SkyBoxRenderer supports only RgbEncoding, SRgbEncoding and RgbmEncoding.");
            }

            _submesh.Draw();
            savedRenderState.Restore();
        }
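The RgbmEncoding branch above only passes Max (converted to gamma space) to the effect; the actual decode happens in the selected pass. For reference, a sketch of the conventional RGBM decode such a pass presumably performs (an assumption about the encoding, not code from this renderer):

        // Sketch of conventional RGBM decoding (assumed): the alpha channel stores a multiplier
        // in [0, 1] that scales the RGB value up to the encoding's maximum value.
        private static Vector3 DecodeRgbm(Vector4 rgbm, float maxValue)
        {
            float scale = rgbm.W * maxValue;
            return new Vector3(rgbm.X * scale, rgbm.Y * scale, rgbm.Z * scale);
        }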
Esempio n. 23
        private void Stroke(FigureNode node, ArrayList <Vector3F> strokeVertices, ArrayList <int> strokeIndices)
        {
            if (_mode != RenderMode.Stroke)
            {
                Flush();
                _strokeEffect.CurrentTechnique.Passes[0].Apply();
                _mode = RenderMode.Stroke;
            }

            // Use cached vertex buffer if available.
            var nodeRenderData = node.RenderData as FigureNodeRenderData;

            if (nodeRenderData != null && nodeRenderData.IsValid)
            {
                Flush();
                var graphicsDevice = _graphicsService.GraphicsDevice;
                graphicsDevice.SetVertexBuffer(nodeRenderData.StrokeVertexBuffer);
                graphicsDevice.Indices = nodeRenderData.StrokeIndexBuffer;
                int primitiveCount = nodeRenderData.StrokeIndexBuffer.IndexCount / 3;
#if MONOGAME
                graphicsDevice.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0, primitiveCount);
#else
                int vertexCount = nodeRenderData.StrokeVertexBuffer.VertexCount;
                graphicsDevice.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0, vertexCount, 0, primitiveCount);
#endif
                return;
            }

            var batchVertices = _strokeBatch.Vertices;

            var world     = node.PoseWorld * Matrix44F.CreateScale(node.ScaleWorld);
            var worldView = _view * world;

            var  thickness       = node.StrokeThickness;
            var  color3F         = node.StrokeColor * node.StrokeAlpha;
            var  color           = new HalfVector4(color3F.X, color3F.Y, color3F.Z, node.StrokeAlpha);
            var  dash            = node.StrokeDashPattern * node.StrokeThickness;
            bool usesDashPattern = (dash.Y + dash.Z) != 0;
            var  dashSum         = new HalfVector4(
                dash.X,
                dash.X + dash.Y,
                dash.X + dash.Y + dash.Z,
                dash.X + dash.Y + dash.Z + dash.W);

            // Convert to vertices.
            float    lastDistance  = 0;
            Vector3F lastPosition  = new Vector3F(float.NaN);
            Vector3F lastWorld     = new Vector3F();
            Vector3F lastView      = new Vector3F();
            Vector3F lastProjected = new Vector3F();

            var data0 = new HalfVector4(0, 1, thickness, 0);
            var data1 = new HalfVector4(0, 0, thickness, 0);
            var data2 = new HalfVector4(1, 0, thickness, 0);
            var data3 = new HalfVector4(1, 1, thickness, 0);

            Vector3F[] figurePoints         = strokeVertices.Array;
            int[]      figureIndices        = strokeIndices.Array;
            int        numberOfLineSegments = strokeIndices.Count / 2;

            for (int i = 0; i < numberOfLineSegments; i++)
            {
                var startIndex = figureIndices[i * 2 + 0];
                var endIndex   = figureIndices[i * 2 + 1];
                var start      = figurePoints[startIndex];
                var end        = figurePoints[endIndex];

                var notConnectedWithLast = start != lastPosition;
                lastPosition = end;

                Vector3F startWorld = notConnectedWithLast ? world.TransformPosition(start) : lastWorld;
                Vector3F endWorld   = world.TransformPosition(end);
                lastWorld = endWorld;

                // Compute start/end distances of lines from beginning of line strip
                // for dash patterns.
                float startDistance = 0;
                float endDistance   = 1;
                if (usesDashPattern)
                {
                    if (!node.DashInWorldSpace)
                    {
                        Vector3F startView = notConnectedWithLast ? worldView.TransformPosition(start) : lastView;
                        var      endView   = worldView.TransformPosition(end);
                        lastView = endView;

                        // Clip to the near plane - otherwise lines which end near the camera origin
                        // (where the view-space z == 0) will disappear. (Projection singularity!)
                        float deltaZ = Math.Abs(startView.Z - endView.Z);
                        float pStart = MathHelper.Clamp((startView.Z - (-_cameraNear)) / deltaZ, 0, 1);
                        startView = InterpolationHelper.Lerp(startView, endView, pStart);
                        float pEnd = MathHelper.Clamp((endView.Z - (-_cameraNear)) / deltaZ, 0, 1);
                        endView = InterpolationHelper.Lerp(endView, startView, pEnd);

                        Vector3F startProjected;
                        if (notConnectedWithLast)
                        {
                            lastDistance   = 0;
                            startProjected = _viewport.ProjectToViewport(startView, _projection);
                        }
                        else
                        {
                            startProjected = lastProjected;
                        }
                        var endProjected = _viewport.ProjectToViewport(endView, _projection);
                        lastProjected = endProjected;

                        startDistance = lastDistance;
                        endDistance   = startDistance + (endProjected - startProjected).Length;
                        lastDistance  = endDistance;
                    }
                    else
                    {
                        if (notConnectedWithLast)
                        {
                            lastDistance = 0;
                        }

                        startDistance = lastDistance;
                        endDistance   = startDistance + (endWorld - startWorld).Length;
                        lastDistance  = endDistance;

                        // The shader needs to know that DashInWorldSpace is true. To avoid
                        // effect parameter changes, we store the value in the sign of the distance!
                        startDistance = -startDistance;
                        endDistance   = -endDistance;
                    }
                }

                var s = new Vector4(startWorld.X, startWorld.Y, startWorld.Z, startDistance);
                var e = new Vector4(endWorld.X, endWorld.Y, endWorld.Z, endDistance);

                int index, dummy;
                _strokeBatch.Submit(PrimitiveType.TriangleList, 4, 6, out index, out dummy);

                batchVertices[index + 0].Start = s;
                batchVertices[index + 0].End   = e;
                batchVertices[index + 0].Data  = data0;
                batchVertices[index + 0].Color = color;
                batchVertices[index + 0].Dash  = dashSum;

                batchVertices[index + 1].Start = s;
                batchVertices[index + 1].End   = e;
                batchVertices[index + 1].Data  = data1;
                batchVertices[index + 1].Color = color;
                batchVertices[index + 1].Dash  = dashSum;

                batchVertices[index + 2].Start = s;
                batchVertices[index + 2].End   = e;
                batchVertices[index + 2].Data  = data2;
                batchVertices[index + 2].Color = color;
                batchVertices[index + 2].Dash  = dashSum;

                batchVertices[index + 3].Start = s;
                batchVertices[index + 3].End   = e;
                batchVertices[index + 3].Data  = data3;
                batchVertices[index + 3].Color = color;
                batchVertices[index + 3].Dash  = dashSum;
            }
        }
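dashSum above stores the cumulative boundaries of the dash pattern, scaled by the stroke thickness, so the shader only needs a distance value and a few comparisons to decide whether a point lies in a dash or a gap. A small worked example with hypothetical values:

        // Worked example (hypothetical values):
        //   node.StrokeDashPattern = (2, 1, 2, 1), node.StrokeThickness = 3
        //   dash    = StrokeDashPattern * StrokeThickness = (6, 3, 6, 3)
        //   dashSum = (6, 6 + 3, 6 + 3 + 6, 6 + 3 + 6 + 3) = (6, 9, 15, 18)
        // A point at distance d along the stroke presumably falls in the first dash if
        // (d mod 18) < 6, in the first gap if 6 <= (d mod 18) < 9, and so on.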
Esempio n. 24
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.BlendState        = GraphicsHelper.BlendStateAdd;

            var viewport = graphicsDevice.Viewport;

            _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
            _parameterGBuffer0.SetValue(context.GBuffer0);
            _parameterGBuffer1.SetValue(context.GBuffer1);

            var    cameraNode     = context.CameraNode;
            Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

            var cameraPose = cameraNode.PoseWorld;

            GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, _cameraFrustumFarCorners);

            // Convert frustum far corners from view space to world space.
            for (int i = 0; i < _cameraFrustumFarCorners.Length; i++)
            {
                _cameraFrustumFarCorners[i] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_cameraFrustumFarCorners[i]);
            }

            _parameterFrustumCorners.SetValue(_cameraFrustumFarCorners);

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            var isHdrEnabled = context.IsHdrEnabled();

            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var light = lightNode.Light as DirectionalLight;
                if (light == null)
                {
                    continue;
                }

                // LightNode is visible in current frame.
                lightNode.LastFrame = frame;

                float hdrScale = isHdrEnabled ? light.HdrScale : 1;
                _parameterDiffuseColor.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
                _parameterSpecularColor.SetValue((Vector3)light.Color * light.SpecularIntensity * hdrScale);

                Pose     lightPose           = lightNode.PoseWorld;
                Vector3F lightDirectionWorld = lightPose.ToWorldDirection(Vector3F.Forward);
                _parameterLightDirection.SetValue((Vector3)lightDirectionWorld);

                bool hasShadow = (lightNode.Shadow != null && lightNode.Shadow.ShadowMask != null);
                if (hasShadow)
                {
                    switch (lightNode.Shadow.ShadowMaskChannel)
                    {
                    case 0:  _parameterShadowMaskChannel.SetValue(new Vector4(1, 0, 0, 0)); break;

                    case 1:  _parameterShadowMaskChannel.SetValue(new Vector4(0, 1, 0, 0)); break;

                    case 2:  _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 1, 0)); break;

                    default: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 0, 1)); break;
                    }

                    _parameterShadowMask.SetValue(lightNode.Shadow.ShadowMask);
                }

                bool hasTexture = (light.Texture != null);
                if (hasTexture)
                {
                    var textureProjection = Matrix44F.CreateOrthographicOffCenter(
                        -light.TextureOffset.X,
                        -light.TextureOffset.X + Math.Abs(light.TextureScale.X),
                        light.TextureOffset.Y,
                        light.TextureOffset.Y + Math.Abs(light.TextureScale.Y),
                        1,  // Not relevant
                        2); // Not relevant.
                    var scale = Matrix44F.CreateScale(Math.Sign(light.TextureScale.X), Math.Sign(light.TextureScale.Y), 1);

                    _parameterTextureMatrix.SetValue((Matrix)(GraphicsHelper.ProjectorBiasMatrix * scale * textureProjection * lightPose.Inverse));

                    _parameterTexture.SetValue(light.Texture);
                }

                if (lightNode.Clip != null)
                {
                    var data = lightNode.RenderData as LightRenderData;
                    if (data == null)
                    {
                        data = new LightRenderData();
                        lightNode.RenderData = data;
                    }

                    data.UpdateClipSubmesh(context.GraphicsService, lightNode);

                    graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
                    graphicsDevice.BlendState        = GraphicsHelper.BlendStateNoColorWrite;

                    _parameterWorldViewProjection.SetValue((Matrix)data.ClipMatrix * viewProjection);
                    _passClip.Apply();
                    data.ClipSubmesh.Draw();

                    graphicsDevice.DepthStencilState = lightNode.InvertClip
            ? GraphicsHelper.DepthStencilStateStencilEqual0
            : GraphicsHelper.DepthStencilStateStencilNotEqual0;
                    graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;
                }
                else
                {
                    graphicsDevice.DepthStencilState = DepthStencilState.None;
                }

                if (hasShadow)
                {
                    if (hasTexture)
                    {
                        if (light.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passShadowedTexturedAlpha.Apply();
                        }
                        else
                        {
                            _passShadowedTexturedRgb.Apply();
                        }
                    }
                    else
                    {
                        _passShadowed.Apply();
                    }
                }
                else
                {
                    if (hasTexture)
                    {
                        if (light.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passTexturedAlpha.Apply();
                        }
                        else
                        {
                            _passTexturedRgb.Apply();
                        }
                    }
                    else
                    {
                        _passDefault.Apply();
                    }
                }

                graphicsDevice.DrawFullScreenQuad();
            }

            savedRenderState.Restore();
        }
        /// <inheritdoc/>
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }
            if (order != RenderOrder.UserDefined)
            {
                throw new NotImplementedException("Render order must be 'UserDefined'.");
            }
            if (context.CameraNode == null)
            {
                throw new GraphicsException("Camera node needs to be set in render context.");
            }
            if (context.GBuffer0 == null)
            {
                throw new GraphicsException("GBuffer0 needs to be set in render context.");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            var   graphicsService  = context.GraphicsService;
            var   graphicsDevice   = graphicsService.GraphicsDevice;
            var   viewport         = context.Viewport;
            int   width            = viewport.Width;
            int   height           = viewport.Height;
            var   renderTargetPool = graphicsService.RenderTargetPool;
            var   cameraNode       = context.CameraNode;
            var   projection       = cameraNode.Camera.Projection;
            Pose  view             = cameraNode.PoseWorld.Inverse;
            Pose  cameraPose       = cameraNode.PoseWorld;
            float near             = projection.Near;
            float far = projection.Far;

            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            // Save render state.
            var originalRasterizerState   = graphicsDevice.RasterizerState;
            var originalDepthStencilState = graphicsDevice.DepthStencilState;
            var originalBlendState        = graphicsDevice.BlendState;

            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.None;

            RenderTarget2D offscreenBuffer = null;
            Texture        depthBufferHalf = null;

            if (!EnableOffscreenRendering || context.RenderTarget == null)
            {
                graphicsDevice.BlendState = BlendState.AlphaBlend;
                _parameterGBuffer0.SetValue(context.GBuffer0);
            }
            else
            {
                // Render at half resolution into off-screen buffer.
                width  = Math.Max(1, width / 2);
                height = Math.Max(1, height / 2);

                graphicsDevice.BlendState = BlendStateOffscreen;

                offscreenBuffer = renderTargetPool.Obtain2D(
                    new RenderTargetFormat(width, height, false, context.RenderTarget.Format, DepthFormat.None));
                graphicsDevice.SetRenderTarget(offscreenBuffer);
                graphicsDevice.Clear(Color.Black);

                // Get half-res depth buffer.
                object obj;
                if (context.Data.TryGetValue(RenderContextKeys.DepthBufferHalf, out obj) &&
                    obj is Texture2D)
                {
                    depthBufferHalf = (Texture2D)obj;
                    _parameterGBuffer0.SetValue(depthBufferHalf);
                }
                else
                {
                    string message = "Downsampled depth buffer is not set in render context. (The downsampled "
                                     + "depth buffer (half width and height) is required by the VolumetricLightRenderer "
                                     + "to use half-res off-screen rendering. It needs to be stored in "
                                     + "RenderContext.Data[RenderContextKeys.DepthBufferHalf].)";
                    throw new GraphicsException(message);
                }
            }

            // Set global effect parameters.
            _parameterViewportSize.SetValue(new Vector2(width, height));

            var isHdrEnabled = context.RenderTarget != null && context.RenderTarget.Format == SurfaceFormat.HdrBlendable;

            for (int i = 0; i < numberOfNodes; i++)
            {
                var node = nodes[i] as VolumetricLightNode;
                if (node == null)
                {
                    continue;
                }

                // VolumetricLightNode is visible in current frame.
                node.LastFrame = frame;

                // Effect parameters for volumetric light properties.
                _parameterColor.SetValue((Vector3)node.Color / node.NumberOfSamples);
                _parameterNumberOfSamples.SetValue(node.NumberOfSamples);
                _parameterLightTextureMipMap.SetValue((float)node.MipMapBias);

                // The volumetric light effect is created for the parent light node.
                var lightNode = node.Parent as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                Pose lightPose = lightNode.PoseWorld;

                // Get start and end depth values of light AABB in view space.
                var lightAabbView = lightNode.Shape.GetAabb(lightNode.ScaleWorld, view * lightPose);
                var startZ        = Math.Max(-lightAabbView.Maximum.Z, near) / far;
                var endZ          = Math.Min(-lightAabbView.Minimum.Z / far, 1);
                _parameterDepthInterval.SetValue(new Vector2(startZ, endZ));

                // Get a rectangle that covers the light in screen space.
                var rectangle           = GraphicsHelper.GetScissorRectangle(cameraNode, new Viewport(0, 0, width, height), lightNode);
                var texCoordTopLeft     = new Vector2F(rectangle.Left / (float)width, rectangle.Top / (float)height);
                var texCoordBottomRight = new Vector2F(rectangle.Right / (float)width, rectangle.Bottom / (float)height);

                GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);

                // Convert frustum far corners from view space to world space.
                for (int j = 0; j < _frustumFarCorners.Length; j++)
                {
                    _frustumFarCorners[j] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_frustumFarCorners[j]);
                }

                _parameterFrustumCorners.SetValue(_frustumFarCorners);

                Vector2 randomSeed = AnimateNoise ? new Vector2((float)MathHelper.Frac(context.Time.TotalSeconds))
                                          : new Vector2(0);
                _parameterRandomSeed.SetValue(randomSeed);

                // Set light parameters and apply effect pass.
                if (lightNode.Light is PointLight)
                {
                    var light = (PointLight)lightNode.Light;

                    float hdrScale = isHdrEnabled ? light.HdrScale : 1;
                    _parameterLightDiffuse.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
                    _parameterLightPosition.SetValue((Vector3)(lightPose.Position - cameraPose.Position));
                    _parameterLightRange.SetValue(light.Range);
                    _parameterLightAttenuation.SetValue(light.Attenuation);

                    bool hasTexture = (light.Texture != null);
                    if (hasTexture)
                    {
                        _parameterLightTexture.SetValue(light.Texture);

                        // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
                        // cube map and objects or texts in it are mirrored.)
                        var mirrorZ = Matrix44F.CreateScale(1, 1, -1);
                        _parameterLightTextureMatrix.SetValue((Matrix)(mirrorZ * lightPose.Inverse));
                    }

                    if (hasTexture)
                    {
                        if (light.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passPointLightTextureAlpha.Apply();
                        }
                        else
                        {
                            _passPointLightTextureRgb.Apply();
                        }
                    }
                    else
                    {
                        _passPointLight.Apply();
                    }
                }
                else if (lightNode.Light is Spotlight)
                {
                    var light = (Spotlight)lightNode.Light;

                    float hdrScale = isHdrEnabled ? light.HdrScale : 1;
                    _parameterLightDiffuse.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
                    _parameterLightPosition.SetValue((Vector3)(lightPose.Position - cameraPose.Position));
                    _parameterLightRange.SetValue(light.Range);
                    _parameterLightAttenuation.SetValue(light.Attenuation);
                    _parameterLightDirection.SetValue((Vector3)lightPose.ToWorldDirection(Vector3F.Forward));
                    _parameterLightAngles.SetValue(new Vector2(light.FalloffAngle, light.CutoffAngle));

                    bool hasTexture = (light.Texture != null);
                    if (hasTexture)
                    {
                        _parameterLightTexture.SetValue(light.Texture);

                        var proj = Matrix44F.CreatePerspectiveFieldOfView(light.CutoffAngle * 2, 1, 0.1f, 100);
                        _parameterLightTextureMatrix.SetValue((Matrix)(GraphicsHelper.ProjectorBiasMatrix * proj * (lightPose.Inverse * new Pose(cameraPose.Position))));
                    }

                    if (hasTexture)
                    {
                        if (light.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passSpotlightTextureAlpha.Apply();
                        }
                        else
                        {
                            _passSpotlightTextureRgb.Apply();
                        }
                    }
                    else
                    {
                        _passSpotlight.Apply();
                    }
                }
                else if (lightNode.Light is ProjectorLight)
                {
                    var light = (ProjectorLight)lightNode.Light;

                    float hdrScale = isHdrEnabled ? light.HdrScale : 1;
                    _parameterLightDiffuse.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
                    _parameterLightPosition.SetValue((Vector3)(lightPose.Position - cameraPose.Position));
                    _parameterLightRange.SetValue(light.Projection.Far);
                    _parameterLightAttenuation.SetValue(light.Attenuation);

                    _parameterLightTexture.SetValue(light.Texture);

                    _parameterLightTextureMatrix.SetValue((Matrix)(GraphicsHelper.ProjectorBiasMatrix * light.Projection * (lightPose.Inverse * new Pose(cameraPose.Position))));

                    if (light.Texture.Format == SurfaceFormat.Alpha8)
                    {
                        _passProjectorLightTextureAlpha.Apply();
                    }
                    else
                    {
                        _passProjectorLightTextureRgb.Apply();
                    }
                }
                else
                {
                    continue;
                }

                // Draw a screen space quad covering the light.
                graphicsDevice.DrawQuad(rectangle);
            }

            _parameterGBuffer0.SetValue((Texture)null);
            _parameterLightTexture.SetValue((Texture)null);

            if (offscreenBuffer != null)
            {
                // ----- Combine off-screen buffer with scene.
                graphicsDevice.BlendState = BlendState.Opaque;

                // The previous scene render target is bound as texture.
                // --> Switch scene render targets!
                var sceneRenderTarget = context.RenderTarget;
                var renderTarget      = renderTargetPool.Obtain2D(new RenderTargetFormat(sceneRenderTarget));
                context.SourceTexture = offscreenBuffer;
                context.RenderTarget  = renderTarget;

                // Use the UpsampleFilter, which supports "nearest-depth upsampling".
                // (Nearest-depth upsampling is an "edge-aware" method that tries to
                // maintain the original geometry and prevent blurred edges.)
                if (_upsampleFilter == null)
                {
                    _upsampleFilter                = new UpsampleFilter(graphicsService);
                    _upsampleFilter.Mode           = UpsamplingMode.NearestDepth;
                    _upsampleFilter.RebuildZBuffer = true;
                }

                _upsampleFilter.DepthThreshold = DepthThreshold;
                context.SceneTexture           = sceneRenderTarget;

                _upsampleFilter.Process(context);

                context.SceneTexture  = null;
                context.SourceTexture = null;
                renderTargetPool.Recycle(offscreenBuffer);
                renderTargetPool.Recycle(sceneRenderTarget);
            }

            // Restore render states.
            graphicsDevice.RasterizerState   = originalRasterizerState;
            graphicsDevice.DepthStencilState = originalDepthStencilState;
            graphicsDevice.BlendState        = originalBlendState;
        }
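
For reference, a minimal sketch of what the bias * projection * world-to-light matrices above accomplish, written with plain XNA types instead of the engine's Matrix44F/Pose. The helper name and the exact bias convention are assumptions for illustration: a world-space position is transformed into the light's clip space and then remapped to [0, 1] texture coordinates, which is presumably what GraphicsHelper.ProjectorBiasMatrix encodes in matrix form.

using Microsoft.Xna.Framework;

static class ProjectiveTexturingSketch
{
    // Hypothetical helper (not part of the engine): maps a world-space position
    // into the [0, 1] texture space of a projector or spotlight.
    public static Vector2? GetProjectedTexCoord(
        Vector3 worldPosition, Matrix lightView, Matrix lightProjection)
    {
        // XNA uses the row-vector convention, so the chain is
        // world -> light view -> light clip space.
        Vector4 clip = Vector4.Transform(worldPosition, lightView * lightProjection);
        if (clip.W <= 0)
            return null;   // Point is behind the projector.

        // Perspective divide to [-1, 1], then scale/bias into texture space.
        // v is flipped because clip-space +y points up while texture +v points down.
        float u = 0.5f * (clip.X / clip.W) + 0.5f;
        float v = -0.5f * (clip.Y / clip.W) + 0.5f;
        return new Vector2(u, v);
    }
}
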
Esempio n. 26
0
        private void Fill(FigureNode node, ArrayList<Vector3F> vertices, ArrayList<int> indices)
        {
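            // Switch the renderer to fill mode: flush any geometry buffered in a
            // different mode and apply the fill effect pass.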
            if (_mode != RenderMode.Fill)
            {
                Flush();
                _fillEffect.CurrentTechnique.Passes[0].Apply();
                _mode = RenderMode.Fill;
            }

            // Use cached vertex buffer if available.
            var nodeRenderData = node.RenderData as FigureNodeRenderData;

            if (nodeRenderData != null && nodeRenderData.IsValid)
            {
                Flush();
                var graphicsDevice = _graphicsService.GraphicsDevice;
                graphicsDevice.SetVertexBuffer(nodeRenderData.FillVertexBuffer);
                graphicsDevice.Indices = nodeRenderData.FillIndexBuffer;
                int primitiveCount = nodeRenderData.FillIndexBuffer.IndexCount / 3;
#if MONOGAME
                // MonoGame's overload takes no minVertexIndex/numVertices arguments.
                graphicsDevice.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0, primitiveCount);
#else
                int vertexCount = nodeRenderData.FillVertexBuffer.VertexCount;
                graphicsDevice.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0, vertexCount, 0, primitiveCount);
#endif
                return;
            }

            // Transform the figure's vertices by the node's world matrix (pose and scale)
            // and premultiply the fill color by the fill alpha.
            Matrix44F world   = node.PoseWorld * Matrix44F.CreateScale(node.ScaleWorld);
            Vector3F  color3F = node.FillColor * node.FillAlpha;
            Color     color   = new Color(color3F.X, color3F.Y, color3F.Z, node.FillAlpha);

            var numberOfVertices = vertices.Count;
            var numberOfIndices  = indices.Count;

            VertexPositionColor[] batchVertices = _fillBatch.Vertices;
            ushort[] batchIndices = _fillBatch.Indices;

            if (numberOfVertices > batchVertices.Length || numberOfIndices > batchIndices.Length)
            {
                string message = string.Format(
                    CultureInfo.InvariantCulture,
                    "The BufferSize of this FigureRenderer is not large enough to render the FigureNode (Name = \"{0}\").",
                    node.Name);
                throw new GraphicsException(message);
            }

            int vertexBufferStartIndex, indexBufferStartIndex;
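            // Reserve space in the shared dynamic batch. Submit returns the offsets at
            // which this figure's vertices and indices must be written.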
            _fillBatch.Submit(PrimitiveType.TriangleList, numberOfVertices, numberOfIndices,
                              out vertexBufferStartIndex, out indexBufferStartIndex);

            // Copy all vertices, transforming them into world space and applying the
            // premultiplied fill color.
            Vector3F[] vertexArray = vertices.Array;
            for (int i = 0; i < numberOfVertices; i++)
            {
                batchVertices[vertexBufferStartIndex + i].Position = (Vector3)(world.TransformPosition(vertexArray[i]));
                batchVertices[vertexBufferStartIndex + i].Color    = color;
            }

            // Copy all indices, offsetting the figure-local indices by the start of this
            // figure's vertex range in the batch vertex buffer.
            int[] indexArray = indices.Array;
            for (int i = 0; i < numberOfIndices; i++)
            {
                batchIndices[indexBufferStartIndex + i] = (ushort)(vertexBufferStartIndex + indexArray[i]);
            }
        }
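
The early-out at the top of Fill draws from cached GPU buffers stored in the node's FigureNodeRenderData. Below is a minimal sketch of how such a cache could be built with plain XNA buffers; the helper name is hypothetical and the engine's actual caching code may differ.

using Microsoft.Xna.Framework.Graphics;

static class FigureFillCacheSketch
{
    // Hypothetical helper: bakes already transformed, colored fill geometry into
    // static GPU buffers so later frames can skip the per-vertex CPU copy above.
    public static void CreateFillBuffers(
        GraphicsDevice graphicsDevice,
        VertexPositionColor[] vertices, ushort[] indices,
        out VertexBuffer vertexBuffer, out IndexBuffer indexBuffer)
    {
        vertexBuffer = new VertexBuffer(
            graphicsDevice, VertexPositionColor.VertexDeclaration,
            vertices.Length, BufferUsage.WriteOnly);
        vertexBuffer.SetData(vertices);

        indexBuffer = new IndexBuffer(
            graphicsDevice, IndexElementSize.SixteenBits,
            indices.Length, BufferUsage.WriteOnly);
        indexBuffer.SetData(indices);
    }
}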