/// <summary>
/// Diagonalizes the inertia matrix.
/// </summary>
/// <param name="inertia">The inertia matrix.</param>
/// <param name="inertiaDiagonal">The inertia of the principal axes.</param>
/// <param name="rotation">
/// The rotation that rotates from principal axis space to parent/world space.
/// </param>
/// <remarks>
/// Every valid inertia matrix can be expressed in a coordinate space in which all
/// off-diagonal elements are 0. The axes of this space are the principal axes.
/// </remarks>
internal static void DiagonalizeInertia(Matrix33F inertia, out Vector3F inertiaDiagonal, out Matrix33F rotation)
{
    // Alternatives: a Jacobi transformation (iterative method, see Bullet/btMatrix3x3.diagonalize()
    // and the Numerical Recipes book), or computing the eigenvalues from the characteristic
    // (cubic) polynomial and solving for the eigenvectors (see Numerical Recipes and
    // "Mathematics for 3D Game Programming and Computer Graphics", ray-tracing chapter,
    // for cubic equations and computation of bounding boxes).

    // Eigenvalue decomposition: eigenvalues are the principal inertia,
    // eigenvectors (columns of V) are the principal axes.
    var decomposition = new EigenvalueDecompositionF(inertia.ToMatrixF());
    inertiaDiagonal = decomposition.RealEigenvalues.ToVector3F();
    rotation = decomposition.V.ToMatrix33F();

    if (!rotation.IsRotation)
    {
        // V is orthogonal but not necessarily a rotation. If it is not a rotation,
        // swapping two columns (and the matching eigenvalues) makes it one.
        MathHelper.Swap(ref inertiaDiagonal.Y, ref inertiaDiagonal.Z);
        Vector3F column1 = rotation.GetColumn(1);
        rotation.SetColumn(1, rotation.GetColumn(2));
        rotation.SetColumn(2, column1);
        Debug.Assert(rotation.IsRotation);
    }
}
/// <inheritdoc/>
protected override void OnSetup()
{
    // World-space anchor pose on A and anchor position on B.
    Pose worldAnchorA = BodyA.Pose * AnchorPoseALocal;
    Vector3F worldAnchorB = BodyB.Pose.ToWorldPosition(AnchorPositionBLocal);

    // Anchor position of B expressed in the anchor space of A.
    Vector3F positionBInA = worldAnchorA.ToLocalPosition(worldAnchorB);

    // The linear constraint axes are the fixed anchor axes of A!
    Matrix33F axes = worldAnchorA.Orientation;

    // Anchor offsets relative to the centers of mass.
    Vector3F radiusA = worldAnchorA.Position - BodyA.PoseCenterOfMass.Position;
    Vector3F radiusB = worldAnchorB - BodyB.PoseCenterOfMass.Position;

    // Keep the previous limit states for warmstarting.
    LimitState previousStateX = _limitStates[0];
    LimitState previousStateY = _limitStates[1];
    LimitState previousStateZ = _limitStates[2];

    SetupConstraint(0, positionBInA.X, axes.GetColumn(0), radiusA, radiusB);
    SetupConstraint(1, positionBInA.Y, axes.GetColumn(1), radiusA, radiusB);
    SetupConstraint(2, positionBInA.Z, axes.GetColumn(2), radiusA, radiusB);

    Warmstart(0, previousStateX);
    Warmstart(1, previousStateY);
    Warmstart(2, previousStateZ);
}
public void GetColumn()
{
    // A column-major array must map straight onto the matrix columns.
    Matrix33F matrix = new Matrix33F(columnMajor, MatrixOrder.ColumnMajor);

    Vector3F column0 = matrix.GetColumn(0);
    Vector3F column1 = matrix.GetColumn(1);
    Vector3F column2 = matrix.GetColumn(2);

    Assert.AreEqual(new Vector3F(1.0f, 4.0f, 7.0f), column0);
    Assert.AreEqual(new Vector3F(2.0f, 5.0f, 8.0f), column1);
    Assert.AreEqual(new Vector3F(3.0f, 6.0f, 9.0f), column2);
}
/// <inheritdoc/>
protected override void OnSetup()
{
    // Anchor orientations in world space.
    Matrix33F orientationA = BodyA.Pose.Orientation * AnchorOrientationALocal;
    Matrix33F orientationB = BodyB.Pose.Orientation * AnchorOrientationBLocal;

    // Orientation of B's anchor relative to A's anchor, decomposed into Euler angles.
    Matrix33F relativeOrientation = orientationA.Transposed * orientationB;
    Vector3F eulerAngles = ConstraintHelper.GetEulerAngles(relativeOrientation);

    // Constraint axes (see OneNote for a detailed derivation of these non-intuitive axes):
    var axisXOnA = orientationA.GetColumn(0);   // Anchor x-axis on A.
    var axisZOnB = orientationB.GetColumn(2);   // Anchor z-axis on B.
    Vector3F axisY = Vector3F.Cross(axisZOnB, axisXOnA);
    Vector3F axisX = Vector3F.Cross(axisY, axisZOnB);
    Vector3F axisZ = Vector3F.Cross(axisXOnA, axisY);

    // Keep the previous limit states for warmstarting.
    LimitState previousStateX = _limitStates[0];
    LimitState previousStateY = _limitStates[1];
    LimitState previousStateZ = _limitStates[2];

    SetupConstraint(0, eulerAngles.X, axisX);
    SetupConstraint(1, eulerAngles.Y, axisY);
    SetupConstraint(2, eulerAngles.Z, axisZ);

    Warmstart(0, previousStateX);
    Warmstart(1, previousStateY);
    Warmstart(2, previousStateZ);
}
public void MultiplyMatrix()
{
    Matrix33F matrix = new Matrix33F(12, 23, 45, 67, 89, 90, 43, 65, 87);

    // Multiplication with zero and identity.
    Assert.AreEqual(Matrix33F.Zero, Matrix33F.Multiply(matrix, Matrix33F.Zero));
    Assert.AreEqual(Matrix33F.Zero, Matrix33F.Multiply(Matrix33F.Zero, matrix));
    Assert.AreEqual(matrix, Matrix33F.Multiply(matrix, Matrix33F.Identity));
    Assert.AreEqual(matrix, Matrix33F.Multiply(Matrix33F.Identity, matrix));

    // Multiplication with the inverse yields (numerically) the identity.
    Assert.IsTrue(Matrix33F.AreNumericallyEqual(Matrix33F.Identity, Matrix33F.Multiply(matrix, matrix.Inverse)));
    Assert.IsTrue(Matrix33F.AreNumericallyEqual(Matrix33F.Identity, Matrix33F.Multiply(matrix.Inverse, matrix)));

    // Each product element must be the dot product of a row of the first
    // factor and a column of the second factor.
    Matrix33F a = new Matrix33F(columnMajor, MatrixOrder.ColumnMajor);
    Matrix33F b = new Matrix33F(12, 23, 45, 67, 89, 90, 43, 65, 87);
    Matrix33F product = Matrix33F.Multiply(a, b);
    for (int row = 0; row < 3; row++)
    {
        for (int column = 0; column < 3; column++)
        {
            Assert.AreEqual(Vector3F.Dot(a.GetRow(row), b.GetColumn(column)), product[row, column]);
        }
    }
}
//--------------------------------------------------------------
#region Creation & Cleanup
//--------------------------------------------------------------
#endregion


//--------------------------------------------------------------
#region Methods
//--------------------------------------------------------------

/// <inheritdoc/>
protected override void OnSetup()
{
    // Derive the soft-constraint parameters from the spring/damper constants.
    var deltaTime = Simulation.Settings.Timing.FixedTimeStep;
    float errorReduction = ConstraintHelper.ComputeErrorReduction(deltaTime, SpringConstant, DampingConstant);
    float softness = ConstraintHelper.ComputeSoftness(deltaTime, SpringConstant, DampingConstant);

    // Anchor orientations in world space.
    Matrix33F orientationA = BodyA.Pose.Orientation * AnchorOrientationALocal;
    Matrix33F orientationB = BodyB.Pose.Orientation * AnchorOrientationBLocal;

    // Orientation of B's anchor relative to A's anchor, decomposed into Euler angles.
    Matrix33F relativeOrientation = orientationA.Transposed * orientationB;
    Vector3F eulerAngles = ConstraintHelper.GetEulerAngles(relativeOrientation);

    // Constraint axes (see OneNote for a detailed derivation of these non-intuitive axes):
    var axisXOnA = orientationA.GetColumn(0);   // Anchor x-axis on A.
    var axisZOnB = orientationB.GetColumn(2);   // Anchor z-axis on B.
    Vector3F axisY = Vector3F.Cross(axisZOnB, axisXOnA);
    Vector3F axisX = Vector3F.Cross(axisY, axisZOnB);
    Vector3F axisZ = Vector3F.Cross(axisXOnA, axisY);

    SetupConstraint(0, eulerAngles[0], TargetAngles[0], axisX, deltaTime, errorReduction, softness);
    SetupConstraint(1, eulerAngles[1], TargetAngles[1], axisY, deltaTime, errorReduction, softness);
    SetupConstraint(2, eulerAngles[2], TargetAngles[2], axisZ, deltaTime, errorReduction, softness);

    // No warmstarting.
    _constraints[0].ConstraintImpulse = 0;
    _constraints[1].ConstraintImpulse = 0;
    _constraints[2].ConstraintImpulse = 0;
}
//--------------------------------------------------------------
#region Creation & Cleanup
//--------------------------------------------------------------
#endregion


//--------------------------------------------------------------
#region Methods
//--------------------------------------------------------------

/// <inheritdoc/>
protected override void OnSetup()
{
    // World-space anchor pose on A and anchor position on B.
    Pose worldAnchorA = BodyA.Pose * AnchorPoseALocal;
    Vector3F worldAnchorB = BodyB.Pose.ToWorldPosition(AnchorPositionBLocal);

    // Anchor position of B expressed in the anchor space of A.
    Vector3F positionBInA = worldAnchorA.ToLocalPosition(worldAnchorB);

    // The linear constraint axes are the fixed anchor axes of A!
    Matrix33F axes = worldAnchorA.Orientation;

    // Anchor offsets relative to the centers of mass.
    Vector3F radiusA = worldAnchorA.Position - BodyA.PoseCenterOfMass.Position;
    Vector3F radiusB = worldAnchorB - BodyB.PoseCenterOfMass.Position;

    // Derive the soft-constraint parameters from the spring/damper constants.
    var deltaTime = Simulation.Settings.Timing.FixedTimeStep;
    float errorReduction = ConstraintHelper.ComputeErrorReduction(deltaTime, SpringConstant, DampingConstant);
    float softness = ConstraintHelper.ComputeSoftness(deltaTime, SpringConstant, DampingConstant);

    if (UseSingleAxisMode)
    {
        // One constraint along the direction from the current to the target position.
        var direction = TargetPosition - positionBInA;
        var deviation = direction.Length;
        if (Numeric.IsZero(deviation))
        {
            direction = Vector3F.UnitX;   // Degenerate case: already at target.
        }
        else
        {
            direction.Normalize();
        }

        SetupConstraint(0, -deviation, 0, direction, radiusA, radiusB, deltaTime, errorReduction, softness);
    }
    else
    {
        // One constraint per fixed anchor axis of A.
        SetupConstraint(0, positionBInA.X, TargetPosition.X, axes.GetColumn(0), radiusA, radiusB, deltaTime, errorReduction, softness);
        SetupConstraint(1, positionBInA.Y, TargetPosition.Y, axes.GetColumn(1), radiusA, radiusB, deltaTime, errorReduction, softness);
        SetupConstraint(2, positionBInA.Z, TargetPosition.Z, axes.GetColumn(2), radiusA, radiusB, deltaTime, errorReduction, softness);
    }

    // No warmstarting.
    _constraints[0].ConstraintImpulse = 0;
    _constraints[1].ConstraintImpulse = 0;
    _constraints[2].ConstraintImpulse = 0;
}
//--------------------------------------------------------------
#region Creation & Cleanup
//--------------------------------------------------------------
#endregion


//--------------------------------------------------------------
#region Methods
//--------------------------------------------------------------

/// <inheritdoc/>
protected override void OnSetup()
{
    // The anchor on A is the body pose itself; anchor on B in world space.
    Pose poseA = BodyA.Pose;
    Vector3F worldAnchorB = BodyB.Pose.ToWorldPosition(AnchorPositionBLocal);

    // The linear constraint axes are the fixed anchor axes of A!
    Matrix33F axes = poseA.Orientation;
    Vector3F radiusA = poseA.Position - BodyA.PoseCenterOfMass.Position;
    Vector3F radiusB = worldAnchorB - BodyB.PoseCenterOfMass.Position;

    var deltaTime = Simulation.Settings.Timing.FixedTimeStep;
    var velocityAxis = BodyA.Pose.Orientation * AxisALocal;

    if (UseSingleAxisMode)
    {
        // One constraint in direction of the velocity.
        if (velocityAxis.IsNumericallyZero)
        {
            // TODO: We could have a separate Axis property if the motor should be able to constrain to velocity 0 in Single Axis Mode.
            // No velocity axis.
            _minImpulseLimits[0] = 0;
        }
        else
        {
            SetupConstraint(0, TargetVelocity, velocityAxis, radiusA, radiusB, deltaTime);
        }
    }
    else
    {
        // One constraint for each axis fixed on A.
        var targetVelocityVector = velocityAxis * TargetVelocity;
        SetupConstraint(0, targetVelocityVector.X, axes.GetColumn(0), radiusA, radiusB, deltaTime);
        SetupConstraint(1, targetVelocityVector.Y, axes.GetColumn(1), radiusA, radiusB, deltaTime);
        SetupConstraint(2, targetVelocityVector.Z, axes.GetColumn(2), radiusA, radiusB, deltaTime);
    }

    // No warmstarting.
    _constraints[0].ConstraintImpulse = 0;
    _constraints[1].ConstraintImpulse = 0;
    _constraints[2].ConstraintImpulse = 0;
}
public void SetColumn()
{
    Matrix33F matrix = new Matrix33F(columnMajor, MatrixOrder.ColumnMajor);

    // Replacing column 0 must leave columns 1 and 2 untouched.
    matrix.SetColumn(0, new Vector3F(0.1f, 0.2f, 0.3f));
    Assert.AreEqual(new Vector3F(0.1f, 0.2f, 0.3f), matrix.GetColumn(0));
    Assert.AreEqual(new Vector3F(2.0f, 5.0f, 8.0f), matrix.GetColumn(1));
    Assert.AreEqual(new Vector3F(3.0f, 6.0f, 9.0f), matrix.GetColumn(2));

    // Replacing column 1 must leave columns 0 and 2 untouched.
    matrix.SetColumn(1, new Vector3F(0.4f, 0.5f, 0.6f));
    Assert.AreEqual(new Vector3F(0.1f, 0.2f, 0.3f), matrix.GetColumn(0));
    Assert.AreEqual(new Vector3F(0.4f, 0.5f, 0.6f), matrix.GetColumn(1));
    Assert.AreEqual(new Vector3F(3.0f, 6.0f, 9.0f), matrix.GetColumn(2));

    // Replacing column 2 must leave columns 0 and 1 untouched.
    matrix.SetColumn(2, new Vector3F(0.7f, 0.8f, 0.9f));
    Assert.AreEqual(new Vector3F(0.1f, 0.2f, 0.3f), matrix.GetColumn(0));
    Assert.AreEqual(new Vector3F(0.4f, 0.5f, 0.6f), matrix.GetColumn(1));
    Assert.AreEqual(new Vector3F(0.7f, 0.8f, 0.9f), matrix.GetColumn(2));
}
public void MultiplyMatrixOperator()
{
    Matrix33F matrix = new Matrix33F(12, 23, 45, 67, 89, 90, 43, 65, 87);

    // Multiplication with zero and identity.
    Assert.AreEqual(Matrix33F.Zero, matrix * Matrix33F.Zero);
    Assert.AreEqual(Matrix33F.Zero, Matrix33F.Zero * matrix);
    Assert.AreEqual(matrix, matrix * Matrix33F.Identity);
    Assert.AreEqual(matrix, Matrix33F.Identity * matrix);

    // Multiplication with the inverse yields (numerically) the identity.
    Assert.IsTrue(Matrix33F.AreNumericallyEqual(Matrix33F.Identity, matrix * matrix.Inverse));
    Assert.IsTrue(Matrix33F.AreNumericallyEqual(Matrix33F.Identity, matrix.Inverse * matrix));

    // Each product element must be the dot product of a row of the first
    // factor and a column of the second factor.
    Matrix33F a = new Matrix33F(columnMajor, MatrixOrder.ColumnMajor);
    Matrix33F b = new Matrix33F(12, 23, 45, 67, 89, 90, 43, 65, 87);
    Matrix33F product = a * b;
    for (int row = 0; row < 3; row++)
    {
        for (int column = 0; column < 3; column++)
        {
            Assert.AreEqual(Vector3F.Dot(a.GetRow(row), b.GetColumn(column)), product[row, column]);
        }
    }
}
public void GetColumnException2()
{
    // Column index 3 is out of range — GetColumn must throw.
    Matrix33F matrix = new Matrix33F(columnMajor, MatrixOrder.ColumnMajor);
    matrix.GetColumn(3);
}
/// <summary>
/// Renders the reflection textures of all visible <see cref="PlanarReflectionNode"/>s:
/// for each node the camera is mirrored at the reflection plane, a tight off-center
/// projection around the node's screen-space bounds is built, and the scene is rendered
/// into the node's render target via <c>RenderCallback</c>.
/// </summary>
/// <param name="nodes">The scene nodes (non-<see cref="PlanarReflectionNode"/>s are skipped).</param>
/// <param name="context">The render context (camera, scene and graphics service must be set).</param>
/// <param name="order">Not used by this renderer.</param>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");
    if (context.Scene == null)
        throw new ArgumentException("Scene needs to be set in render context.", "context");
    if (context.CameraNode == null)
        throw new ArgumentException("Camera needs to be set in render context.", "context");
    if (!(context.CameraNode.Camera.Projection is PerspectiveProjection))
        throw new ArgumentException("The camera in the render context must use a perspective projection.", "context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    int frame = context.Frame;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    // Remember original context/camera state to restore it at the end.
    var originalRenderTarget = context.RenderTarget;
    var originalViewport = context.Viewport;
    var originalCameraNode = context.CameraNode;
    var originalLodCameraNode = context.LodCameraNode;
    float originalLodBias = context.LodBias;
    var originalReferenceNode = context.ReferenceNode;

    Pose originalCameraPose = originalCameraNode.PoseWorld;
    Vector3F originalCameraPosition = originalCameraPose.Position;
    Matrix33F originalCameraOrientation = originalCameraPose.Orientation;

    // Camera basis vectors (world space).
    Vector3F right = originalCameraOrientation.GetColumn(0);
    Vector3F up = originalCameraOrientation.GetColumn(1);
    Vector3F back = originalCameraOrientation.GetColumn(2);

    try
    {
        // Nodes are accessed by index. If a SceneQuery collection in use is modified
        // inside RenderCallback, an InvalidOperationException may surface here
        // (translated into a GraphicsException in the catch block below).
        for (int i = 0; i < numberOfNodes; i++)
        {
            var node = nodes[i] as PlanarReflectionNode;
            if (node == null)
                continue;

            // Update each node only once per frame.
            if (node.LastFrame == frame)
                continue;

            node.LastFrame = frame;

            var texture = node.RenderToTexture.Texture;
            if (texture == null)
                continue;

            var renderTarget = texture as RenderTarget2D;
            if (renderTarget == null)
                throw new GraphicsException(
                    "PlanarReflectionNode.RenderToTexture.Texture is invalid. The texture must be a RenderTarget2D.");

            // RenderToTexture instances can be shared. --> Update them only once per frame.
            if (node.RenderToTexture.LastFrame == frame)
                continue;

            // Do not render if we look at the back of the reflection plane.
            Vector3F planeNormal = node.NormalWorld;
            Vector3F planePosition = node.PoseWorld.Position;
            Vector3F planeToCamera = originalCameraPosition - planePosition;
            if (Vector3F.Dot(planeNormal, planeToCamera) < 0)
                continue;

            var cameraNode = node.CameraNode;

            // Mirror the camera pose at the reflection plane. Reflecting all three basis
            // vectors flips handedness; negating the up column makes the basis a proper
            // rotation again.
            Pose cameraPose;
            cameraPose.Position = planePosition + Reflect(planeToCamera, planeNormal);
            cameraPose.Orientation = new Matrix33F();
            cameraPose.Orientation.SetColumn(0, Reflect(right, planeNormal));
            cameraPose.Orientation.SetColumn(1, -Reflect(up, planeNormal));
            cameraPose.Orientation.SetColumn(2, Reflect(back, planeNormal));
            cameraNode.PoseWorld = cameraPose;

            // The projection of the player camera.
            var originalProjection = originalCameraNode.Camera.Projection;

            // The projection of the reflected camera.
            var projection = (PerspectiveProjection)cameraNode.Camera.Projection;

            // Choose optimal projection: get the screen-space bounds of the reflection
            // node and make the FOV so small that it exactly contains the node.
            projection.Set(originalProjection);
            var bounds = GraphicsHelper.GetBounds(cameraNode, node);

            // Abort if the bounds are empty.
            if (Numeric.AreEqual(bounds.X, bounds.Z) || Numeric.AreEqual(bounds.Y, bounds.W))
                continue;

            // Apply FOV scale to bounds.
            float fovScale = node.FieldOfViewScale;
            float deltaX = (bounds.Z - bounds.X) * (fovScale - 1) / 2;
            bounds.X -= deltaX;
            bounds.Z += deltaX;
            float deltaY = (bounds.W - bounds.Y) * (fovScale - 1) / 2;
            bounds.Y -= deltaY;
            bounds.W += deltaY;

            // Update projection to contain only the node bounds.
            // BUGFIX: All four planes must be derived from the ORIGINAL left/top/width/
            // height. The previous code assigned projection.Left and then read
            // projection.Left and projection.Width (which depends on Left) again when
            // computing Right — same issue for Top/Bottom.
            float projectionLeft = projection.Left;
            float projectionTop = projection.Top;
            float projectionWidth = projection.Width;
            float projectionHeight = projection.Height;
            projection.Left = projectionLeft + bounds.X * projectionWidth;
            projection.Right = projectionLeft + bounds.Z * projectionWidth;
            projection.Top = projectionTop - bounds.Y * projectionHeight;
            projection.Bottom = projectionTop - bounds.W * projectionHeight;

            // Set far clip plane.
            if (node.Far.HasValue)
                projection.Far = node.Far.Value;

            // Use the reflection plane as the near clip plane (oblique clipping) so that
            // geometry behind the mirror is not rendered into the reflection.
            Vector3F planeNormalCamera = cameraPose.ToLocalDirection(-node.NormalWorld);
            Vector3F planePointCamera = cameraPose.ToLocalPosition(node.PoseWorld.Position);
            projection.NearClipPlane = new Plane(planeNormalCamera, planePointCamera);

            context.CameraNode = cameraNode;
            context.LodCameraNode = cameraNode;
            context.LodBias = node.LodBias ?? originalLodBias;
            context.ReferenceNode = node;
            context.RenderTarget = renderTarget;
            context.Viewport = new Viewport(0, 0, renderTarget.Width, renderTarget.Height);

            RenderCallback(context);

            // Update other properties of RenderToTexture.
            node.RenderToTexture.LastFrame = frame;
            node.RenderToTexture.TextureMatrix = GraphicsHelper.ProjectorBiasMatrix
                                                 * cameraNode.Camera.Projection
                                                 * cameraNode.PoseWorld.Inverse;
        }
    }
    catch (InvalidOperationException exception)
    {
        // BUGFIX: The message previously named SceneCaptureRenderer.Render()
        // (copy-paste error); this is the planar reflection renderer.
        throw new GraphicsException(
            "InvalidOperationException was raised in PlanarReflectionRenderer.Render(). "
            + "This can happen if a SceneQuery instance that is currently in use is modified in the "
            + "RenderCallback. --> Use different SceneQuery types in the method which calls "
            + "PlanarReflectionRenderer.Render() and in the RenderCallback method.", exception);
    }

    graphicsDevice.SetRenderTarget(null);
    savedRenderState.Restore();

    // Restore original context state.
    context.RenderTarget = originalRenderTarget;
    context.Viewport = originalViewport;
    context.CameraNode = originalCameraNode;
    context.LodCameraNode = originalLodCameraNode;
    context.LodBias = originalLodBias;
    context.ReferenceNode = originalReferenceNode;
}
/// <summary>
/// Renders cascaded shadow maps: for each visible LightNode with a CascadedShadow,
/// the camera frustum is split into cascades, each cascade gets a snapped orthographic
/// light frustum, and the scene is rendered into one tile of a shadow-map texture atlas.
/// </summary>
public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
{
  if (nodes == null)
  {
    throw new ArgumentNullException("nodes");
  }
  if (context == null)
  {
    throw new ArgumentNullException("context");
  }

  int numberOfNodes = nodes.Count;
  if (numberOfNodes == 0)
  {
    return;
  }

  context.ThrowIfCameraMissing();
  context.ThrowIfSceneMissing();

  // Remember original context state to restore it at the end.
  var originalRenderTarget = context.RenderTarget;
  var originalViewport = context.Viewport;
  var originalReferenceNode = context.ReferenceNode;

  // Camera properties
  var cameraNode = context.CameraNode;
  var cameraPose = cameraNode.PoseWorld;
  var projection = cameraNode.Camera.Projection;
  if (!(projection is PerspectiveProjection))
  {
    throw new NotImplementedException(
      "Cascaded shadow maps not yet implemented for scenes with orthographic camera.");
  }

  float fieldOfViewY = projection.FieldOfViewY;
  float aspectRatio = projection.AspectRatio;

  // Update SceneNode.LastFrame for all visible nodes.
  int frame = context.Frame;
  cameraNode.LastFrame = frame;

  // The scene node renderer should use the light camera instead of the player camera.
  context.CameraNode = _orthographicCameraNode;
  context.Technique = "Directional";

  var graphicsService = context.GraphicsService;
  var graphicsDevice = graphicsService.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);

  for (int i = 0; i < numberOfNodes; i++)
  {
    var lightNode = nodes[i] as LightNode;
    if (lightNode == null)
    {
      continue;
    }

    var shadow = lightNode.Shadow as CascadedShadow;
    if (shadow == null)
    {
      continue;
    }

    // LightNode is visible in current frame.
    lightNode.LastFrame = frame;

    // The shadow map is a texture atlas: all cascade tiles side by side.
    var format = new RenderTargetFormat(
      shadow.PreferredSize * shadow.NumberOfCascades,
      shadow.PreferredSize,
      false,
      shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
      DepthFormat.Depth24);

    // NOTE(review): Indices 0..3 are read regardless of NumberOfCascades — presumably
    // IsCascadeLocked is always a 4-element array; confirm in CascadedShadow.
    bool allLocked = shadow.IsCascadeLocked[0] && shadow.IsCascadeLocked[1] && shadow.IsCascadeLocked[2] && shadow.IsCascadeLocked[3];

    if (shadow.ShadowMap == null)
    {
      shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(format);
      allLocked = false; // Need to render shadow map.
    }

    // If we can reuse the whole shadow map texture, abort early.
    if (allLocked)
    {
      continue;
    }

    // Split distances: [cameraNear, d0, d1, d2, d3]; cascade i covers
    // [_csmSplitDistances[i], _csmSplitDistances[i + 1]].
    _csmSplitDistances[0] = projection.Near;
    _csmSplitDistances[1] = shadow.Distances.X;
    _csmSplitDistances[2] = shadow.Distances.Y;
    _csmSplitDistances[3] = shadow.Distances.Z;
    _csmSplitDistances[4] = shadow.Distances.W;

    // (Re-)Initialize the array for cached matrices in the CascadedShadow.
    if (shadow.ViewProjections == null || shadow.ViewProjections.Length < shadow.NumberOfCascades)
    {
      shadow.ViewProjections = new Matrix[shadow.NumberOfCascades];
    }

    // Initialize the projection matrices to an empty matrix.
    // The unused matrices should not contain valid projections because
    // CsmComputeSplitOptimized in CascadedShadowMask.fxh should not choose
    // the wrong cascade.
    for (int j = 0; j < shadow.ViewProjections.Length; j++)
    {
      if (!shadow.IsCascadeLocked[j]) // Do not delete cached info for cached cascade.
      {
        shadow.ViewProjections[j] = new Matrix();
      }
    }

    // If some cascades are cached, we have to create a new shadow map and copy
    // the old cascades into the new shadow map.
    if (shadow.IsCascadeLocked[0] || shadow.IsCascadeLocked[1] || shadow.IsCascadeLocked[2] || shadow.IsCascadeLocked[3])
    {
      var oldShadowMap = shadow.ShadowMap;
      shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(new RenderTargetFormat(oldShadowMap));

      graphicsDevice.SetRenderTarget(shadow.ShadowMap);
      graphicsDevice.Clear(Color.White);

      // Copy the locked cascade tiles 1:1 from the old atlas into the new one.
      var spriteBatch = graphicsService.GetSpriteBatch();
      spriteBatch.Begin(SpriteSortMode.Deferred, BlendState.Opaque, SamplerState.PointClamp, DepthStencilState.None, RasterizerState.CullNone);
      for (int cascade = 0; cascade < shadow.NumberOfCascades; cascade++)
      {
        if (shadow.IsCascadeLocked[cascade])
        {
          var viewport = GetViewport(shadow, cascade);
          var rectangle = new Rectangle(viewport.X, viewport.Y, viewport.Width, viewport.Height);
          spriteBatch.Draw(oldShadowMap, rectangle, rectangle, Color.White);
        }
      }
      spriteBatch.End();

      graphicsService.RenderTargetPool.Recycle(oldShadowMap);
    }
    else
    {
      graphicsDevice.SetRenderTarget(shadow.ShadowMap);
      graphicsDevice.Clear(Color.White);
    }

    context.RenderTarget = shadow.ShadowMap;

    graphicsDevice.DepthStencilState = DepthStencilState.Default;
    graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
    graphicsDevice.BlendState = BlendState.Opaque;

    context.ReferenceNode = lightNode;
    context.Object = shadow;
    context.ShadowNear = 0; // Obsolete: Only kept for backward compatibility.

    bool shadowMapContainsSomething = false;
    for (int split = 0; split < shadow.NumberOfCascades; split++)
    {
      if (shadow.IsCascadeLocked[split])
      {
        continue;
      }

      // Tell the scene node renderers which atlas tile is being rendered.
      context.Data[RenderContextKeys.ShadowTileIndex] = CubeMapShadowMapRenderer.BoxedIntegers[split];

      // near/far of this split.
      float near = _csmSplitDistances[split];
      float far = Math.Max(_csmSplitDistances[split + 1], near + Numeric.EpsilonF);

      // Create a view volume for this split.
      _splitVolume.SetFieldOfView(fieldOfViewY, aspectRatio, near, far);

      // Find the bounding sphere of the split camera frustum.
      Vector3F center;
      float radius;
      GetBoundingSphere(_splitVolume, out center, out radius);

      // Extend radius to get enough border for filtering.
      int shadowMapSize = shadow.ShadowMap.Height;

      // We could extend by (ShadowMapSize + BorderTexels) / ShadowMapSize;
      // Add at least 1 texel. (This way, shadow mask shader can clamp uv to
      // texture rect in without considering half texel border to avoid sampling outside..)
      radius *= (float)(shadowMapSize + 1) / shadowMapSize;

      // Convert center to light space.
      Pose lightPose = lightNode.PoseWorld;
      center = cameraPose.ToWorldPosition(center);
      center = lightPose.ToLocalPosition(center);

      // Snap center to texel positions to avoid shadow swimming.
      SnapPositionToTexels(ref center, 2 * radius, shadowMapSize);

      // Convert center back to world space.
      center = lightPose.ToWorldPosition(center);

      Matrix33F orientation = lightPose.Orientation;
      Vector3F backward = orientation.GetColumn(2);
      var orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

      // Create a tight orthographic frustum around the cascade's bounding sphere.
      orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
      Vector3F cameraPosition = center + radius * backward;
      Pose frustumPose = new Pose(cameraPosition, orientation);
      Pose view = frustumPose.Inverse;
      shadow.ViewProjections[split] = (Matrix)view * orthographicProjection;

      // Convert depth bias from "texel" to light space [0, 1] depth.
      // Minus sign to move receiver depth closer to light. Divide by depth to normalize.
      float unitsPerTexel = orthographicProjection.Width / shadow.ShadowMap.Height;
      shadow.EffectiveDepthBias[split] = -shadow.DepthBias[split] * unitsPerTexel / orthographicProjection.Depth;

      // Convert normal offset from "texel" to world space.
      shadow.EffectiveNormalOffset[split] = shadow.NormalOffset[split] * unitsPerTexel;

      // For rendering the shadow map, move near plane back by MinLightDistance
      // to catch occluders in front of the cascade.
      orthographicProjection.Near = -shadow.MinLightDistance;
      _orthographicCameraNode.PoseWorld = frustumPose;

      // Set a viewport to render a tile in the texture atlas.
      graphicsDevice.Viewport = GetViewport(shadow, split);
      context.Viewport = graphicsDevice.Viewport;

      // RenderCallback returns whether anything was drawn into this tile.
      shadowMapContainsSomething |= RenderCallback(context);
    }

    // Recycle shadow map if empty.
    if (!shadowMapContainsSomething)
    {
      graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
      shadow.ShadowMap = null;
    }
  }

  graphicsDevice.SetRenderTarget(null);
  savedRenderState.Restore();

  // Restore original context state.
  context.CameraNode = cameraNode;
  context.ShadowNear = float.NaN;
  context.Technique = null;
  context.RenderTarget = originalRenderTarget;
  context.Viewport = originalViewport;
  context.ReferenceNode = originalReferenceNode;
  context.Object = null;
  context.Data[RenderContextKeys.ShadowTileIndex] = null;
}
/// <summary>
/// Estimates the size of an object in pixels.
/// </summary>
/// <param name="cameraNode">The camera node with perspective projection.</param>
/// <param name="viewport">The viewport.</param>
/// <param name="geometricObject">The geometric object.</param>
/// <returns>
/// The estimated width and height of <paramref name="geometricObject"/> in pixels.
/// </returns>
/// <remarks>
/// The method assumes that the object is fully visible by the camera, i.e. it does not
/// perform frustum culling. The size is estimated from the object's bounding shape.
/// </remarks>
/// <exception cref="ArgumentNullException">
/// <paramref name="cameraNode"/> or <paramref name="geometricObject"/> is
/// <see langword="null"/>.
/// </exception>
internal static Vector2F GetScreenSize(CameraNode cameraNode, Viewport viewport, IGeometricObject geometricObject)
{
    // Reference implementation — prefer to optimize and inline this code when needed.
    if (cameraNode == null)
        throw new ArgumentNullException("cameraNode");
    if (geometricObject == null)
        throw new ArgumentNullException("geometricObject");

    // Approximate the object by the bounding sphere of its world-space AABB.
    var aabb = geometricObject.Aabb;
    float diameter = aabb.Extent.Length;

    Matrix44F projectionMatrix = cameraNode.Camera.Projection;
    bool isOrthographic = (projectionMatrix.M33 != 0);

    // M00/M11 are the x/y scale factors of the projection:
    //   Orthographic: M00 = 2 / (right - left),           M11 = 2 / (top - bottom)
    //   Perspective:  M00 = 2 * zNear / (right - left) = 1 / tan(fovX/2)
    //                 M11 = 2 * zNear / (top - bottom) = 1 / tan(fovY/2)
    float scaleX = Math.Abs(projectionMatrix.M00);
    float scaleY = Math.Abs(projectionMatrix.M11);

    if (isOrthographic)
    {
        // ----- Orthographic Projection
        // sizeX = viewportWidth * width / (right - left) = viewportWidth * width * scaleX / 2
        // sizeY analogous.
        return new Vector2F(
            viewport.Width * diameter * scaleX / 2,
            viewport.Height * diameter * scaleY / 2);
    }

    // ----- Perspective Projection
    // Planar distance from camera to object: distance vector projected onto the
    // look direction (camera forward = -z axis of the camera orientation).
    Pose cameraPose = cameraNode.PoseWorld;
    Vector3F lookDirection = -cameraPose.Orientation.GetColumn(2);
    float distance = Vector3F.Dot(aabb.Center - cameraPose.Position, lookDirection);

    // Assume the object is in front of the camera (no frustum culling).
    distance = Math.Abs(distance);

    // Avoid division by zero.
    if (distance < Numeric.EpsilonF)
        distance = Numeric.EpsilonF;

    // sizeX = viewportWidth * width / (objectDistance * 2 * tan(fovX/2))
    //       = viewportWidth * width * scaleX / (2 * objectDistance)
    // sizeY analogous.
    return new Vector2F(
        viewport.Width * diameter * scaleX / (2 * distance),
        viewport.Height * diameter * scaleY / (2 * distance));
}
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  // Renders all SkyObjectNodes in the given list as camera-centered billboards
  // (object texture quad + optional glow quad). Other node types are ignored.
  ThrowIfDisposed();

  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int numberOfNodes = nodes.Count;
  if (numberOfNodes == 0)   // FIX: was "nodes.Count == 0"; use the cached count for consistency.
    return;

  context.Validate(_effect);
  context.ThrowIfCameraMissing();

  // Save render state; it is restored at the end of the method.
  var graphicsDevice = context.GraphicsService.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);
  graphicsDevice.RasterizerState = RasterizerState.CullNone;
  graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
  graphicsDevice.BlendState = BlendState.AlphaBlend;

  // Camera properties.
  // The view matrix contains only the camera orientation (translation is zero):
  // sky objects are rendered relative to the camera position.
  var cameraNode = context.CameraNode;
  Pose cameraPose = cameraNode.PoseWorld;
  Matrix view = (Matrix)new Matrix44F(cameraPose.Orientation.Transposed, new Vector3F(0));
  Matrix projection = cameraNode.Camera.Projection;
  _effectParameterViewProjection.SetValue(view * projection);

  // Update SceneNode.LastFrame for all visible nodes.
  int frame = context.Frame;
  cameraNode.LastFrame = frame;

  for (int i = 0; i < numberOfNodes; i++)
  {
    var node = nodes[i] as SkyObjectNode;
    if (node == null)
      continue;

    // SkyObjectNode is visible in current frame.
    node.LastFrame = frame;

    // Get billboard axes from scene node pose.
    Matrix33F orientation = node.PoseWorld.Orientation;
    Vector3F right = orientation.GetColumn(0);
    Vector3F up = orientation.GetColumn(1);
    Vector3F normal = orientation.GetColumn(2);
    Vector3F forward = -normal;

    _effectParameterNormal.SetValue((Vector3)(normal));

    // ----- Render object texture.
    var texture = node.Texture;
    if (texture != null)
    {
      _effectParameterUp.SetValue((Vector3)(up));
      _effectParameterRight.SetValue((Vector3)(right));
      _effectParameterSunLight.SetValue((Vector3)node.SunLight);
      _effectParameterAmbientLight.SetValue(new Vector4((Vector3)node.AmbientLight, node.Alpha));
      _effectParameterObjectTexture.SetValue(texture.TextureAtlas);
      _effectParameterLightWrapSmoothness.SetValue(new Vector2(node.LightWrap, node.LightSmoothness));
      _effectParameterSunDirection.SetValue((Vector3)node.SunDirection);

      // Half extents of the quad on the unit sphere, derived from the angular diameter.
      float halfWidthX = (float)Math.Tan(node.AngularDiameter.X / 2);
      float halfWidthY = (float)Math.Tan(node.AngularDiameter.Y / 2);

      // Texture coordinates of packed texture (texture atlas).
      Vector2F texCoordLeftTop = texture.GetTextureCoordinates(new Vector2F(0, 0), 0);
      Vector2F texCoordRightBottom = texture.GetTextureCoordinates(new Vector2F(1, 1), 0);
      float texCoordLeft = texCoordLeftTop.X;
      float texCoordTop = texCoordLeftTop.Y;
      float texCoordRight = texCoordRightBottom.X;
      float texCoordBottom = texCoordRightBottom.Y;

      // (centerU, centerV, 1/halfExtentU, 1/halfExtentV) of the atlas tile.
      _effectParameterTextureParameters.SetValue(new Vector4(
        (texCoordLeft + texCoordRight) / 2,
        (texCoordTop + texCoordBottom) / 2,
        1 / ((texCoordRight - texCoordLeft) / 2),   // 1 / half extent
        1 / ((texCoordBottom - texCoordTop) / 2)));

      // Build a triangle-strip quad around the forward direction.
      _vertices[0].Position = (Vector3)(forward - right * halfWidthX - up * halfWidthY);
      _vertices[0].TextureCoordinate = new Vector2(texCoordLeft, texCoordBottom);
      _vertices[1].Position = (Vector3)(forward - right * halfWidthX + up * halfWidthY);
      _vertices[1].TextureCoordinate = new Vector2(texCoordLeft, texCoordTop);
      _vertices[2].Position = (Vector3)(forward + right * halfWidthX - up * halfWidthY);
      _vertices[2].TextureCoordinate = new Vector2(texCoordRight, texCoordBottom);
      _vertices[3].Position = (Vector3)(forward + right * halfWidthX + up * halfWidthY);
      _vertices[3].TextureCoordinate = new Vector2(texCoordRight, texCoordTop);

      if (context.IsHdrEnabled())
        _effectPassObjectLinear.Apply();
      else
        _effectPassObjectGamma.Apply();

      graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _vertices, 0, 2);
    }

    // ----- Render glows.
    if (node.GlowColor0.LengthSquared > 0 || node.GlowColor1.LengthSquared > 0)
    {
      _effectParameterGlow0.SetValue(new Vector4((Vector3)node.GlowColor0, node.GlowExponent0));
      _effectParameterGlow1.SetValue(new Vector4((Vector3)node.GlowColor1, node.GlowExponent1));

      // Estimate the quad size at which the glow falls below the cutoff threshold.
      // If the math produces NaN/infinity (e.g. glow color is 0), the glow is disabled.
      float halfWidth0 = (float)Math.Tan(Math.Acos(Math.Pow(node.GlowCutoffThreshold / node.GlowColor0.LargestComponent, 1 / node.GlowExponent0)));
      if (!Numeric.IsPositiveFinite(halfWidth0))
        halfWidth0 = 0;

      float halfWidth1 = (float)Math.Tan(Math.Acos(Math.Pow(node.GlowCutoffThreshold / node.GlowColor1.LargestComponent, 1 / node.GlowExponent1)));
      if (!Numeric.IsPositiveFinite(halfWidth1))
        halfWidth1 = 0;

      float halfWidth = Math.Max(halfWidth0, halfWidth1);

      _vertices[0].Position = (Vector3)(forward - right * halfWidth - up * halfWidth);
      _vertices[0].TextureCoordinate = (Vector2)new Vector2F(0, 1);
      _vertices[1].Position = (Vector3)(forward - right * halfWidth + up * halfWidth);
      _vertices[1].TextureCoordinate = (Vector2)new Vector2F(0, 0);
      _vertices[2].Position = (Vector3)(forward + right * halfWidth - up * halfWidth);
      _vertices[2].TextureCoordinate = (Vector2)new Vector2F(1, 1);
      _vertices[3].Position = (Vector3)(forward + right * halfWidth + up * halfWidth);
      _vertices[3].TextureCoordinate = (Vector2)new Vector2F(1, 0);

      if (context.IsHdrEnabled())
        _effectPassGlowLinear.Apply();
      else
        _effectPassGlowGamma.Apply();

      graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _vertices, 0, 2);
    }
  }

  savedRenderState.Restore();
}
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  // Renders a variance shadow map (VSM) for each LightNode in the list that has a
  // VarianceShadow. Other node types and other shadow types are ignored.
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int numberOfNodes = nodes.Count;
  if (numberOfNodes == 0)
    return;

  Debug.Assert(context.CameraNode != null, "A camera node has to be set in the render context.");
  Debug.Assert(context.Scene != null, "A scene has to be set in the render context.");

  // Remember original render context state; it is restored at the end of the method.
  var originalRenderTarget = context.RenderTarget;
  var originalViewport = context.Viewport;
  var originalReferenceNode = context.ReferenceNode;

  // Camera properties.
  var cameraNode = context.CameraNode;
  var cameraPose = cameraNode.PoseWorld;
  var projection = cameraNode.Camera.Projection;
  if (!(projection is PerspectiveProjection))
  {
    // FIX: The original message stated the opposite of the condition ("... for scenes
    // with perspective camera"). A perspective camera is required because
    // FieldOfViewY/AspectRatio are read below.
    throw new NotImplementedException("VSM shadow maps are only implemented for scenes with a perspective camera.");
  }

  float fieldOfViewY = projection.FieldOfViewY;
  float aspectRatio = projection.AspectRatio;

  // Update SceneNode.LastFrame for all rendered nodes.
  int frame = context.Frame;
  cameraNode.LastFrame = frame;

  // The scene node renderer should use the light camera instead of the player camera.
  context.CameraNode = _orthographicCameraNode;

  // The shadow map is rendered using the technique "DirectionalVsm".
  // See ShadowMap.fx in the DigitalRune source code folder.
  context.Technique = "DirectionalVsm";

  var graphicsService = context.GraphicsService;
  var graphicsDevice = graphicsService.GraphicsDevice;
  var originalBlendState = graphicsDevice.BlendState;
  var originalDepthStencilState = graphicsDevice.DepthStencilState;
  var originalRasterizerState = graphicsDevice.RasterizerState;

  for (int i = 0; i < numberOfNodes; i++)
  {
    var lightNode = nodes[i] as LightNode;
    if (lightNode == null)
      continue;

    var shadow = lightNode.Shadow as VarianceShadow;
    if (shadow == null)
      continue;

    // LightNode is visible in current frame.
    lightNode.LastFrame = frame;

    // A locked shadow map is not updated.
    // (Hoisted above the RenderTargetFormat construction to avoid pointless work.)
    if (shadow.ShadowMap != null && shadow.IsLocked)
      continue;

    if (shadow.ShadowMap == null)
    {
      // The format of the shadow map:
      var format = new RenderTargetFormat(
        shadow.PreferredSize,
        shadow.PreferredSize,
        false,
        shadow.Prefer16Bit ? SurfaceFormat.HalfVector2 : SurfaceFormat.Vector2,   // VSM needs two channels!
        DepthFormat.Depth24);

      shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(format);
    }

    graphicsDevice.DepthStencilState = DepthStencilState.Default;
    graphicsDevice.BlendState = BlendState.Opaque;

    // Render front and back faces for VSM due to low shadow map texel density.
    // (VSM is usually used for distant geometry.)
    graphicsDevice.RasterizerState = RasterizerState.CullNone;

    graphicsDevice.SetRenderTarget(shadow.ShadowMap);
    context.RenderTarget = shadow.ShadowMap;
    context.Viewport = graphicsDevice.Viewport;
    graphicsDevice.Clear(Color.White);

    // Compute an orthographic camera for the light.
    // If Shadow.TargetArea is null, the shadow map should cover the area in front of the player camera.
    // If Shadow.TargetArea is set, the shadow map should cover this static area.
    if (shadow.TargetArea == null)
    {
      // near/far of this shadowed area.
      float near = projection.Near;
      float far = shadow.MaxDistance;

      // Abort if near-far distances are invalid.
      if (Numeric.IsGreaterOrEqual(near, far))
        continue;

      // Create a view volume for frustum part that is covered by the shadow map.
      _cameraVolume.SetFieldOfView(fieldOfViewY, aspectRatio, near, far);

      // Find the bounding sphere of the frustum part.
      Vector3F center;
      float radius;
      GetBoundingSphere(_cameraVolume, out center, out radius);

      // Convert center to light space.
      Pose lightPose = lightNode.PoseWorld;
      center = cameraPose.ToWorldPosition(center);
      center = lightPose.ToLocalPosition(center);

      // Snap center to texel positions to avoid shadow swimming.
      SnapPositionToTexels(ref center, 2 * radius, shadow.ShadowMap.Height);

      // Convert center back to world space.
      center = lightPose.ToWorldPosition(center);

      Matrix33F orientation = lightPose.Orientation;
      Vector3F backward = orientation.GetColumn(2);
      var orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

      // Create a tight orthographic frustum around the target bounding sphere.
      orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
      Vector3F cameraPosition = center + radius * backward;
      Pose frustumPose = new Pose(cameraPosition, orientation);
      Pose view = frustumPose.Inverse;
      shadow.ViewProjection = (Matrix)view * orthographicProjection;

      // For rendering the shadow map, move near plane back by MinLightDistance
      // to catch occluders in front of the camera frustum.
      orthographicProjection.Near = -shadow.MinLightDistance;
      _orthographicCameraNode.PoseWorld = frustumPose;
    }
    else
    {
      // Get bounding sphere of static target area.
      Aabb targetAabb = shadow.TargetArea.Value;
      Vector3F center = targetAabb.Center;
      float radius = (targetAabb.Maximum - center).Length;

      // Convert center to light space.
      Matrix33F orientation = lightNode.PoseWorld.Orientation;
      Vector3F backward = orientation.GetColumn(2);
      var orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

      // Create a tight orthographic frustum around the target bounding sphere.
      orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
      Vector3F cameraPosition = center + radius * backward;
      Pose frustumPose = new Pose(cameraPosition, orientation);
      Pose view = frustumPose.Inverse;
      shadow.ViewProjection = (Matrix)view * orthographicProjection;

      // For rendering the shadow map, move near plane back by MinLightDistance
      // to catch occluders in front of the camera frustum.
      orthographicProjection.Near = -shadow.MinLightDistance;
      _orthographicCameraNode.PoseWorld = frustumPose;
    }

    context.ReferenceNode = lightNode;
    context.Object = shadow;

    // Render objects into shadow map.
    bool shadowMapContainsSomething = RenderCallback(context);
    if (shadowMapContainsSomething)
    {
      // Blur shadow map.
      if (shadow.Filter != null && shadow.Filter.Scale > 0)
      {
        context.SourceTexture = shadow.ShadowMap;
        shadow.Filter.Process(context);
        context.SourceTexture = null;
      }
    }
    else
    {
      // Shadow map is empty. Recycle it.
      graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
      shadow.ShadowMap = null;
    }
  }

  // Restore graphics device and render context state.
  graphicsDevice.SetRenderTarget(null);
  graphicsDevice.BlendState = originalBlendState;
  graphicsDevice.DepthStencilState = originalDepthStencilState;
  graphicsDevice.RasterizerState = originalRasterizerState;

  context.CameraNode = cameraNode;
  context.Technique = null;
  context.RenderTarget = originalRenderTarget;
  context.Viewport = originalViewport;
  context.ReferenceNode = originalReferenceNode;
  context.Object = null;
}
public override void ComputeCollision(ContactSet contactSet, CollisionQueryType type) { // Invoke GJK for closest points. if (type == CollisionQueryType.ClosestPoints) { throw new GeometryException("This collision algorithm cannot handle closest-point queries. Use GJK instead."); } CollisionObject collisionObjectA = contactSet.ObjectA; CollisionObject collisionObjectB = contactSet.ObjectB; IGeometricObject geometricObjectA = collisionObjectA.GeometricObject; IGeometricObject geometricObjectB = collisionObjectB.GeometricObject; BoxShape boxA = geometricObjectA.Shape as BoxShape; BoxShape boxB = geometricObjectB.Shape as BoxShape; // Check if collision objects shapes are correct. if (boxA == null || boxB == null) { throw new ArgumentException("The contact set must contain box shapes.", "contactSet"); } Vector3F scaleA = Vector3F.Absolute(geometricObjectA.Scale); Vector3F scaleB = Vector3F.Absolute(geometricObjectB.Scale); Pose poseA = geometricObjectA.Pose; Pose poseB = geometricObjectB.Pose; // We perform the separating axis test in the local space of A. // The following variables are in local space of A. // Center of box B. Vector3F cB = poseA.ToLocalPosition(poseB.Position); // Orientation matrix of box B Matrix33F mB = poseA.Orientation.Transposed * poseB.Orientation; // Absolute of mB. Matrix33F aMB = Matrix33F.Absolute(mB); // Half extent vectors of the boxes. Vector3F eA = 0.5f * boxA.Extent * scaleA; Vector3F eB = 0.5f * boxB.Extent * scaleB; // ----- Separating Axis tests // If the boxes are separated, we immediately return. // For the case of interpenetration, we store the smallest penetration depth. float smallestPenetrationDepth = float.PositiveInfinity; int separatingAxisNumber = 0; Vector3F normal = Vector3F.UnitX; bool isNormalInverted = false; contactSet.HaveContact = false; // Assume no contact. 
#region ----- Case 1: Separating Axis: (1, 0, 0) ----- float separation = Math.Abs(cB.X) - (eA.X + eB.X * aMB.M00 + eB.Y * aMB.M01 + eB.Z * aMB.M02); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean && -separation < smallestPenetrationDepth) { normal = Vector3F.UnitX; smallestPenetrationDepth = -separation; isNormalInverted = cB.X < 0; separatingAxisNumber = 1; } #endregion #region ----- Case 2: Separating Axis: (0, 1, 0) ----- separation = Math.Abs(cB.Y) - (eA.Y + eB.X * aMB.M10 + eB.Y * aMB.M11 + eB.Z * aMB.M12); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean && -separation < smallestPenetrationDepth) { normal = Vector3F.UnitY; smallestPenetrationDepth = -separation; isNormalInverted = cB.Y < 0; separatingAxisNumber = 2; } #endregion #region ----- Case 3: Separating Axis: (0, 0, 1) ----- separation = Math.Abs(cB.Z) - (eA.Z + eB.X * aMB.M20 + eB.Y * aMB.M21 + eB.Z * aMB.M22); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean && -separation < smallestPenetrationDepth) { normal = Vector3F.UnitZ; smallestPenetrationDepth = -separation; isNormalInverted = cB.Z < 0; separatingAxisNumber = 3; } #endregion #region ----- Case 4: Separating Axis: OrientationB * (1, 0, 0) ----- float expression = cB.X * mB.M00 + cB.Y * mB.M10 + cB.Z * mB.M20; separation = Math.Abs(expression) - (eB.X + eA.X * aMB.M00 + eA.Y * aMB.M10 + eA.Z * aMB.M20); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean && -separation < smallestPenetrationDepth) { normal = mB.GetColumn(0); smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 4; } #endregion #region ----- Case 5: Separating Axis: OrientationB * (0, 1, 0) ----- expression = cB.X * mB.M01 + cB.Y * mB.M11 + cB.Z * mB.M21; separation = Math.Abs(expression) - (eB.Y + eA.X * aMB.M01 + eA.Y * aMB.M11 + eA.Z * aMB.M21); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean && -separation < 
smallestPenetrationDepth) { normal = mB.GetColumn(1); smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 5; } #endregion #region ----- Case 6: Separating Axis: OrientationB * (0, 0, 1) ----- expression = cB.X * mB.M02 + cB.Y * mB.M12 + cB.Z * mB.M22; separation = Math.Abs(expression) - (eB.Z + eA.X * aMB.M02 + eA.Y * aMB.M12 + eA.Z * aMB.M22); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean && -separation < smallestPenetrationDepth) { normal = mB.GetColumn(2); smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 6; } #endregion // The next 9 tests are edge-edge cases. The normal vector has to be normalized // to get the right penetration depth. // normal = Normalize(edgeA x edgeB) Vector3F separatingAxis; float length; #region ----- Case 7: Separating Axis: (1, 0, 0) x (OrientationB * (1, 0, 0)) ----- expression = cB.Z * mB.M10 - cB.Y * mB.M20; separation = Math.Abs(expression) - (eA.Y * aMB.M20 + eA.Z * aMB.M10 + eB.Y * aMB.M02 + eB.Z * aMB.M01); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(0, -mB.M20, mB.M10); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 7; } } #endregion #region ----- Case 8: Separating Axis: (1, 0, 0) x (OrientationB * (0, 1, 0)) ----- expression = cB.Z * mB.M11 - cB.Y * mB.M21; separation = Math.Abs(expression) - (eA.Y * aMB.M21 + eA.Z * aMB.M11 + eB.X * aMB.M02 + eB.Z * aMB.M00); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(0, -mB.M21, mB.M11); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; 
isNormalInverted = expression < 0; separatingAxisNumber = 8; } } #endregion #region ----- Case 9: Separating Axis: (1, 0, 0) x (OrientationB * (0, 0, 1)) ----- expression = cB.Z * mB.M12 - cB.Y * mB.M22; separation = Math.Abs(expression) - (eA.Y * aMB.M22 + eA.Z * aMB.M12 + eB.X * aMB.M01 + eB.Y * aMB.M00); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(0, -mB.M22, mB.M12); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 9; } } #endregion #region ----- Case 10: Separating Axis: (0, 1, 0) x (OrientationB * (1, 0, 0)) ----- expression = cB.X * mB.M20 - cB.Z * mB.M00; separation = Math.Abs(expression) - (eA.X * aMB.M20 + eA.Z * aMB.M00 + eB.Y * aMB.M12 + eB.Z * aMB.M11); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(mB.M20, 0, -mB.M00); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 10; } } #endregion #region ----- Case 11: Separating Axis: (0, 1, 0) x (OrientationB * (0, 1, 0)) ----- expression = cB.X * mB.M21 - cB.Z * mB.M01; separation = Math.Abs(expression) - (eA.X * aMB.M21 + eA.Z * aMB.M01 + eB.X * aMB.M12 + eB.Z * aMB.M10); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(mB.M21, 0, -mB.M01); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 11; } } #endregion #region ----- Case 12: Separating Axis: (0, 1, 0) x (OrientationB * (0, 0, 1)) ----- expression 
= cB.X * mB.M22 - cB.Z * mB.M02; separation = Math.Abs(expression) - (eA.X * aMB.M22 + eA.Z * aMB.M02 + eB.X * aMB.M11 + eB.Y * aMB.M10); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(mB.M22, 0, -mB.M02); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 12; } } #endregion #region ----- Case 13: Separating Axis: (0, 0, 1) x (OrientationB * (1, 0, 0)) ----- expression = cB.Y * mB.M00 - cB.X * mB.M10; separation = Math.Abs(expression) - (eA.X * aMB.M10 + eA.Y * aMB.M00 + eB.Y * aMB.M22 + eB.Z * aMB.M21); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(-mB.M10, mB.M00, 0); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 13; } } #endregion #region ----- Case 14: Separating Axis: (0, 0, 1) x (OrientationB * (0, 1, 0)) ----- expression = cB.Y * mB.M01 - cB.X * mB.M11; separation = Math.Abs(expression) - (eA.X * aMB.M11 + eA.Y * aMB.M01 + eB.X * aMB.M22 + eB.Z * aMB.M20); if (separation > 0) { return; } if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(-mB.M11, mB.M01, 0); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 14; } } #endregion #region ----- Case 15: Separating Axis: (0, 0, 1) x (OrientationB * (0, 0, 1)) ----- expression = cB.Y * mB.M02 - cB.X * mB.M12; separation = Math.Abs(expression) - (eA.X * aMB.M12 + eA.Y * aMB.M02 + eB.X * aMB.M21 + eB.Y * aMB.M20); if (separation > 0) { return; } 
if (type != CollisionQueryType.Boolean) { separatingAxis = new Vector3F(-mB.M12, mB.M02, 0); length = separatingAxis.Length; separation /= length; if (-separation < smallestPenetrationDepth) { normal = separatingAxis / length; smallestPenetrationDepth = -separation; isNormalInverted = expression < 0; separatingAxisNumber = 15; } } #endregion // We have a contact. contactSet.HaveContact = true; // HaveContact queries can exit here. if (type == CollisionQueryType.Boolean) { return; } // Lets find the contact info. Debug.Assert(smallestPenetrationDepth >= 0, "The smallest penetration depth should be greater than or equal to 0."); if (isNormalInverted) { normal = -normal; } // Transform normal from local space of A to world space. Vector3F normalWorld = poseA.ToWorldDirection(normal); if (separatingAxisNumber > 6) { // The intersection was detected by an edge-edge test. // Get the intersecting edges. // Separating axes: // 7 = x edge on A, x edge on B // 8 = x edge on A, y edge on B // 9 = x edge on A, Z edge on B // 10 = y edge on A, x edge on B // ... // 15 = z edge on A, z edge on B var edgeA = boxA.GetEdge((separatingAxisNumber - 7) / 3, normal, scaleA); var edgeB = boxB.GetEdge((separatingAxisNumber - 7) % 3, Matrix33F.MultiplyTransposed(mB, -normal), scaleB); edgeB.Start = mB * edgeB.Start + cB; edgeB.End = mB * edgeB.End + cB; Vector3F position; Vector3F dummy; GeometryHelper.GetClosestPoints(edgeA, edgeB, out position, out dummy); position = position - normal * (smallestPenetrationDepth / 2); // Position is between the positions of the box surfaces. 
// Convert back position from local space of A to world space; position = poseA.ToWorldPosition(position); Contact contact = ContactHelper.CreateContact(contactSet, position, normalWorld, smallestPenetrationDepth, false); ContactHelper.Merge(contactSet, contact, type, CollisionDetection.ContactPositionTolerance); } else if (1 <= separatingAxisNumber && separatingAxisNumber <= 6) { // The intersection was detected by a face vs. * test. // The separating axis is perpendicular to a face. #region ----- Credits ----- // The face vs. * test is based on the algorithm of the Bullet Continuous Collision // Detection and Physics Library. DigitalRune Geometry contains a new and improved // implementation of the original algorithm. // // The box-box detector in Bullet contains the following remarks: // // Box-Box collision detection re-distributed under the ZLib license with permission from Russell L. Smith // Original version is from Open Dynamics Engine, Copyright (C) 2001,2002 Russell L. Smith. // All rights reserved. Email: [email protected] Web: www.q12.org // // Bullet Continuous Collision Detection and Physics Library // Copyright (c) 2003-2006 Erwin Coumans http://continuousphysics.com/Bullet/ // // This software is provided 'as-is', without any express or implied warranty. // In no event will the authors be held liable for any damages arising from the use of this software. // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it freely, // subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not claim that you wrote the // original software. If you use this software in a product, an acknowledgment in the product // documentation would be appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being // the original software. // 3. 
This notice may not be removed or altered from any source distribution. #endregion // We define the face perpendicular to the separating axis to be the "reference face". // The face of the other box closest to the reference face is called the "incident face". // Accordingly, we will call the box containing the reference face the "reference box" and // the box containing the incident face the "incident box". // // We will transform the incident face into the 2D space of reference face. Then we will // clip the incident face against the reference face. The polygon resulting from the // intersection will be transformed back into world space and the points of the polygon will // be the candidates for the contact points. Pose poseR; // Pose of reference box. Pose poseI; // Pose of incident box. Vector3F boxExtentR; // Half extent of reference box. Vector3F boxExtentI; // Half extent of incident box. // Contact normal (= normal of reference face) in world space. if (separatingAxisNumber <= 3) { poseR = poseA; poseI = poseB; boxExtentR = eA; boxExtentI = eB; isNormalInverted = false; } else { poseR = poseB; poseI = poseA; boxExtentR = eB; boxExtentI = eA; normalWorld = -normalWorld; isNormalInverted = true; } // Contact normal in local space of incident box. Vector3F normalI = poseI.ToLocalDirection(normalWorld); Vector3F absNormal = normalI; absNormal.Absolute(); Vector3F xAxisInc, yAxisInc; // The basis of the incident-face space. float absFaceOffsetI; // The offset of the incident face to the center of the box. Vector2F faceExtentI; // The half extent of the incident face. Vector3F faceNormal; // The normal of the incident face in world space. float faceDirection; // A value indicating the direction of the incident face. // Find the largest component of the normal. The largest component indicates which face is // the incident face. 
switch (Vector3F.Absolute(normalI).IndexOfLargestComponent) { case 0: faceExtentI.X = boxExtentI.Y; faceExtentI.Y = boxExtentI.Z; absFaceOffsetI = boxExtentI.X; faceNormal = poseI.Orientation.GetColumn(0); xAxisInc = poseI.Orientation.GetColumn(1); yAxisInc = poseI.Orientation.GetColumn(2); faceDirection = normalI.X; break; case 1: faceExtentI.X = boxExtentI.X; faceExtentI.Y = boxExtentI.Z; absFaceOffsetI = boxExtentI.Y; faceNormal = poseI.Orientation.GetColumn(1); xAxisInc = poseI.Orientation.GetColumn(0); yAxisInc = poseI.Orientation.GetColumn(2); faceDirection = normalI.Y; break; // case 2: default: faceExtentI.X = boxExtentI.X; faceExtentI.Y = boxExtentI.Y; absFaceOffsetI = boxExtentI.Z; faceNormal = poseI.Orientation.GetColumn(2); xAxisInc = poseI.Orientation.GetColumn(0); yAxisInc = poseI.Orientation.GetColumn(1); faceDirection = normalI.Z; break; } // Compute center of incident face relative to the center of the reference box in world space. float faceOffset = (faceDirection < 0) ? absFaceOffsetI : -absFaceOffsetI; Vector3F centerOfFaceI = faceNormal * faceOffset + poseI.Position - poseR.Position; // (Note: We will use the center of the incident face to compute the points of the incident // face and transform the points into the reference-face frame. The center of the incident // face is relative to the center of the reference box. We could also get center of the // incident face relative to the center of the reference face. But since we are projecting // the points from 3D to 2D this does not matter.) Vector3F xAxisR, yAxisR; // The basis of the reference-face space. float faceOffsetR; // The offset of the reference face to the center of the box. Vector2F faceExtentR; // The half extent of the reference face. 
// NOTE(review): This is the interior of the box-box contact-generation method. The method
// header and the setup of separatingAxisNumber, poseR, boxExtentR, faceExtentI, centerOfFaceI,
// xAxisInc/yAxisInc, normalWorld and isNormalInverted precede this chunk — presumably a
// separating-axis test that selected the reference face; confirm against the full file.
//
// Select the reference face of the reference box R based on the separating axis:
// axes 1-3 are the x/y/z face normals of one box, 4-6 of the other. For each case we
// store the 2D half extents of the reference face, the face's offset along its normal,
// and the two world-space axes spanning the face plane.
switch (separatingAxisNumber)
{
  case 1:
  case 4:
    // Reference face is an x-face: spanned by the box's y- and z-axes.
    faceExtentR.X = boxExtentR.Y;
    faceExtentR.Y = boxExtentR.Z;
    faceOffsetR = boxExtentR.X;
    xAxisR = poseR.Orientation.GetColumn(1);
    yAxisR = poseR.Orientation.GetColumn(2);
    break;
  case 2:
  case 5:
    // Reference face is a y-face: spanned by the box's x- and z-axes.
    faceExtentR.X = boxExtentR.X;
    faceExtentR.Y = boxExtentR.Z;
    faceOffsetR = boxExtentR.Y;
    xAxisR = poseR.Orientation.GetColumn(0);
    yAxisR = poseR.Orientation.GetColumn(2);
    break;
  // case 3:
  // case 6:
  default:
    // Reference face is a z-face: spanned by the box's x- and y-axes.
    faceExtentR.X = boxExtentR.X;
    faceExtentR.Y = boxExtentR.Y;
    faceOffsetR = boxExtentR.Z;
    xAxisR = poseR.Orientation.GetColumn(0);
    yAxisR = poseR.Orientation.GetColumn(1);
    break;
}

// Compute the center of the incident face in the reference-face frame.
// We can simply project centerOfFaceI onto the x- and y-axis of the reference
// face.
Vector2F centerOfFaceIInR;
//centerOfFaceIInR.X = Vector3F.Dot(centerOfFaceI, xAxisR);
// ----- Optimized version:
centerOfFaceIInR.X = centerOfFaceI.X * xAxisR.X + centerOfFaceI.Y * xAxisR.Y + centerOfFaceI.Z * xAxisR.Z;
//centerOfFaceIInR.Y = Vector3F.Dot(centerOfFaceI, yAxisR);
// ----- Optimized version:
centerOfFaceIInR.Y = centerOfFaceI.X * yAxisR.X + centerOfFaceI.Y * yAxisR.Y + centerOfFaceI.Z * yAxisR.Z;

// Now, we have the center of the incident face in reference-face coordinates.
// To compute the corners of the incident face in reference-face coordinates, we need to
// transform faceExtentI (the half extent vector of the incident face) from the incident-
// face frame to the reference-face frame to compute the corners.
//
// The reference-face frame has the basis
//   mR = (xAxisR, yAxisR, ?)
//
// The incident-face frame has the basis
//   mI = (xAxisI, yAxisI, ?)
//
// Rotation from incident-face frame to reference-face frame is
//   mIToR = mR^-1 * mI
//
// The corner offsets in incident-face space are vectors (x, y, 0). To transform these
// vectors from incident-face space to reference-face space we need to calculate:
//   mIToR * v
//
// Since the z-components are 0 and we are only interested in the resulting x, y coordinates
// in reference-space we can reduce the rotation to a 2 x 2 matrix. (The other components
// are not needed.)
// ----- Optimized version: (Original on the right)
Matrix22F mIToR;
mIToR.M00 = xAxisR.X * xAxisInc.X + xAxisR.Y * xAxisInc.Y + xAxisR.Z * xAxisInc.Z;   // mIToR.M00 = Vector3F.Dot(xAxisR, xAxisInc);
mIToR.M01 = xAxisR.X * yAxisInc.X + xAxisR.Y * yAxisInc.Y + xAxisR.Z * yAxisInc.Z;   // mIToR.M01 = Vector3F.Dot(xAxisR, yAxisInc);
mIToR.M10 = yAxisR.X * xAxisInc.X + yAxisR.Y * xAxisInc.Y + yAxisR.Z * xAxisInc.Z;   // mIToR.M10 = Vector3F.Dot(yAxisR, xAxisInc);
mIToR.M11 = yAxisR.X * yAxisInc.X + yAxisR.Y * yAxisInc.Y + yAxisR.Z * yAxisInc.Z;   // mIToR.M11 = Vector3F.Dot(yAxisR, yAxisInc);

// The corner offsets in incident-face space are:
//   (-faceExtentI.X, -faceExtentI.Y) ... left, bottom corner
//   ( faceExtentI.X, -faceExtentI.Y) ... right, bottom corner
//   ( faceExtentI.X,  faceExtentI.Y) ... right, top corner
//   (-faceExtentI.X,  faceExtentI.Y) ... left, top corner
//
// Instead of transforming each corner offset, we can optimize the computation: Do the
// matrix-vector multiplication once, keep the intermediate products, apply the sign
// of the components when adding the intermediate results.
float k1 = mIToR.M00 * faceExtentI.X;   // Products of matrix-vector multiplication.
float k2 = mIToR.M01 * faceExtentI.Y;
float k3 = mIToR.M10 * faceExtentI.X;
float k4 = mIToR.M11 * faceExtentI.Y;

// Build the incident-face quadrilateral (corners in counter-clockwise order) in
// reference-face coordinates, using a pooled list to avoid garbage.
List<Vector2F> quad = DigitalRune.ResourcePools<Vector2F>.Lists.Obtain();
quad.Add(new Vector2F(centerOfFaceIInR.X - k1 - k2, centerOfFaceIInR.Y - k3 - k4));
quad.Add(new Vector2F(centerOfFaceIInR.X + k1 - k2, centerOfFaceIInR.Y + k3 - k4));
quad.Add(new Vector2F(centerOfFaceIInR.X + k1 + k2, centerOfFaceIInR.Y + k3 + k4));
quad.Add(new Vector2F(centerOfFaceIInR.X - k1 + k2, centerOfFaceIInR.Y - k3 + k4));

// Clip incident face (quadrilateral) against reference face (rectangle).
List<Vector2F> contacts2D = ClipQuadrilateralAgainstRectangle(faceExtentR, quad);

// Transform contact points back to world space and compute penetration depths.
int numberOfContacts = contacts2D.Count;
List<Vector3F> contacts3D = DigitalRune.ResourcePools<Vector3F>.Lists.Obtain();
List<float> penetrationDepths = DigitalRune.ResourcePools<float>.Lists.Obtain();
Matrix22F mRToI = mIToR.Inverse;
// Iterate backwards because invalid contacts are removed from contacts2D during the loop.
for (int i = numberOfContacts - 1; i >= 0; i--)
{
  Vector2F contact2DR = contacts2D[i];                            // Contact in reference-face space.
  Vector2F contact2DI = mRToI * (contact2DR - centerOfFaceIInR);  // Contact in incident-face space.

  // Transform point in incident-face space to world (relative to center of reference box).
  //   contact3D = mI * (x, y, 0) + centerOfFaceI
  Vector3F contact3D;
  contact3D.X = xAxisInc.X * contact2DI.X + yAxisInc.X * contact2DI.Y + centerOfFaceI.X;
  contact3D.Y = xAxisInc.Y * contact2DI.X + yAxisInc.Y * contact2DI.Y + centerOfFaceI.Y;
  contact3D.Z = xAxisInc.Z * contact2DI.X + yAxisInc.Z * contact2DI.Y + centerOfFaceI.Z;

  // Compute penetration depth.
  //float penetrationDepth = faceOffsetR - Vector3F.Dot(normalWorld, contact3D);
  // ----- Optimized version:
  float penetrationDepth = faceOffsetR - (normalWorld.X * contact3D.X + normalWorld.Y * contact3D.Y + normalWorld.Z * contact3D.Z);

  if (penetrationDepth >= 0)
  {
    // Valid contact.
    contacts3D.Add(contact3D);
    penetrationDepths.Add(penetrationDepth);
  }
  else
  {
    // Remove bad contacts from the 2D contacts.
    // (We might still need the 2D contacts, if we need to reduce the contacts.)
    contacts2D.RemoveAt(i);
  }
}

numberOfContacts = contacts3D.Count;
if (numberOfContacts == 0)
{
  // All clipped points were separated. Should never happen because the caller
  // (presumably a SAT test) found overlap on every axis — TODO confirm.
  return;
}

// Revert normal back to original direction.
normal = (isNormalInverted) ? -normalWorld : normalWorld;

// Note: normal ........ contact normal pointing from box A to B.
//       normalWorld ... contact normal pointing from reference box to incident box.

if (numberOfContacts <= MaxNumberOfContacts)
{
  // Add all contacts to contact set.
  for (int i = 0; i < numberOfContacts; i++)
  {
    float penetrationDepth = penetrationDepths[i];

    // Position is between the positions of the box surfaces.
    Vector3F position = contacts3D[i] + poseR.Position + normalWorld * (penetrationDepth / 2);

    Contact contact = ContactHelper.CreateContact(contactSet, position, normal, penetrationDepth, false);
    ContactHelper.Merge(contactSet, contact, type, CollisionDetection.ContactPositionTolerance);
  }
}
else
{
  // Reduce number of contacts, keep the contact with the max penetration depth.
  int indexOfDeepest = 0;
  float maxPenetrationDepth = penetrationDepths[0];
  for (int i = 1; i < numberOfContacts; i++)
  {
    float penetrationDepth = penetrationDepths[i];
    if (penetrationDepth > maxPenetrationDepth)
    {
      maxPenetrationDepth = penetrationDepth;
      indexOfDeepest = i;
    }
  }

  List<int> indicesOfContacts = ReduceContacts(contacts2D, indexOfDeepest, MaxNumberOfContacts);

  // Add selected contacts to contact set.
  numberOfContacts = indicesOfContacts.Count;
  for (int i = 0; i < numberOfContacts; i++)
  {
    int index = indicesOfContacts[i];
    float penetrationDepth = penetrationDepths[index];

    // Position is between the positions of the box surfaces.
    Vector3F position = contacts3D[index] + poseR.Position + normalWorld * (penetrationDepth / 2);

    Contact contact = ContactHelper.CreateContact(contactSet, position, normal, penetrationDepths[index], false);
    ContactHelper.Merge(contactSet, contact, type, CollisionDetection.ContactPositionTolerance);
  }

  DigitalRune.ResourcePools<int>.Lists.Recycle(indicesOfContacts);
}

// Return pooled lists. (quad is presumably recycled inside
// ClipQuadrilateralAgainstRectangle — TODO confirm, otherwise it leaks from the pool.)
DigitalRune.ResourcePools<Vector2F>.Lists.Recycle(contacts2D);
DigitalRune.ResourcePools<Vector3F>.Lists.Recycle(contacts3D);
DigitalRune.ResourcePools<float>.Lists.Recycle(penetrationDepths);
}
}
// NOTE(review): Commented-out property kept for reference. It would return the relative
// Euler angles between the anchor orientations of BodyA and BodyB.
//public Vector3F RelativePosition
//{
//  get
//  {
//    if (BodyA == null)
//      throw new PhysicsException("BodyA must not be null.");
//    if (BodyB == null)
//      throw new PhysicsException("BodyB must not be null.");

//    // Anchor orientation in world space.
//    Matrix33F anchorOrientationA = BodyA.Pose.Orientation * AnchorOrientationALocal;
//    Matrix33F anchorOrientationB = BodyB.Pose.Orientation * AnchorOrientationBLocal;

//    // Anchor orientation of B relative to A.
//    Matrix33F relativeOrientation = anchorOrientationA.Transposed * anchorOrientationB;

//    // The Euler angles.
//    Vector3F angles = GetAngles(relativeOrientation);
//    return angles;
//  }
//}
#endregion


//--------------------------------------------------------------
#region Creation & Cleanup
//--------------------------------------------------------------
#endregion


//--------------------------------------------------------------
#region Methods
//--------------------------------------------------------------

/// <inheritdoc/>
protected override void OnSetup()
{
  // Sets up the two angular limit constraints of this joint:
  //   constraint 0 ... twist about the anchor x-axis, limited by Minimum[0]/Maximum[0].
  //   constraint 1 ... swing of the x-axis of B away from the x-axis of A, limited by a
  //                    direction-dependent cone angle interpolated from Minimum/Maximum.Y/Z.

  // Get anchor orientations in world space.
  Matrix33F anchorOrientationA = BodyA.Pose.Orientation * AnchorOrientationALocal;
  Matrix33F anchorOrientationB = BodyB.Pose.Orientation * AnchorOrientationBLocal;

  // Get the quaternion that rotates something from anchor orientation A to
  // anchor orientation B:
  //   QB = QTotal * QA
  //   => QTotal = QB * QA.Inverse
  QuaternionF total = QuaternionF.CreateRotation(anchorOrientationB * anchorOrientationA.Transposed);

  // Compute swing axis and angle: the shortest-arc rotation that maps the twist axis
  // (x-axis) of A onto the twist axis of B.
  Vector3F xAxisA = anchorOrientationA.GetColumn(0);
  Vector3F yAxisA = anchorOrientationA.GetColumn(1);
  Vector3F xAxisB = anchorOrientationB.GetColumn(0);
  QuaternionF swing = QuaternionF.CreateRotation(xAxisA, xAxisB);

  Vector3F swingAxis = new Vector3F(swing.X, swing.Y, swing.Z);
  if (!swingAxis.TryNormalize())
  {
    // Zero swing (x-axes are aligned): any axis perpendicular to xAxisA works; use yAxisA.
    swingAxis = yAxisA;
  }

  float swingAngle = swing.Angle;

  Debug.Assert(
    0 <= swingAngle && swingAngle <= ConstantsF.Pi,
    "QuaternionF.CreateRotation(Vector3F, Vector3F) should only create rotations along the \"short arc\".");

  // The swing limits create a deformed cone. If we look onto the x-axis of A:
  // y-axis goes to the right. z-axis goes up.
  Vector3F xAxisBInAnchorA = Matrix33F.MultiplyTransposed(anchorOrientationA, xAxisB);
  float directionY = xAxisBInAnchorA.Y;
  float directionZ = xAxisBInAnchorA.Z;

  // In this plane, we have an ellipse with the formula:
  //   y²/a² + z²/b² = 1, where a and b are the ellipse radii.
  // We don't know the exact radii. We can compute them from the swing min/max angles.
  // To make it simpler, we do not use a flat ellipse. We use the swing z limit for a.
  // And the swing y limit for b.
  // We have a different ellipse for each quarter (quadrant), chosen by the sign of the
  // deflection direction.
  float ellipseA = (directionY > 0) ? Maximum.Z : -Minimum.Z;
  float ellipseB = (directionZ > 0) ? -Minimum.Y : Maximum.Y;

  // The angles are in radians: angle = bow/radius. So our a and b are on the unit sphere.
  // This creates an elliptic thing on the unit sphere - not in a plane. We don't care because
  // we only need a smooth interpolation between the swing y and z limits.

  // Now we look for the swing limit in the direction of xAxisB.
  // The next step can be derived from following formulas:
  //   y²/a² + z²/b² = 1                  The ellipse formula.
  //   slope = directionZ / directionY    The direction in which we need the limit.
  //   slope = z/y                        The (y,z) is the point on the ellipse in the given direction.
  //   swingLimit = sqrt(y² + z²)         This is the distance of (y,z) from the center.
  // Since our ellipse is on a sphere, swingLimit is an angle (= bow / radius).
  float swingLimit = ellipseB;   // Fallback for directionY == 0: deflection is purely along z.
  if (!Numeric.IsZero(directionY))
  {
    float slope = directionZ / directionY;
    float slopeSquared = slope * slope;
    float ellipseASquared = ellipseA * ellipseA;
    float ellipseBSquared = ellipseB * ellipseB;
    swingLimit = (float)Math.Sqrt((1 + slopeSquared) / (1 / ellipseASquared + slopeSquared / ellipseBSquared));

    // The ellipse normal would give us a better swing axis. But our computed swingAngle
    // is not correct for this axis...
    // Create a swing axis from the ellipse normal.
    //float k = ellipseASquared / ellipseBSquared * directionZ / directionY;
    //var normal = anchorOrientationA * new Vector3F(0, -k, 1).Normalized;
    //if (Vector3F.Dot(normal, swingAxis) < 0)
    //  swingAxis = -normal;
    //else
    //  swingAxis = normal;
  }

#if DEBUG
  //Debug.Assert(QuaternionF.Dot(swing, total) >= 0);
  // The swing axis must be perpendicular to the twist axis (x-axis of A).
  var swingAxisALocal = Matrix33F.MultiplyTransposed(anchorOrientationA, swingAxis);
  Debug.Assert(Numeric.IsZero(swingAxisALocal.X));
#endif

  // We define our rotations like this:
  // First we twist around the x-axis of A. Then we swing.
  //   QTotal = QSwing * QTwist
  //   => QSwing.Inverse * QTotal = QTwist
  QuaternionF twist = swing.Conjugated * total;
  twist.Normalize();

  // The quaternion returns an angle in the range [0, 2π].
  float twistAngle = twist.Angle;

  // The minimum and maximum twist limits are in the range [-π, π].
  if (twistAngle > ConstantsF.Pi)
  {
    // Convert the twistAngle to the range used by the twist limits.
    twistAngle = -(ConstantsF.TwoPi - twistAngle);
    Debug.Assert(-ConstantsF.TwoPi < twistAngle && twistAngle <= ConstantsF.TwoPi);
  }

  // The axis of the twist quaternion is parallel to xAxisA.
  Vector3F twistAxis = new Vector3F(twist.X, twist.Y, twist.Z);
  if (Vector3F.Dot(twistAxis, xAxisA) < 0)
  {
    // The axis of the twist quaternion points in the opposite direction of xAxisA.
    // The twist angle needs to be inverted.
    twistAngle = -twistAngle;
  }

  // Remember old limit states so we can decide whether to warmstart.
  LimitState oldXLimitState = _limitStates[0];
  LimitState oldYLimitState = _limitStates[1];

  // Note: All axes between xAxisA and xAxisB should be valid twist axes.
  SetupConstraint(0, twistAngle, xAxisB, Minimum[0], Maximum[0]);
  SetupConstraint(1, swingAngle, swingAxis, -swingLimit, swingLimit);

  // Warm-start the constraints if the previous limit state matches the new limit state.
  Warmstart(0, oldXLimitState);
  Warmstart(1, oldYLimitState);
}