/// <summary>
/// Identifies the points on the surface of the hull.
/// </summary>
/// <param name="points">List of points in the set.</param>
/// <param name="outputSurfacePoints">Unique points on the surface of the convex hull.</param>
public static void GetConvexHull(IList<Vector3> points, IList<Vector3> outputSurfacePoints)
{
    var rawPoints = new QuickList<Vector3>(BufferPools<Vector3>.Locking, BufferPool.GetPoolIndex(points.Count));
    rawPoints.AddRange(points);
    GetConvexHull(ref rawPoints, outputSurfacePoints);
    rawPoints.Dispose();
}
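//Usage sketch for the overload above. ConvexHullHelper is an assumed name for the containing type;
//the call itself matches the signature shown.
static void ConvexHullUsageSample()
{
    var points = new List<Vector3>
    {
        new Vector3(0, 0, 0), new Vector3(1, 0, 0), new Vector3(0, 1, 0),
        new Vector3(0, 0, 1),
        new Vector3(0.25f, 0.25f, 0.25f) //Interior point; should not appear in the output.
    };
    var surfacePoints = new List<Vector3>();
    ConvexHullHelper.GetConvexHull(points, surfacePoints);
    //surfacePoints now holds only the four tetrahedron vertices.
}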
/// <summary>
/// Casts a convex shape against the collidable.
/// </summary>
/// <param name="castShape">Shape to cast.</param>
/// <param name="startingTransform">Initial transform of the shape.</param>
/// <param name="sweep">Sweep to apply to the shape.</param>
/// <param name="hit">Hit data, if any.</param>
/// <returns>Whether or not the cast hit anything.</returns>
public override bool ConvexCast(CollisionShapes.ConvexShapes.ConvexShape castShape, ref RigidTransform startingTransform, ref Vector3f sweep, out RayHit hit)
{
    hit = new RayHit();
    BoundingBox localSpaceBoundingBox;
    castShape.GetSweptLocalBoundingBox(ref startingTransform, ref worldTransform, ref sweep, out localSpaceBoundingBox);
    var tri = PhysicsThreadResources.GetTriangle();
    var hitElements = new QuickList<int>(BufferPools<int>.Thread);
    if (Shape.GetOverlaps(localSpaceBoundingBox, ref hitElements))
    {
        hit.T = float.MaxValue;
        for (int i = 0; i < hitElements.Count; i++)
        {
            Shape.GetTriangle(hitElements.Elements[i], ref worldTransform, out tri.vA, out tri.vB, out tri.vC);
            Vector3f center;
            Vector3f.Add(ref tri.vA, ref tri.vB, out center);
            Vector3f.Add(ref center, ref tri.vC, out center);
            Vector3f.Multiply(ref center, 1f / 3f, out center);
            //Express the triangle relative to its centroid; the centroid becomes the triangle's transform position below.
            Vector3f.Subtract(ref tri.vA, ref center, out tri.vA);
            Vector3f.Subtract(ref tri.vB, ref center, out tri.vB);
            Vector3f.Subtract(ref tri.vC, ref center, out tri.vC);
            tri.MaximumRadius = tri.vA.LengthSquared;
            float radius = tri.vB.LengthSquared;
            if (tri.MaximumRadius < radius)
            {
                tri.MaximumRadius = radius;
            }
            radius = tri.vC.LengthSquared;
            if (tri.MaximumRadius < radius)
            {
                tri.MaximumRadius = radius;
            }
            tri.MaximumRadius = (float)Math.Sqrt(tri.MaximumRadius);
            tri.collisionMargin = 0;
            var triangleTransform = new RigidTransform { Orientation = Quaternion.Identity, Position = center };
            RayHit tempHit;
            if (MPRToolbox.Sweep(castShape, tri, ref sweep, ref Toolbox.ZeroVector, ref startingTransform, ref triangleTransform, out tempHit) && tempHit.T < hit.T)
            {
                hit = tempHit;
            }
        }
        tri.MaximumRadius = 0;
        PhysicsThreadResources.GiveBack(tri);
        hitElements.Dispose();
        return hit.T != float.MaxValue;
    }
    PhysicsThreadResources.GiveBack(tri);
    hitElements.Dispose();
    return false;
}
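//The centroid recentering above, restated in isolation: the vertices are rewritten relative to their
//centroid, and the centroid becomes the triangle's world position instead. This is only a sketch of the
//arithmetic using System.Numerics types; the helper name is illustrative.
static (Vector3 a, Vector3 b, Vector3 c, Vector3 center) RecenterOnCentroid(Vector3 a, Vector3 b, Vector3 c)
{
    var center = (a + b + c) * (1f / 3f);
    //The recentered vertices, combined with a transform positioned at the centroid, describe the same world-space triangle.
    return (a - center, b - center, c - center, center);
}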
unsafe void Refine(int workerIndex)
{
    var spareNodes = new QuickList<int>(Pool, 8);
    var subtreeReferences = new QuickList<int>(Pool, BufferPool<int>.GetPoolIndex(MaximumSubtrees));
    var treeletInternalNodes = new QuickList<int>(Pool, BufferPool<int>.GetPoolIndex(MaximumSubtrees));
    int[] buffer;
    MemoryRegion region;
    BinnedResources resources;
    CreateBinnedResources(Pool, MaximumSubtrees, out buffer, out region, out resources);
    int refineIndex;
    while ((refineIndex = Interlocked.Increment(ref RefineIndex)) < RefinementTargets.Count)
    {
        subtreeReferences.Count = 0;
        treeletInternalNodes.Count = 0;
        bool nodesInvalidated;
        Tree.BinnedRefine(RefinementTargets.Elements[refineIndex], ref subtreeReferences, MaximumSubtrees, ref treeletInternalNodes, ref spareNodes, ref resources, out nodesInvalidated);
        //Allow other refines to traverse this node.
        Tree.nodes[RefinementTargets.Elements[refineIndex]].RefineFlag = 0;
    }
    Tree.RemoveUnusedInternalNodes(ref spareNodes);
    region.Dispose();
    Pool.GiveBack(buffer);
    spareNodes.Dispose();
    subtreeReferences.Count = 0;
    subtreeReferences.Dispose();
    treeletInternalNodes.Count = 0;
    treeletInternalNodes.Dispose();
}
public unsafe void GetSelfOverlapsViaQueries<TResultList>(ref TResultList results) where TResultList : IList<Overlap>
{
    var leafQueryResults = new QuickList<int>(BufferPools<int>.Thread);
    for (int i = 0; i < leafCount; ++i)
    {
        var leaf = leaves[i];
        BoundingBoxWide leafBoundingBox;
        BoundingBoxWide.GetBoundingBox(ref Levels[leaf.LevelIndex].Nodes[leaf.NodeIndex].BoundingBoxes, leaf.ChildIndex, out leafBoundingBox);
        TestRecursive(0, 0, ref leafBoundingBox, ref leafQueryResults);
        for (int j = 0; j < leafQueryResults.Count; ++j)
        {
            //Only include results which are forward in the list to avoid self pairs and duplicates.
            if (i < leafQueryResults.Elements[j])
            {
                results.Add(new Overlap { A = i, B = leafQueryResults.Elements[j] });
            }
        }
        leafQueryResults.Count = 0;
    }
    leafQueryResults.Dispose();
    //Console.WriteLine("Query-based results:");
    //for (int i = 0; i < results.Count; ++i)
    //{
    //    Console.WriteLine($"{results[i].A}, {results[i].B}");
    //}
}
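//The i < leafQueryResults.Elements[j] filter above is the standard ordered-pair trick for symmetric self
//tests: each overlap is found twice (once from each leaf) plus once as a self pair, so keeping only pairs
//with ascending indices reports each overlap exactly once. A toy standalone illustration:
static List<(int A, int B)> OrderedPairFilterSample()
{
    var candidatesPerLeaf = new[] { new[] { 0, 1 }, new[] { 0, 1, 2 }, new[] { 1, 2 } }; //Stand-ins for per-leaf query results.
    var pairs = new List<(int A, int B)>();
    for (int i = 0; i < candidatesPerLeaf.Length; ++i)
        foreach (var j in candidatesPerLeaf[i])
            if (i < j) //Skips the self pair (i, i) and the mirrored duplicate (j, i).
                pairs.Add((i, j));
    return pairs; //(0, 1) and (1, 2); each overlap appears once.
}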
/// <summary>
/// Executes one pass of bottom-up refinement.
/// </summary>
public unsafe void BottomUpBinnedRefine(int maximumSubtrees)
{
    //If this works out, should probably choose a more efficient flagging approach.
    //Note the size: it needs to contain all possible internal nodes.
    //TODO: This is actually bugged, because the refinement flags do not update if the nodes move.
    //And the nodes CAN move.
    var pool = BufferPools<int>.Thread;
    var spareNodes = new QuickList<int>(pool, 8);
    var subtreeReferences = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    var treeletInternalNodes = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    int[] buffer;
    MemoryRegion region;
    BinnedResources resources;
    CreateBinnedResources(pool, maximumSubtrees, out buffer, out region, out resources);
    var refinementFlags = new int[leafCount * 2 - 1];
    for (int i = 0; i < nodeCount; ++i)
    {
        refinementFlags[i] = 0;
    }
    for (int i = 0; i < leafCount; ++i)
    {
        TryToBottomUpBinnedRefine(refinementFlags, leaves[i].NodeIndex, maximumSubtrees, ref subtreeReferences, ref treeletInternalNodes, ref resources, ref spareNodes);
        //Validate();
    }
    //Console.WriteLine($"root children: {nodes->ChildCount}");
    RemoveUnusedInternalNodes(ref spareNodes);
    spareNodes.Dispose();
    subtreeReferences.Dispose();
    //The treelet internal node list came from the same pool; return it too.
    treeletInternalNodes.Dispose();
    region.Dispose();
    pool.GiveBack(buffer);
}
public unsafe void PartialRefine(int offset, int skip, ref QuickList<int> spareNodes, int maximumSubtrees, ref QuickList<int> treeletInternalNodes, ref BinnedResources binnedResources, out bool nodesInvalidated)
{
    var subtreeReferences = new QuickList<int>(BufferPools<int>.Thread, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    PartialRefine(0, 0, offset, skip, ref subtreeReferences, ref treeletInternalNodes, ref spareNodes, maximumSubtrees, ref binnedResources, out nodesInvalidated);
    subtreeReferences.Dispose();
}
internal void Dispose()
{
    BlockedEdgeRegions.Dispose();
    BlockedVertexRegions.Dispose();
    VertexContacts.Dispose();
    EdgeContacts.Dispose();
}
unsafe void ValidateStaging(Node* stagingNodes, ref QuickList<int> subtreeNodePointers, int treeletParent, int treeletIndexInParent)
{
    int foundSubtrees, foundLeafCount;
    var collectedSubtreeReferences = new QuickList<int>(BufferPools<int>.Thread);
    var internalReferences = new QuickList<int>(BufferPools<int>.Thread);
    internalReferences.Add(0);
    ValidateStaging(stagingNodes, 0, ref subtreeNodePointers, ref collectedSubtreeReferences, ref internalReferences, out foundSubtrees, out foundLeafCount);
    if (treeletParent < -1 || treeletParent >= nodeCount)
    {
        throw new Exception("Bad treelet parent.");
    }
    if (treeletIndexInParent < -1 || (treeletParent >= 0 && treeletIndexInParent >= nodes[treeletParent].ChildCount))
    {
        throw new Exception("Bad treelet index in parent.");
    }
    if (treeletParent >= 0 && (&nodes[treeletParent].LeafCountA)[treeletIndexInParent] != foundLeafCount)
    {
        throw new Exception("Bad leaf count.");
    }
    if (subtreeNodePointers.Count != foundSubtrees)
    {
        throw new Exception("Bad subtree found count.");
    }
    for (int i = 0; i < collectedSubtreeReferences.Count; ++i)
    {
        if (!subtreeNodePointers.Contains(collectedSubtreeReferences[i]) || !collectedSubtreeReferences.Contains(subtreeNodePointers[i]))
        {
            throw new Exception("Bad subtree reference.");
        }
    }
    collectedSubtreeReferences.Dispose();
    internalReferences.Dispose();
}
public void Dispose<TPool>(TPool pool) where TPool : IMemoryPool<int, TSpan>
{
    AvailableIds.Dispose(pool);
    //This simplifies reuse and makes it harder to use invalid data.
    nextIndex = 0;
    AvailableIds = new QuickList<int, TSpan>();
}
protected override void UpdateContainedPairs()
{
    RigidTransform rtMesh = mesh.WorldTransform;
    RigidTransform rtMobile = mobile.WorldTransform;
    var overlaps = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
    mesh.ChunkShape.GetOverlaps(ref rtMesh, mobile.BoundingBox, ref overlaps);
    for (int i = 0; i < overlaps.Count; i++)
    {
        Vector3i pos = overlaps.Elements[i];
        var colBox = new ReusableGenericCollidable<ConvexShape>(mesh.ChunkShape.ShapeAt(pos.X, pos.Y, pos.Z, out Vector3 offs));
        Vector3 input = new Vector3(pos.X + offs.X, pos.Y + offs.Y, pos.Z + offs.Z);
        Vector3 transfd = Quaternion.Transform(input, rtMesh.Orientation);
        RigidTransform outp = new RigidTransform(transfd + rtMesh.Position, rtMesh.Orientation);
        colBox.WorldTransform = outp;
        colBox.UpdateBoundingBoxForTransform(ref outp);
        var overlaps2 = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
        mobile.ChunkShape.GetOverlaps(ref rtMobile, colBox.BoundingBox, ref overlaps2);
        for (int x = 0; x < overlaps2.Count; x++)
        {
            Vector3i pos2 = overlaps2.Elements[x];
            var colBox2 = new ReusableGenericCollidable<ConvexShape>(mobile.ChunkShape.ShapeAt(pos2.X, pos2.Y, pos2.Z, out Vector3 offs2));
            colBox2.SetEntity(mobile.Entity);
            Vector3 input2 = new Vector3(pos2.X + offs2.X, pos2.Y + offs2.Y, pos2.Z + offs2.Z);
            Vector3 transfd2 = Quaternion.Transform(input2, rtMobile.Orientation);
            RigidTransform outp2 = new RigidTransform(transfd2 + rtMobile.Position, rtMobile.Orientation);
            colBox2.WorldTransform = outp2;
            TryToAdd(colBox, colBox2, mesh.Entity?.Material ?? mobile.Entity?.Material);
        }
        overlaps2.Dispose();
    }
    overlaps.Dispose();
}
public void Flush(IThreadDispatcher threadDispatcher = null)
{
    var deterministic = threadDispatcher != null && Simulation.Deterministic;
    OnPreflush(threadDispatcher, deterministic);
    //var start = Stopwatch.GetTimestamp();
    flushJobs = new QuickList<NarrowPhaseFlushJob>(128, Pool);
    PairCache.PrepareFlushJobs(ref flushJobs);
    var removalBatchJobCount = ConstraintRemover.CreateFlushJobs(deterministic);
    //Note that we explicitly add the constraint remover jobs here.
    //The constraint remover can be used in two ways- sleeper style, and narrow phase style.
    //In sleeping, we're not actually removing constraints from the simulation completely, so it requires fewer jobs.
    //The constraint remover just lets you choose which jobs to call. The narrow phase needs all of them.
    flushJobs.EnsureCapacity(flushJobs.Count + removalBatchJobCount + 4, Pool);
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintsFromBodyLists });
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.ReturnConstraintHandles });
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintFromBatchReferencedHandles });
    if (Solver.ActiveSet.Batches.Count > Solver.FallbackBatchThreshold)
    {
        flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintsFromFallbackBatch });
    }
    for (int i = 0; i < removalBatchJobCount; ++i)
    {
        flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintFromTypeBatch, Index = i });
    }
    if (threadDispatcher == null)
    {
        for (int i = 0; i < flushJobs.Count; ++i)
        {
            ExecuteFlushJob(ref flushJobs[i], Pool);
        }
    }
    else
    {
        flushJobIndex = -1;
        this.threadDispatcher = threadDispatcher;
        threadDispatcher.DispatchWorkers(flushWorkerLoop);
        this.threadDispatcher = null;
    }
    //var end = Stopwatch.GetTimestamp();
    //Console.WriteLine($"Flush stage 3 time (us): {1e6 * (end - start) / Stopwatch.Frequency}");
    flushJobs.Dispose(Pool);
    PairCache.Postflush();
    ConstraintRemover.Postflush();
    OnPostflush(threadDispatcher);
}
/// <summary>
/// Identifies the points on the surface of the hull.
/// </summary>
/// <param name="points">List of points in the set.</param>
/// <param name="outputSurfacePoints">Unique points on the surface of the convex hull.</param>
public static void GetConvexHull(ref QuickList<Vector3> points, IList<Vector3> outputSurfacePoints)
{
    var indices = new QuickList<int>(BufferPools<int>.Locking, BufferPool.GetPoolIndex(points.Count * 3));
    GetConvexHull(ref points, ref indices, outputSurfacePoints);
    indices.Dispose();
}
private static void RemoveInsidePoints(ref QuickList<Vector3> points, ref QuickList<int> triangleIndices, ref QuickList<int> outsidePoints)
{
    var insidePoints = new QuickList<int>(BufferPools<int>.Locking);
    //We're going to remove points from this list as we go to prune it down to the truly inner points.
    insidePoints.AddRange(outsidePoints);
    outsidePoints.Clear();
    for (int i = 0; i < triangleIndices.Count && insidePoints.Count > 0; i += 3)
    {
        //Compute the triangle's plane in point-normal representation to test other points against.
        Vector3 normal;
        FindNormal(ref triangleIndices, ref points, i, out normal);
        Vector3 p = points.Elements[triangleIndices.Elements[i]];
        for (int j = insidePoints.Count - 1; j >= 0; --j)
        {
            //Offset from the triangle to the current point, tested against the normal, determines if the current point is visible
            //from the triangle face.
            Vector3 offset = points.Elements[insidePoints.Elements[j]] - p;
            float dot = Vector3.Dot(offset, normal);
            //If it's visible, then it's outside!
            if (dot > 0)
            {
                //This point is known to be on the outside; put it on the outside!
                outsidePoints.Add(insidePoints.Elements[j]);
                insidePoints.FastRemoveAt(j);
            }
        }
    }
    insidePoints.Dispose();
}
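//The visibility test above is a signed plane distance check: a point q is outside the plane through p with
//outward normal n exactly when dot(q - p, n) > 0. Standalone restatement (the helper name is illustrative):
static bool IsVisibleFromFace(Vector3 q, Vector3 trianglePoint, Vector3 outwardNormal)
{
    return Vector3.Dot(q - trianglePoint, outwardNormal) > 0;
}
//For the plane y = 0 with an upward normal, (0, 1, 0) is visible and (0, -1, 0) is not:
//IsVisibleFromFace(new Vector3(0, 1, 0), Vector3.Zero, Vector3.UnitY) == true.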
public override void Update(Window window, Camera camera, Input input, float dt)
{
    for (int iterationIndex = 0; iterationIndex < 100; ++iterationIndex)
    {
        var bodyIndicesToDeactivate = new QuickList<int>(Simulation.Bodies.ActiveSet.Count, BufferPool);
        for (int i = 0; i < Simulation.Bodies.ActiveSet.Count; ++i)
        {
            bodyIndicesToDeactivate.AllocateUnsafely() = i;
        }
        Simulation.Sleeper.Sleep(ref bodyIndicesToDeactivate);
        bodyIndicesToDeactivate.Dispose(BufferPool);
        var setsToActivate = new QuickList<int>(Simulation.Bodies.Sets.Length, BufferPool);
        for (int i = 1; i < Simulation.Bodies.Sets.Length; ++i)
        {
            if (Simulation.Bodies.Sets[i].Allocated)
            {
                setsToActivate.AllocateUnsafely() = i;
            }
        }
        Simulation.Awakener.AwakenSets(ref setsToActivate);
        setsToActivate.Dispose(BufferPool);
    }
    base.Update(window, camera, input, dt);
}
public void Flush(IThreadDispatcher threadDispatcher = null, bool deterministic = false)
{
    OnPreflush(threadDispatcher, deterministic);
    //var start = Stopwatch.GetTimestamp();
    QuickList<NarrowPhaseFlushJob, Buffer<NarrowPhaseFlushJob>>.Create(Pool.SpecializeFor<NarrowPhaseFlushJob>(), 128, out flushJobs);
    PairCache.PrepareFlushJobs(ref flushJobs);
    //We indirectly pass the determinism state; it's used by the constraint remover bookkeeping.
    this.deterministic = deterministic;
    ConstraintRemover.CreateFlushJobs(ref flushJobs);
    if (threadDispatcher == null)
    {
        for (int i = 0; i < flushJobs.Count; ++i)
        {
            ExecuteFlushJob(ref flushJobs[i]);
        }
    }
    else
    {
        flushJobIndex = -1;
        threadDispatcher.DispatchWorkers(flushWorkerLoop);
    }
    //var end = Stopwatch.GetTimestamp();
    //Console.WriteLine($"Flush stage 3 time (us): {1e6 * (end - start) / Stopwatch.Frequency}");
    flushJobs.Dispose(Pool.SpecializeFor<NarrowPhaseFlushJob>());
    PairCache.Postflush();
    ConstraintRemover.Postflush();
    OnPostflush(threadDispatcher);
}
unsafe void RecursiveRefine(int nodeIndex, int maximumSubtrees, ref int treeSizeSeed, ref QuickList<int> treeletInternalNodes, ref QuickList<int> spareNodes, ref BinnedResources binnedResources, out bool nodesInvalidated)
{
    var subtreeReferences = new QuickList<int>(BufferPools<int>.Thread, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    //Vary the size between 0.5 and 1 times the maximumSubtrees.
    ulong halfMaximumSubtrees = (ulong)(maximumSubtrees / 2);
    ++treeSizeSeed;
    var size = ((ulong)(treeSizeSeed * treeSizeSeed) * 413158511UL + 735632797UL) % halfMaximumSubtrees;
    var targetSubtreeCount = (int)(size + halfMaximumSubtrees);
    nodesInvalidated = false;
    bool invalidated;
    BinnedRefine(nodeIndex, ref subtreeReferences, targetSubtreeCount, ref treeletInternalNodes, ref spareNodes, ref binnedResources, out invalidated);
    if (invalidated)
    {
        nodesInvalidated = true;
    }
    for (int i = 0; i < subtreeReferences.Count; ++i)
    {
        if (subtreeReferences.Elements[i] >= 0)
        {
            RecursiveRefine(subtreeReferences.Elements[i], maximumSubtrees, ref treeSizeSeed, ref treeletInternalNodes, ref spareNodes, ref binnedResources, out invalidated);
            if (invalidated)
            {
                nodesInvalidated = true;
            }
        }
    }
    subtreeReferences.Count = 0;
    subtreeReferences.Dispose();
}
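//The treelet size selection above, extracted for clarity: square the seed, scramble it with a multiply-add
//of two large constants, and wrap into [maximumSubtrees / 2, maximumSubtrees). Deterministic and cheap, but
//varied enough to avoid refining the same treelet boundaries every pass.
static int TargetSubtreeCountSample(int maximumSubtrees, int treeSizeSeed)
{
    ulong halfMaximumSubtrees = (ulong)(maximumSubtrees / 2);
    var size = ((ulong)(treeSizeSeed * treeSizeSeed) * 413158511UL + 735632797UL) % halfMaximumSubtrees;
    return (int)(size + halfMaximumSubtrees);
}
//TargetSubtreeCountSample(1024, 3) == 900, for example; successive seeds walk around the interval.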
public override void Update(double dt)
{
    RigidTransform transform = new RigidTransform(mesh.Position);
    RigidTransform convexTransform = convex.WorldTransform;
    ContactRefresher.ContactRefresh(contacts, supplementData, ref convexTransform, ref transform, contactIndicesToRemove);
    RemoveQueuedContacts();
    var overlaps = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
    mesh.ChunkShape.GetOverlaps(mesh.Position, convex.BoundingBox, ref overlaps);
    var candidatesToAdd = new QuickList<ContactData>(BufferPools<ContactData>.Thread, BufferPool<int>.GetPoolIndex(overlaps.Count));
    for (int i = 0; i < overlaps.Count; i++)
    {
        if (!ActivePairs.TryGetValue(overlaps.Elements[i], out GeneralConvexPairTester manifold))
        {
            manifold = GetPair(ref overlaps.Elements[i]);
        }
        else
        {
            ActivePairs.FastRemove(overlaps.Elements[i]);
        }
        activePairsBackBuffer.Add(overlaps.Elements[i], manifold);
        if (manifold.GenerateContactCandidate(out ContactData contactCandidate))
        {
            candidatesToAdd.Add(ref contactCandidate);
        }
    }
    overlaps.Dispose();
    //Any pairs still in ActivePairs were not overlapping this frame; return them, then swap in the back buffer.
    for (int i = ActivePairs.Count - 1; i >= 0; i--)
    {
        ReturnPair(ActivePairs.Values[i]);
        ActivePairs.FastRemove(ActivePairs.Keys[i]);
    }
    var temp = ActivePairs;
    ActivePairs = activePairsBackBuffer;
    activePairsBackBuffer = temp;
    if (contacts.Count + candidatesToAdd.Count > 4)
    {
        var reducedCandidates = new QuickList<ContactData>(BufferPools<ContactData>.Thread, 3);
        ContactReducer.ReduceContacts(contacts, ref candidatesToAdd, contactIndicesToRemove, ref reducedCandidates);
        RemoveQueuedContacts();
        for (int i = reducedCandidates.Count - 1; i >= 0; i--)
        {
            Add(ref reducedCandidates.Elements[i]);
            reducedCandidates.RemoveAt(i);
        }
        reducedCandidates.Dispose();
    }
    else if (candidatesToAdd.Count > 0)
    {
        for (int i = 0; i < candidatesToAdd.Count; i++)
        {
            Add(ref candidatesToAdd.Elements[i]);
        }
    }
    candidatesToAdd.Dispose();
}
/// <summary>
/// Cleans up after a multithreaded self test.
/// </summary>
public void CompleteSelfTest()
{
    //Note that a tree with 0 or 1 entries won't have any jobs.
    if (jobs.Span.Allocated)
    {
        jobs.Dispose(Pool);
    }
}
/// <summary>
/// Cleans up after a multithreaded self test.
/// </summary>
public void CompleteTest()
{
    //Note that we don't allocate a job list if there aren't any jobs.
    if (jobs.Span.Allocated)
    {
        jobs.Dispose(Pool.SpecializeFor<Job>());
    }
}
public unsafe void TopDownAgglomerativeRefine()
{
    var spareNodes = new QuickList<int>(BufferPools<int>.Thread, 8);
    TopDownAgglomerativeRefine(0, ref spareNodes);
    RemoveUnusedInternalNodes(ref spareNodes);
    spareNodes.Dispose();
}
public void Flush(IThreadDispatcher threadDispatcher = null, bool deterministic = false)
{
    OnPreflush(threadDispatcher, deterministic);
    //var start = Stopwatch.GetTimestamp();
    var jobPool = Pool.SpecializeFor<NarrowPhaseFlushJob>();
    QuickList<NarrowPhaseFlushJob, Buffer<NarrowPhaseFlushJob>>.Create(jobPool, 128, out flushJobs);
    PairCache.PrepareFlushJobs(ref flushJobs);
    //We indirectly pass the determinism state; it's used by the constraint remover bookkeeping.
    this.deterministic = deterministic;
    var removalBatchJobCount = ConstraintRemover.CreateFlushJobs();
    //Note that we explicitly add the constraint remover jobs here.
    //The constraint remover can be used in two ways- deactivation style, and narrow phase style.
    //In deactivation, we're not actually removing constraints from the simulation completely, so it requires fewer jobs.
    //The constraint remover just lets you choose which jobs to call. The narrow phase needs all of them.
    flushJobs.EnsureCapacity(flushJobs.Count + removalBatchJobCount + 3, jobPool);
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintsFromBodyLists });
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.ReturnConstraintHandles });
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintFromBatchReferencedHandles });
    for (int i = 0; i < removalBatchJobCount; ++i)
    {
        flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintFromTypeBatch, Index = i });
    }
    if (threadDispatcher == null)
    {
        for (int i = 0; i < flushJobs.Count; ++i)
        {
            ExecuteFlushJob(ref flushJobs[i], Pool);
        }
    }
    else
    {
        flushJobIndex = -1;
        this.threadDispatcher = threadDispatcher;
        threadDispatcher.DispatchWorkers(flushWorkerLoop);
        this.threadDispatcher = null;
    }
    //var end = Stopwatch.GetTimestamp();
    //Console.WriteLine($"Flush stage 3 time (us): {1e6 * (end - start) / Stopwatch.Frequency}");
    flushJobs.Dispose(Pool.SpecializeFor<NarrowPhaseFlushJob>());
    PairCache.Postflush();
    ConstraintRemover.Postflush();
    OnPostflush(threadDispatcher);
}
/// <summary>
/// Removes redundant points. Two points are redundant if they occupy the same hash grid cell.
/// </summary>
/// <param name="points">List of points to prune.</param>
/// <param name="cellSize">Size of cells to determine redundancy.</param>
public static void RemoveRedundantPoints(IList<Vector3> points, double cellSize)
{
    var rawPoints = new QuickList<Vector3>(BufferPools<Vector3>.Locking, BufferPool.GetPoolIndex(points.Count));
    rawPoints.AddRange(points);
    RemoveRedundantPoints(ref rawPoints, cellSize);
    points.Clear();
    for (int i = 0; i < rawPoints.Count; ++i)
    {
        points.Add(rawPoints.Elements[i]);
    }
    rawPoints.Dispose();
}
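//Usage sketch for the pruning overload above (ConvexHullHelper is an assumed containing type name).
//With a cell size of 0.01, points that fall into the same hash cell collapse to a single representative.
static void RemoveRedundantPointsSample()
{
    var points = new List<Vector3>
    {
        new Vector3(0, 0, 0),
        new Vector3(0.001f, 0, 0), //Shares a 0.01-wide hash cell with the first point.
        new Vector3(1, 0, 0)
    };
    ConvexHullHelper.RemoveRedundantPoints(points, 0.01);
    //points now holds two entries: one representative near the origin and the point at (1, 0, 0).
}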
/// <summary>
/// Wakes up all bodies and constraints within a set. Doesn't do anything if the set is awake (index zero).
/// </summary>
/// <param name="setIndex">Index of the set to awaken.</param>
public void AwakenSet(int setIndex)
{
    if (setIndex > 0)
    {
        ValidateSleepingSetIndex(setIndex);
        //TODO: Some fairly pointless work here- spans or other approaches could help with the API.
        var list = new QuickList<int>(1, pool);
        list.AddUnsafely(setIndex);
        AwakenSets(ref list);
        list.Dispose(pool);
    }
}
/// <summary>
/// Identifies the points on the surface of the hull.
/// </summary>
/// <param name="points">List of points in the set.</param>
/// <param name="outputTriangleIndices">List of indices into the input point set composing the triangulated surface of the convex hull.
/// Each group of 3 indices represents a triangle on the surface of the hull.</param>
/// <param name="outputSurfacePoints">Unique points on the surface of the convex hull.</param>
public static void GetConvexHull(IList<Vector3> points, IList<int> outputTriangleIndices, IList<Vector3> outputSurfacePoints)
{
    var rawPoints = new QuickList<Vector3>(BufferPools<Vector3>.Locking, BufferPool.GetPoolIndex(points.Count));
    var rawIndices = new QuickList<int>(BufferPools<int>.Locking, BufferPool.GetPoolIndex(points.Count * 3));
    rawPoints.AddRange(points);
    GetConvexHull(ref rawPoints, ref rawIndices, outputSurfacePoints);
    rawPoints.Dispose();
    for (int i = 0; i < rawIndices.Count; i++)
    {
        outputTriangleIndices.Add(rawIndices[i]);
    }
    rawIndices.Dispose();
}
/// <summary>
/// Releases all resources held by the context.
/// </summary>
public void Dispose()
{
    if (!disposed)
    {
        CleanUp();
        NodePairsToTest.Count = 0;
        NodePairsToTest.Dispose();
        for (int i = 0; i < ThreadCount; ++i)
        {
            WorkerOverlaps[i].Count = 0;
            WorkerOverlaps[i].Dispose();
        }
    }
}
/// <summary>
/// Awakens a list of set indices.
/// </summary>
/// <param name="setIndices">List of set indices to wake up.</param>
/// <param name="threadDispatcher">Thread dispatcher to use when waking the bodies. Pass null to run on a single thread.</param>
public void AwakenSets(ref QuickList<int> setIndices, IThreadDispatcher threadDispatcher = null)
{
    var uniqueSetIndices = new QuickList<int>(setIndices.Count, pool);
    var uniqueSet = new IndexSet(pool, bodies.Sets.Length);
    AccumulateUniqueIndices(ref setIndices, ref uniqueSet, ref uniqueSetIndices);
    uniqueSet.Dispose(pool);
    //Note that we use the same codepath as multithreading, we just don't use a multithreaded dispatch to execute jobs.
    //TODO: It would probably be a good idea to add a little heuristic to avoid doing multithreaded dispatches if there are only like 5 total bodies.
    //Shouldn't matter too much- the threaded variant should only really be used when doing big batched changes, so having a fixed constant cost isn't that bad.
    int threadCount = threadDispatcher == null ? 1 : threadDispatcher.ThreadCount;
    //Note that direct wakes always reset activity states. I suspect this is sufficiently universal that no one will ever want the alternative,
    //even though the narrowphase does avoid resetting activity states for the sake of faster resleeping when possible.
    var (phaseOneJobCount, phaseTwoJobCount) = PrepareJobs(ref uniqueSetIndices, true, threadCount);
    if (threadCount > 1)
    {
        this.jobIndex = -1;
        this.jobCount = phaseOneJobCount;
        threadDispatcher.DispatchWorkers(phaseOneWorkerDelegate);
    }
    else
    {
        for (int i = 0; i < phaseOneJobCount; ++i)
        {
            ExecutePhaseOneJob(i);
        }
    }
    if (threadCount > 1)
    {
        this.jobIndex = -1;
        this.jobCount = phaseTwoJobCount;
        threadDispatcher.DispatchWorkers(phaseTwoWorkerDelegate);
    }
    else
    {
        for (int i = 0; i < phaseTwoJobCount; ++i)
        {
            ExecutePhaseTwoJob(i);
        }
    }
    DisposeForCompletedAwakenings(ref uniqueSetIndices);
    uniqueSetIndices.Dispose(pool);
}
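//Caller sketch mirroring what AwakenSet does above. The awakener and pool instances are assumed to come
//from the surrounding simulation, and the IslandAwakener parameter type follows the surrounding code's
//naming; duplicate indices are filtered internally by AccumulateUniqueIndices.
static void AwakenSetsSample(IslandAwakener awakener, BufferPool pool)
{
    var setIndices = new QuickList<int>(2, pool);
    setIndices.AddUnsafely(2);
    setIndices.AddUnsafely(5);
    awakener.AwakenSets(ref setIndices); //Null dispatcher: both phases run on the calling thread.
    setIndices.Dispose(pool);
}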
public void CleanUp()
{
    RefitNodes.Dispose();
    for (int i = 0; i < RefinementCandidates.Count; ++i)
    {
        RefinementCandidates.Elements[i].Count = 0;
        RefinementCandidates.Elements[i].Dispose();
    }
    RefinementCandidates.Clear();
    RefinementTargets.Count = 0;
    RefinementTargets.Dispose();
    CacheOptimizeStarts.Count = 0;
    CacheOptimizeStarts.Dispose();
}
protected override void UpdateContainedPairs(float dt)
{
    var overlappedElements = new QuickList<int>(BufferPools<int>.Thread);
    BoundingBox localBoundingBox;
    System.Numerics.Vector3 sweep;
    Vector3Ex.Multiply(ref mobileMesh.entity.linearVelocity, dt, out sweep);
    mobileMesh.Shape.GetSweptLocalBoundingBox(ref mobileMesh.worldTransform, ref mesh.worldTransform, ref sweep, out localBoundingBox);
    mesh.Shape.GetOverlaps(localBoundingBox, ref overlappedElements);
    for (int i = 0; i < overlappedElements.Count; i++)
    {
        TryToAdd(overlappedElements.Elements[i]);
    }
    overlappedElements.Dispose();
}
protected override void UpdateContainedPairs()
{
    RigidTransform rt = mesh.WorldTransform;
    var overlaps = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
    mesh.ChunkShape.GetOverlaps(ref rt, convex.BoundingBox, ref overlaps);
    for (int i = 0; i < overlaps.Count; i++)
    {
        Vector3i pos = overlaps.Elements[i];
        var colBox = new ReusableGenericCollidable<ConvexShape>(mesh.ChunkShape.ShapeAt(pos.X, pos.Y, pos.Z, out Vector3 offs));
        colBox.SetEntity(mesh.Entity);
        Vector3 input = new Vector3(pos.X + offs.X, pos.Y + offs.Y, pos.Z + offs.Z);
        Vector3 transfd = Quaternion.Transform(input, rt.Orientation);
        RigidTransform outp = new RigidTransform(transfd + rt.Position, rt.Orientation);
        colBox.WorldTransform = outp;
        TryToAdd(colBox, convex, mesh.Entity?.Material, convex.Entity?.Material);
    }
    overlaps.Dispose();
}
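//The per-cell transform above is a plain local-to-world composition: integer cell coordinate plus the
//shape's local offset, rotated by the mesh orientation, then translated by the mesh position. Restated
//with System.Numerics types (the method name is illustrative):
static Vector3 CellShapeWorldPosition(Vector3 cellIndex, Vector3 localOffset, Quaternion meshOrientation, Vector3 meshPosition)
{
    var local = cellIndex + localOffset; //Position of the cell's shape in mesh-local space.
    return Vector3.Transform(local, meshOrientation) + meshPosition; //Rotate, then translate.
}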
public unsafe void TopDownBinnedRefine(int maximumSubtrees)
{
    var pool = BufferPools<int>.Thread;
    var spareNodes = new QuickList<int>(pool, 8);
    var subtreeReferences = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    var treeletInternalNodes = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    int[] buffer;
    MemoryRegion region;
    BinnedResources resources;
    CreateBinnedResources(pool, maximumSubtrees, out buffer, out region, out resources);
    TopDownBinnedRefine(0, maximumSubtrees, ref subtreeReferences, ref treeletInternalNodes, ref spareNodes, ref resources);
    RemoveUnusedInternalNodes(ref spareNodes);
    region.Dispose();
    pool.GiveBack(buffer);
    spareNodes.Dispose();
    subtreeReferences.Dispose();
    //The treelet internal node list came from the same pool; return it too.
    treeletInternalNodes.Dispose();
}
protected override void UpdateContainedPairs()
{
    RigidTransform rtMesh = mesh.WorldTransform;
    RigidTransform rtMobile = mobile.WorldTransform;
    var overlaps = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
    mesh.ChunkShape.GetOverlaps(ref rtMesh, mobile.BoundingBox, ref overlaps);
    for (int i = 0; i < overlaps.Count; i++)
    {
        Vector3i pos = overlaps.Elements[i];
        Vector3 offs;
        var colBox = new ReusableGenericCollidable<ConvexShape>(mesh.ChunkShape.ShapeAt(pos.X, pos.Y, pos.Z, out offs));
        Vector3 input = new Vector3(pos.X + offs.X, pos.Y + offs.Y, pos.Z + offs.Z);
        Vector3 transfd = Quaternion.Transform(input, rtMesh.Orientation);
        RigidTransform outp = new RigidTransform(transfd + rtMesh.Position, rtMesh.Orientation);
        colBox.WorldTransform = outp;
        colBox.UpdateBoundingBoxForTransform(ref outp);
        var overlaps2 = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
        mobile.ChunkShape.GetOverlaps(ref rtMobile, colBox.BoundingBox, ref overlaps2);
        for (int x = 0; x < overlaps2.Count; x++)
        {
            Vector3i pos2 = overlaps2.Elements[x];
            Vector3 offs2;
            var colBox2 = new ReusableGenericCollidable<ConvexShape>(mobile.ChunkShape.ShapeAt(pos2.X, pos2.Y, pos2.Z, out offs2));
            colBox2.SetEntity(mobile.Entity);
            Vector3 input2 = new Vector3(pos2.X + offs2.X, pos2.Y + offs2.Y, pos2.Z + offs2.Z);
            Vector3 transfd2 = Quaternion.Transform(input2, rtMobile.Orientation);
            RigidTransform outp2 = new RigidTransform(transfd2 + rtMobile.Position, rtMobile.Orientation);
            colBox2.WorldTransform = outp2;
            TryToAdd(colBox, colBox2, mesh.Entity != null ? mesh.Entity.Material : null, mobile.Entity != null ? mobile.Entity.Material : null);
        }
        overlaps2.Dispose();
    }
    overlaps.Dispose();
}
protected override void UpdateContainedPairs(float dt)
{
    var overlappedElements = new QuickList<int>(BufferPools<int>.Thread);
    BoundingBox localBoundingBox;
    Vector3 sweep;
    Vector3.Multiply(ref mobileMesh.entity.linearVelocity, dt, out sweep);
    mobileMesh.Shape.GetSweptLocalBoundingBox(ref mobileMesh.worldTransform, ref mesh.worldTransform, ref sweep, out localBoundingBox);
    mesh.Shape.GetOverlaps(localBoundingBox, ref overlappedElements);
    for (int i = 0; i < overlappedElements.Count; i++)
    {
        TryToAdd(overlappedElements.Elements[i]);
    }
    overlappedElements.Dispose();
}
/// <summary>
/// Determines if a down step is possible, and if so, computes the location to which the character should teleport.
/// </summary>
/// <param name="newPosition">New position the character should teleport to if the down step is accepted.</param>
/// <returns>Whether or not the character should attempt to step down.</returns>
public bool TryToStepDown(out Vector3 newPosition)
{
    //Don't bother trying to step down if we already have a support contact or if the support ray doesn't have traction.
    if (!(SupportFinder.Supports.Count == 0 && SupportFinder.SupportRayData != null && SupportFinder.SupportRayData.Value.HasTraction))
    {
        newPosition = new Vector3();
        return false;
    }
    if (!(SupportFinder.SupportRayData.Value.HitData.T - SupportFinder.BottomDistance > minimumDownStepHeight))
    {
        //Don't do expensive stuff if it's, at most, a super tiny step that gravity will take care of.
        newPosition = new Vector3();
        return false;
    }
    //Predict a hit location based on the time of impact and the normal at the intersection.
    //Take into account the radius of the character (don't forget the collision margin!)
    Vector3 normal = SupportFinder.SupportRayData.Value.HitData.Normal;
    Vector3 down = characterBody.orientationMatrix.Down;
    RigidTransform transform = characterBody.CollisionInformation.WorldTransform;
    //We know that the closest point to the plane will be the extreme point in the plane's direction.
    //Use it as the ray origin.
    Ray ray;
    characterBody.CollisionInformation.Shape.GetExtremePoint(normal, ref transform, out ray.Position);
    ray.Direction = down;
    //Intersect the ray against the plane defined by the support hit.
    Vector3 intersection;
    Plane plane = new Plane(normal, Vector3.Dot(SupportFinder.SupportRayData.Value.HitData.Location, normal));
    Vector3 candidatePosition;
    //Define the interval bounds to be used later.
    //The words 'highest' and 'lowest' here refer to the position relative to the character's body.
    //The ray cast points downward relative to the character's body.
    float highestBound = 0;
    //The lowest possible distance is the ray distance plus the collision margin because the ray could theoretically be on the outskirts of the collision margin
    //where the shape would actually have to move more than the bottom distance difference would imply.
    //(Could compute the true lowest bound analytically based on the horizontal position of the ray...)
    float lowestBound = characterBody.CollisionInformation.Shape.CollisionMargin + SupportFinder.SupportRayData.Value.HitData.T - SupportFinder.BottomDistance;
    float currentOffset = lowestBound;
    float hintOffset;
    var tractionContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread);
    var supportContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread);
    var sideContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread);
    var headContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread);
    try
    {
        //This guess may either win immediately, or at least give us a better idea of where to search.
        float hitT;
        if (Toolbox.GetRayPlaneIntersection(ref ray, ref plane, out hitT, out intersection))
        {
            currentOffset = hitT + CollisionDetectionSettings.AllowedPenetration * 0.5f;
            candidatePosition = characterBody.Position + down * currentOffset;
            switch (TryDownStepPosition(ref candidatePosition, ref down, ref tractionContacts, ref supportContacts, ref sideContacts, ref headContacts, out hintOffset))
            {
                case CharacterContactPositionState.Accepted:
                    currentOffset += hintOffset;
                    //Only use the new position location if the movement distance was the right size.
                    if (currentOffset > minimumDownStepHeight && currentOffset < maximumStepHeight)
                    {
                        newPosition = characterBody.Position + currentOffset * down;
                        return true;
                    }
                    else
                    {
                        newPosition = new Vector3();
                        return false;
                    }
                case CharacterContactPositionState.NoHit:
                    highestBound = currentOffset + hintOffset;
                    currentOffset = (lowestBound + currentOffset) * .5f;
                    break;
                case CharacterContactPositionState.Obstructed:
                    lowestBound = currentOffset;
                    currentOffset = (highestBound + currentOffset) * .5f;
                    break;
                case CharacterContactPositionState.TooDeep:
                    currentOffset += hintOffset;
                    lowestBound = currentOffset;
                    break;
            }
        }
        //Our guess failed. Begin the regular process, starting at the time of impact of the ray itself.
        //Since we wouldn't be here unless there were no contacts at the body's current position,
        //testing the ray cast location gives us the second bound we need to do an informed binary search.
        int attempts = 0;
        //Don't keep querying indefinitely. If we fail to reach it in a few informed steps, it's probably not worth continuing.
        //The bound size check prevents the system from continuing to search a meaninglessly tiny interval.
        while (attempts++ < 5 && lowestBound - highestBound > Toolbox.BigEpsilon)
        {
            candidatePosition = characterBody.Position + currentOffset * down;
            switch (TryDownStepPosition(ref candidatePosition, ref down, ref tractionContacts, ref supportContacts, ref sideContacts, ref headContacts, out hintOffset))
            {
                case CharacterContactPositionState.Accepted:
                    currentOffset += hintOffset;
                    //Only use the new position location if the movement distance was the right size.
                    if (currentOffset > minimumDownStepHeight && currentOffset < maximumStepHeight)
                    {
                        newPosition = characterBody.Position + currentOffset * down;
                        return true;
                    }
                    else
                    {
                        newPosition = new Vector3();
                        return false;
                    }
                case CharacterContactPositionState.NoHit:
                    highestBound = currentOffset + hintOffset;
                    currentOffset = (lowestBound + highestBound) * .5f;
                    break;
                case CharacterContactPositionState.Obstructed:
                    lowestBound = currentOffset;
                    currentOffset = (highestBound + lowestBound) * .5f;
                    break;
                case CharacterContactPositionState.TooDeep:
                    currentOffset += hintOffset;
                    lowestBound = currentOffset;
                    break;
            }
        }
        //Couldn't find a candidate.
        newPosition = new Vector3();
        return false;
    }
    finally
    {
        tractionContacts.Dispose();
        supportContacts.Dispose();
        sideContacts.Dispose();
        headContacts.Dispose();
    }
}
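//The loop above is an informed binary search over a downward offset: each probe either accepts the
//candidate, proves it is too shallow (no hit), or proves it is too deep (obstructed), and hint offsets bias
//the interval updates. Stripped to its skeleton (the probe semantics here are illustrative):
static bool InformedBinarySearchSample(Func<float, int> probe, float highestBound, float lowestBound, float epsilon, out float result)
{
    //Offsets grow downward, so highestBound <= lowestBound numerically.
    float current = lowestBound;
    int attempts = 0;
    while (attempts++ < 5 && lowestBound - highestBound > epsilon)
    {
        int state = probe(current);
        if (state == 0)
        {
            result = current; //Accepted.
            return true;
        }
        if (state < 0)
            highestBound = current; //No hit: the surface lies below this offset; search deeper.
        else
            lowestBound = current; //Obstructed: this offset is too deep; search shallower.
        current = (highestBound + lowestBound) * 0.5f;
    }
    result = 0;
    return false;
}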
/// <summary>
/// Attempts to change the stance of the character if possible.
/// </summary>
/// <param name="newPosition">New position the character should teleport to if the stance change requires a position adjustment.</param>
/// <returns>Whether or not the character was able to change its stance.</returns>
public bool UpdateStance(out Vector3 newPosition)
{
    var currentPosition = characterBody.position;
    var down = characterBody.orientationMatrix.Down;
    newPosition = new Vector3();
    if (CurrentStance != DesiredStance)
    {
        if (CurrentStance == Stance.Standing && DesiredStance == Stance.Crouching)
        {
            //Crouch. There's no complicated logic to crouching; you don't need to validate
            //a crouch before doing it.
            //You do, however, do a different kind of crouch if you're airborne.
            if (SupportFinder.HasSupport)
            {
                //Move the character towards the ground.
                newPosition = currentPosition + down * ((StandingHeight - CrouchingHeight) * .5f);
                characterBody.Height = CrouchingHeight;
                CurrentStance = Stance.Crouching;
            }
            else
            {
                //We're in the air, so we don't have to change the position at all- just change the height.
                //No queries needed since we're only shrinking.
                newPosition = currentPosition;
                characterBody.Height = CrouchingHeight;
                CurrentStance = Stance.Crouching;
            }
            return true;
        }
        else if (CurrentStance == Stance.Crouching && DesiredStance == Stance.Standing)
        {
            var tractionContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread);
            var supportContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread);
            var sideContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread);
            var headContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread);
            try
            {
                //Attempt to stand.
                if (SupportFinder.HasSupport)
                {
                    //Standing requires a query to verify that the new state is safe.
                    //TODO: State queries can be expensive if the character is crouching beneath something really detailed.
                    //There are some situations where you may want to do an upwards-pointing ray cast first. If it hits something,
                    //there's no need to do the full query.
                    newPosition = currentPosition - down * ((StandingHeight - CrouchingHeight) * .5f);
                    PrepareQueryObject(standingQueryObject, ref newPosition);
                    QueryManager.QueryContacts(standingQueryObject, ref tractionContacts, ref supportContacts, ref sideContacts, ref headContacts);
                    if (IsObstructed(ref sideContacts, ref headContacts))
                    {
                        //Can't stand up if something is in the way!
                        return false;
                    }
                    characterBody.Height = StandingHeight;
                    CurrentStance = Stance.Standing;
                    return true;
                }
                else
                {
                    //This is a complicated case. We must perform a semi-downstep query.
                    //It's different than a downstep because the head may be obstructed as well.
                    float highestBound = 0;
                    float lowestBound = (StandingHeight - CrouchingHeight) * .5f;
                    float currentOffset = lowestBound;
                    float maximum = lowestBound;
                    int attempts = 0;
                    //Don't keep querying indefinitely. If we fail to reach it in a few informed steps, it's probably not worth continuing.
                    //The bound size check prevents the system from continuing to search a meaninglessly tiny interval.
                    while (attempts++ < 5 && lowestBound - highestBound > Toolbox.BigEpsilon)
                    {
                        Vector3 candidatePosition = currentPosition + currentOffset * down;
                        float hintOffset;
                        switch (TrySupportLocation(ref candidatePosition, out hintOffset, ref tractionContacts, ref supportContacts, ref sideContacts, ref headContacts))
                        {
                            case CharacterContactPositionState.Accepted:
                                currentOffset += hintOffset;
                                //Only use the new position location if the movement distance was the right size.
                                if (currentOffset > 0 && currentOffset < maximum)
                                {
                                    newPosition = currentPosition + currentOffset * down;
                                    characterBody.Height = StandingHeight;
                                    CurrentStance = Stance.Standing;
                                    return true;
                                }
                                else
                                {
                                    return false;
                                }
                            case CharacterContactPositionState.NoHit:
                                highestBound = currentOffset + hintOffset;
                                currentOffset = (lowestBound + highestBound) * .5f;
                                break;
                            case CharacterContactPositionState.Obstructed:
                                lowestBound = currentOffset;
                                currentOffset = (highestBound + lowestBound) * .5f;
                                break;
                            case CharacterContactPositionState.TooDeep:
                                currentOffset += hintOffset;
                                lowestBound = currentOffset;
                                break;
                        }
                    }
                    //Couldn't find a hit. Go ahead and stand!
                    newPosition = currentPosition;
                    characterBody.Height = StandingHeight;
                    CurrentStance = Stance.Standing;
                    return true;
                }
            }
            finally
            {
                tractionContacts.Dispose();
                supportContacts.Dispose();
                sideContacts.Dispose();
                headContacts.Dispose();
            }
        }
    }
    return false;
}
public unsafe int RefitAndRefine(int frameIndex, float refineAggressivenessScale = 1, float cacheOptimizeAggressivenessScale = 1)
{
    //Don't proceed if the tree is empty.
    if (leafCount == 0)
        return 0;
    var pool = BufferPools<int>.Locking;
    int maximumSubtrees, estimatedRefinementCandidateCount, leafCountThreshold;
    GetRefitAndMarkTuning(out maximumSubtrees, out estimatedRefinementCandidateCount, out leafCountThreshold);
    var refinementCandidates = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(estimatedRefinementCandidateCount));
    //Collect the refinement candidates.
    var costChange = RefitAndMark(leafCountThreshold, ref refinementCandidates);
    int targetRefinementCount, period, offset;
    GetRefineTuning(frameIndex, refinementCandidates.Count, refineAggressivenessScale, costChange, 1, out targetRefinementCount, out period, out offset);
    var refinementTargets = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(targetRefinementCount));
    int actualRefinementTargetsCount = 0;
    int index = offset;
    for (int i = 0; i < targetRefinementCount - 1; ++i)
    {
        index += period;
        if (index >= refinementCandidates.Count)
            index -= refinementCandidates.Count;
        Debug.Assert(index < refinementCandidates.Count && index >= 0);
        refinementTargets.Elements[actualRefinementTargetsCount++] = refinementCandidates.Elements[index];
        nodes[refinementCandidates.Elements[index]].RefineFlag = 1;
    }
    refinementTargets.Count = actualRefinementTargetsCount;
    refinementCandidates.Count = 0;
    refinementCandidates.Dispose();
    //Always include the root if it wasn't already marked.
    if (nodes->RefineFlag == 0)
    {
        refinementTargets.Add(0);
        nodes->RefineFlag = 1;
        ++actualRefinementTargetsCount;
    }
    //Refine all marked targets.
    var spareNodes = new QuickList<int>(pool, 8);
    var subtreeReferences = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    var treeletInternalNodes = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    int[] buffer;
    MemoryRegion region;
    BinnedResources resources;
    CreateBinnedResources(pool, maximumSubtrees, out buffer, out region, out resources);
    for (int i = 0; i < refinementTargets.Count; ++i)
    {
        subtreeReferences.Count = 0;
        treeletInternalNodes.Count = 0;
        bool nodesInvalidated;
        BinnedRefine(refinementTargets.Elements[i], ref subtreeReferences, maximumSubtrees, ref treeletInternalNodes, ref spareNodes, ref resources, out nodesInvalidated);
        //TODO: Should this be moved into a post-loop? It could permit some double work, but that's not terrible.
        //It's not invalid from a multithreading perspective, either- setting the refine flag to zero is essentially an unlock.
        //If other threads don't see it updated due to cache issues, it doesn't really matter- it's not a signal or anything like that.
        nodes[refinementTargets.Elements[i]].RefineFlag = 0;
    }
    RemoveUnusedInternalNodes(ref spareNodes);
    region.Dispose();
    pool.GiveBack(buffer);
    spareNodes.Dispose();
    subtreeReferences.Count = 0;
    subtreeReferences.Dispose();
    treeletInternalNodes.Count = 0;
    treeletInternalNodes.Dispose();
    refinementTargets.Count = 0;
    refinementTargets.Dispose();
    var cacheOptimizeCount = GetCacheOptimizeTuning(maximumSubtrees, costChange, cacheOptimizeAggressivenessScale);
    var startIndex = (int)(((long)frameIndex * cacheOptimizeCount) % nodeCount);
    //We could wrap around. But we could also not do that because it doesn't really matter!
    //var startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
    var end = Math.Min(NodeCount, startIndex + cacheOptimizeCount);
    for (int i = startIndex; i < end; ++i)
    {
        IncrementalCacheOptimize(i);
    }
    //var endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
    //Console.WriteLine($"Cache optimize time: {endTime - startTime}");
    return actualRefinementTargetsCount;
}
public static unsafe TestResults TestSingleArray(TestCollidable[] leaves, BoundingBox[] queries, BoundingBox positionBounds,
    int queryCount, int selfTestCount, int refitCount, int frameCount, float dt, ParallelLooper looper)
{
    {
        var warmLeaves = GetLeaves(10, 10, 10, 10, 10);
        Tree tree = new Tree();
        //for (int i = 0; i < leaves.Length; ++i)
        //{
        //    BoundingBox box;
        //    leaves[i].GetBoundingBox(out box);
        //    //tree.Insert(i, ref box);
        //    tree.AddGlobal(i, ref box);
        //}
        int[] leafIds = new int[warmLeaves.Length];
        BoundingBox[] leafBounds = new BoundingBox[warmLeaves.Length];
        for (int i = 0; i < warmLeaves.Length; ++i)
        {
            leafIds[i] = i;
            warmLeaves[i].GetBoundingBox(out leafBounds[i]);
        }
        //tree.BuildMedianSplit(leafIds, leafBounds);
        //tree.BuildVolumeHeuristic(leafIds, leafBounds);
        tree.SweepBuild(leafIds, leafBounds);
        Console.WriteLine($"SingleArray Cachewarm Build: {tree.LeafCount}");
        tree.Refit();
        //tree.BottomUpAgglomerativeRefine();
        //tree.TopDownAgglomerativeRefine();
        //tree.BottomUpSweepRefine();
        //tree.TopDownSweepRefine();
        tree.RefitAndRefine(0);
        var context = new Tree.RefitAndRefineMultithreadedContext(tree);
        tree.RefitAndRefine(0, looper, context);
        var selfTestContext = new Tree.SelfTestMultithreadedContext(looper.ThreadCount, BufferPools<Overlap>.Locking);
        tree.GetSelfOverlaps(looper, selfTestContext);
        var list = new QuickList<int>(new BufferPool<int>());
        BoundingBox aabb = new BoundingBox { Min = new Vector3(0, 0, 0), Max = new Vector3(1, 1, 1) };
        tree.QueryRecursive(ref aabb, ref list);
        list.Dispose();
        var overlaps = new QuickList<Overlap>(new BufferPool<Overlap>());
        tree.GetSelfOverlaps(ref overlaps);
        overlaps = new QuickList<Overlap>(new BufferPool<Overlap>());
        tree.GetSelfOverlapsArityDedicated(ref overlaps);
        tree.IncrementalCacheOptimize(0);
        overlaps = new QuickList<Overlap>(new BufferPool<Overlap>());
        tree.GetSelfOverlapsViaQueries(ref overlaps);
        Console.WriteLine($"Cachewarm overlaps: {overlaps.Count}");
        tree.Dispose();
    }
    {
        Console.WriteLine($"SingleArray arity: {Tree.ChildrenCapacity}");
        Tree tree = new Tree(Math.Max(1, leaves.Length));
        var startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        for (int i = 0; i < leaves.Length; ++i)
        {
            var leafIndex = (int)((982451653L * i) % leaves.Length);
            BoundingBox box;
            leaves[leafIndex].GetBoundingBox(out box);
            tree.Add(leafIndex, ref box);
            //tree.AddGlobal(leafIndex, ref box);
        }
        //int[] leafIds = new int[leaves.Length];
        //BoundingBox[] leafBounds = new BoundingBox[leaves.Length];
        //for (int i = 0; i < leaves.Length; ++i)
        //{
        //    leafIds[i] = i;
        //    leaves[i].GetBoundingBox(out leafBounds[i]);
        //}
        ////tree.BuildMedianSplit(leafIds, leafBounds);
        ////tree.BuildVolumeHeuristic(leafIds, leafBounds);
        //tree.SweepBuild(leafIds, leafBounds);
        var endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        Console.WriteLine($"SingleArray Build Time: {endTime - startTime}, depth: {tree.ComputeMaximumDepth()}");
        int nodeCount, childCount;
        tree.MeasureNodeOccupancy(out nodeCount, out childCount);
        Console.WriteLine($"SingleArray Occupancy: {childCount / (double)nodeCount}");
        Console.WriteLine($"Cost metric: {tree.MeasureCostMetric()}");
        Console.WriteLine($"Cache Quality: {tree.MeasureCacheQuality()}");
        tree.Validate();
        startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        for (int i = 0; i < refitCount; ++i)
        {
            //for (int i = 0; i < tree.LeafCount; ++i)
            //{
            //    BoundingBox box;
            //    leaves[tree.Leaves[i].Id].GetBoundingBox(out box);
            //    tree.UpdateLeafBoundingBox(i, ref box);
            //}
            tree.Refit();
        }
        endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        Console.WriteLine($"SingleArray Refit Time1: {endTime - startTime}");
        var overlaps = new QuickList<Overlap>(new BufferPool<Overlap>());
        startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        for (int i = 0; i < selfTestCount; ++i)
        {
            overlaps.Count = 0;
            tree.GetSelfOverlaps(ref overlaps);
        }
        endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        Console.WriteLine($"SingleArray SelfTree Time1: {endTime - startTime}, overlaps: {overlaps.Count}");
        int[] buffer;
        MemoryRegion region;
        BinnedResources resources;
        const int maximumSubtrees = 262144;
        var spareNodes = new QuickList<int>(new BufferPool<int>(), 8);
        var subtreeReferences = new QuickList<int>(BufferPools<int>.Thread, BufferPool<int>.GetPoolIndex(maximumSubtrees));
        var treeletInternalNodes = new QuickList<int>(BufferPools<int>.Thread, BufferPool<int>.GetPoolIndex(maximumSubtrees));
        Tree.CreateBinnedResources(BufferPools<int>.Thread, maximumSubtrees, out buffer, out region, out resources);
        bool nodesInvalidated;
        overlaps = new QuickList<Overlap>(new BufferPool<Overlap>());
        var refineContext = new Tree.RefitAndRefineMultithreadedContext(tree);
        var selfTestContext = new Tree.SelfTestMultithreadedContext(looper.ThreadCount, BufferPools<Overlap>.Locking);
        var visitedNodes = new QuickSet<int>(BufferPools<int>.Thread, BufferPools<int>.Thread);
        //**************** Dynamic Testing
        Random random = new Random(5);
        TestResults results = new TestResults("New", frameCount);
        startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        for (int t = 0; t < frameCount; ++t)
        {
            //Update the positions of objects.
            for (int i = 0; i < tree.LeafCount; ++i)
            {
                var leafId = tree.Leaves[i].Id;
                var leaf = leaves[leafId];
                //Bounce off the walls.
                if (leaf.Position.X < positionBounds.Min.X && leaf.Velocity.X < 0)
                    leaf.Velocity.X = -leaf.Velocity.X;
                if (leaf.Position.Y < positionBounds.Min.Y && leaf.Velocity.Y < 0)
                    leaf.Velocity.Y = -leaf.Velocity.Y;
                if (leaf.Position.Z < positionBounds.Min.Z && leaf.Velocity.Z < 0)
                    leaf.Velocity.Z = -leaf.Velocity.Z;
                if (leaf.Position.X > positionBounds.Max.X && leaf.Velocity.X > 0)
                    leaf.Velocity.X = -leaf.Velocity.X;
                if (leaf.Position.Y > positionBounds.Max.Y && leaf.Velocity.Y > 0)
                    leaf.Velocity.Y = -leaf.Velocity.Y;
                if (leaf.Position.Z > positionBounds.Max.Z && leaf.Velocity.Z > 0)
                    leaf.Velocity.Z = -leaf.Velocity.Z;
                leaf.Position += leaf.Velocity * dt;
                BoundingBox boundingBox;
                leaf.GetBoundingBox(out boundingBox);
                tree.SetLeafBoundingBox(i, ref boundingBox);
            }
            var refineStartTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
            int refinementCount;
            if (looper.ThreadCount > 1)
                refinementCount = tree.RefitAndRefine(t, looper, refineContext);
            else
                refinementCount = tree.RefitAndRefine(t);
            var refineEndTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
            int overlapsCount;
            if (looper.ThreadCount > 1)
            {
                tree.GetSelfOverlaps(looper, selfTestContext);
                overlapsCount = 0;
                for (int i = 0; i < selfTestContext.WorkerOverlaps.Length; ++i)
                {
                    overlapsCount += selfTestContext.WorkerOverlaps[i].Count;
                }
            }
            else
            {
                overlaps.Count = 0;
                tree.GetSelfOverlapsArityDedicated(ref overlaps);
                overlapsCount = overlaps.Count;
            }
            var testEndTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
            results.Refine[t] = 1000 * (refineEndTime - refineStartTime);
            results.SelfTest[t] = 1000 * (testEndTime - refineEndTime);
            results.Total[t] = 1000 * (testEndTime - refineStartTime);
            results.OverlapCounts[t] = overlapsCount;
            results.TreeCosts[t] = tree.MeasureCostMetric();
            if (t % 16 == 0)
            {
                Console.WriteLine($"_________________{t}_________________");
                Console.WriteLine($"Refinement count: {refinementCount}");
                Console.WriteLine($"Refine time: {results.Refine[t]}");
                Console.WriteLine($"Test time: {results.SelfTest[t]}");
                Console.WriteLine($"TIME: {results.Total[t]}");
                Console.WriteLine($"Cost metric: {results.TreeCosts[t]}");
                Console.WriteLine($"Overlaps: {results.OverlapCounts[t]}");
                Console.WriteLine($"Cache Quality: {tree.MeasureCacheQuality()}");
                GC.Collect();
            }
            tree.Validate();
        }
        endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        tree.Validate();
        Console.WriteLine($"SingleArray Cache Quality: {tree.MeasureCacheQuality()}");
        Console.WriteLine($"Cost metric: {tree.MeasureCostMetric()}");
        region.Dispose();
        tree.RemoveUnusedInternalNodes(ref spareNodes);
        BufferPools<int>.Thread.GiveBack(buffer);
        //********************
        tree.MeasureNodeOccupancy(out nodeCount, out childCount);
        Console.WriteLine($"SingleArray Occupancy: {childCount / (double)nodeCount}");
        startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        for (int i = 0; i < refitCount; ++i)
        {
            //for (int i = 0; i < tree.LeafCount; ++i)
            //{
            //    BoundingBox box;
            //    leaves[tree.Leaves[i].Id].GetBoundingBox(out box);
            //    tree.UpdateLeafBoundingBox(i, ref box);
            //}
            tree.Refit();
        }
        endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        Console.WriteLine($"SingleArray Refit Time2: {endTime - startTime}");
        var list = new QuickList<int>(new BufferPool<int>());
        var queryMask = queries.Length - 1;
        startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
        for (int i = 0; i < queryCount; ++i)
        {
            list.Count = 0;
            //tree.Query2(ref queries[i & queryMask], ref list);
            tree.QueryRecursive(ref queries[i & queryMask], ref list);
        }
        endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency;
= Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"SingleArray Query Time: {endTime - startTime}, overlaps: {list.Count}"); Array.Clear(list.Elements, 0, list.Elements.Length); list.Dispose(); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < selfTestCount; ++i) { overlaps.Count = 0; tree.GetSelfOverlaps(ref overlaps); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"SingleArray SelfTree Time: {endTime - startTime}, overlaps: {overlaps.Count}"); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < selfTestCount; ++i) { overlaps.Count = 0; tree.GetSelfOverlapsArityDedicated(ref overlaps); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"SingleArray Arity-Dedicated SelfTree Time: {endTime - startTime}, overlaps: {overlaps.Count}"); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < selfTestCount; ++i) { overlaps.Count = 0; tree.GetSelfOverlapsViaQueries(ref overlaps); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"SingleArray SelfQuery Time: {endTime - startTime}, overlaps: {overlaps.Count}"); tree.Dispose(); return results; } }
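//Illustrative driver for TestSingleArray (not from the source): the looper setup and
//parameter values are assumptions. The query array length should be a power of two,
//since the query loop indexes with (i & (queries.Length - 1)).
public static void RunSingleArrayBenchmark()
{
    var looper = new ParallelLooper();
    for (int i = 0; i < Environment.ProcessorCount; ++i)
    {
        looper.AddThread();
    }
    var leaves = GetLeaves(32, 32, 32, 10, 10);
    var queries = new BoundingBox[1024]; //Fill with test volumes before running.
    var positionBounds = new BoundingBox { Min = new Vector3(-100, -100, -100), Max = new Vector3(100, 100, 100) };
    var results = TestSingleArray(leaves, queries, positionBounds,
        queryCount: 1 << 20, selfTestCount: 16, refitCount: 16,
        frameCount: 256, dt: 1f / 60f, looper: looper);
    //results.Refine, results.SelfTest, and results.Total hold per-frame millisecond timings.
}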
public static void TestVectorized(TestCollidable[] leaves, BoundingBox[] queries, int queryCount, int selfTestCount, int refitCount) { { var warmLeaves = GetLeaves(8, 8, 8, 10, 10); Tree<TestCollidable> tree = new Tree<TestCollidable>(); for (int i = 0; i < warmLeaves.Length; ++i) { tree.Insert(warmLeaves[i]); } Console.WriteLine($"Cachewarm Build: {tree.LeafCount}"); tree.RefitLeaves(); var list = new QuickList<int>(new BufferPool<int>()); BoundingBox aabb = new BoundingBox { Min = new Vector3(0, 0, 0), Max = new Vector3(1, 1, 1) }; tree.Query(ref aabb, ref list); list.Dispose(); var overlaps = new QuickList<Overlap>(new BufferPool<Overlap>()); tree.GetSelfOverlaps(ref overlaps); Console.WriteLine($"Warm overlaps: {overlaps.Count}"); overlaps = new QuickList<Overlap>(new BufferPool<Overlap>()); tree.GetSelfOverlapsViaQueries(ref overlaps); Console.WriteLine($"Warm overlaps: {overlaps.Count}"); overlaps = new QuickList<Overlap>(new BufferPool<Overlap>()); tree.GetSelfOverlapsViaStreamingQueries(ref overlaps); Console.WriteLine($"Warm overlaps: {overlaps.Count}"); } { Tree<TestCollidable> tree = new Tree<TestCollidable>(leaves.Length, 32); var startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < leaves.Length; ++i) { tree.Insert(leaves[(int)((982451653L * i) % leaves.Length)]); //tree.Insert(leaves[i]); } var endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"Build Time: {endTime - startTime}, depth: {tree.MaximumDepth}"); int nodeCount, childCount; tree.MeasureNodeOccupancy(out nodeCount, out childCount); Console.WriteLine($"Occupancy: {childCount / (double)nodeCount}"); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < refitCount; ++i) { //tree.RefitLeaves(); tree.Refit(); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"Refit Time: {endTime - startTime}"); var list = new QuickList<int>(new BufferPool<int>()); var queryMask = queries.Length - 1; startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < queryCount; ++i) { list.Count = 0; //tree.Query(ref queries[i & queryMask], ref list); tree.QueryRecursive(ref queries[i & queryMask], ref list); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"Query Time: {endTime - startTime}, overlaps: {list.Count}"); list.Dispose(); var overlaps = new QuickList<Overlap>(new BufferPool<Overlap>()); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < selfTestCount; ++i) { overlaps.Count = 0; tree.GetSelfOverlaps(ref overlaps); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"SelfTree Time: {endTime - startTime}, overlaps: {overlaps.Count}"); overlaps = new QuickList<Overlap>(new BufferPool<Overlap>()); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < selfTestCount; ++i) { overlaps.Count = 0; tree.GetSelfOverlapsViaQueries(ref overlaps); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"SelfQuery Time: {endTime - startTime}, overlaps: {overlaps.Count}"); overlaps = new QuickList<Overlap>(new BufferPool<Overlap>()); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < selfTestCount; ++i) { overlaps.Count = 0; tree.GetSelfOverlapsViaStreamingQueries(ref overlaps); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; 
Console.WriteLine($"StreamingSelfQuery Time: {endTime - startTime}, overlaps: {overlaps.Count}"); } }
protected override void UpdateContainedPairs()
{
    //Find all chunk cells overlapped by the convex's bounding box.
    RigidTransform rt = mesh.WorldTransform;
    QuickList<Vector3i> overlaps = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
    mesh.ChunkShape.GetOverlaps(ref rt, convex.BoundingBox, ref overlaps);
    for (int i = 0; i < overlaps.Count; i++)
    {
        //Wrap the cell's shape in a reusable collidable positioned at the cell's world location.
        Vector3i pos = overlaps.Elements[i];
        Vector3 offset;
        ReusableGenericCollidable<ConvexShape> collidable = new ReusableGenericCollidable<ConvexShape>(mesh.ChunkShape.ShapeAt(pos.X, pos.Y, pos.Z, out offset));
        collidable.SetEntity(mesh.Entity);
        Vector3 localPosition = new Vector3(pos.X + offset.X, pos.Y + offset.Y, pos.Z + offset.Z);
        Vector3 rotated = Quaternion.Transform(localPosition, rt.Orientation);
        collidable.WorldTransform = new RigidTransform(rotated + rt.Position, rt.Orientation);
        TryToAdd(collidable, convex, mesh.Entity != null ? mesh.Entity.Material : null, convex.Entity != null ? convex.Entity.Material : null);
    }
    overlaps.Dispose();
}
private static void ComputeInitialTetrahedron(ref QuickList<Vector3> points, ref QuickList<int> outsidePointCandidates, ref QuickList<int> triangleIndices, out Vector3 centroid) { //Find four points on the hull. //We'll start with using the x axis to identify two points on the hull. int a, b, c, d; Vector3 direction; //Find the extreme points along the x axis. float minimumX = float.MaxValue, maximumX = -float.MaxValue; int minimumXIndex = 0, maximumXIndex = 0; for (int i = 0; i < points.Count; ++i) { var v = points.Elements[i]; if (v.X > maximumX) { maximumX = v.X; maximumXIndex = i; } else if (v.X < minimumX) { minimumX = v.X; minimumXIndex = i; } } a = minimumXIndex; b = maximumXIndex; //Check for redundancies.. if (a == b) throw new ArgumentException("Point set is degenerate; convex hulls must have volume."); //Now, use a second axis perpendicular to the two points we found. Vector3 ab = points.Elements[b] - points.Elements[a]; Vector3x.Cross(ref ab, ref Toolbox.UpVector, out direction); if (direction.LengthSquared() < Toolbox.Epsilon) Vector3x.Cross(ref ab, ref Toolbox.RightVector, out direction); float minimumDot, maximumDot; int minimumIndex, maximumIndex; GetExtremePoints(ref direction, ref points, out maximumDot, out minimumDot, out maximumIndex, out minimumIndex); //Compare the location of the extreme points to the location of the axis. float dot = Vector3.Dot(direction, points.Elements[a]); //Use the point further from the axis. if (Math.Abs(dot - minimumDot) > Math.Abs(dot - maximumDot)) { //In this case, we should use the minimum index. c = minimumIndex; } else { //In this case, we should use the maximum index. c = maximumIndex; } //Check for redundancies.. if (a == c || b == c) throw new ArgumentException("Point set is degenerate; convex hulls must have volume."); //Use a third axis perpendicular to the plane defined by the three unique points a, b, and c. Vector3 ac = points.Elements[c] - points.Elements[a]; Vector3x.Cross(ref ab, ref ac, out direction); GetExtremePoints(ref direction, ref points, out maximumDot, out minimumDot, out maximumIndex, out minimumIndex); //Compare the location of the extreme points to the location of the plane. dot = Vector3.Dot(direction, points.Elements[a]); //Use the point further from the plane. if (Math.Abs(dot - minimumDot) > Math.Abs(dot - maximumDot)) { //In this case, we should use the minimum index. d = minimumIndex; } else { //In this case, we should use the maximum index. d = maximumIndex; } //Check for redundancies.. if (a == d || b == d || c == d) throw new ArgumentException("Point set is degenerate; convex hulls must have volume."); //Add the triangles. triangleIndices.Add(a); triangleIndices.Add(b); triangleIndices.Add(c); triangleIndices.Add(a); triangleIndices.Add(b); triangleIndices.Add(d); triangleIndices.Add(a); triangleIndices.Add(c); triangleIndices.Add(d); triangleIndices.Add(b); triangleIndices.Add(c); triangleIndices.Add(d); //The centroid is guaranteed to be within the convex hull. It will be used to verify the windings of triangles throughout the hull process. centroid = (points.Elements[a] + points.Elements[b] + points.Elements[c] + points.Elements[d]) * 0.25f; for (int i = 0; i < triangleIndices.Count; i += 3) { var vA = points.Elements[triangleIndices.Elements[i]]; var vB = points.Elements[triangleIndices.Elements[i + 1]]; var vC = points.Elements[triangleIndices.Elements[i + 2]]; //Check the signed volume of a parallelepiped with the edges of this triangle and the centroid. 
Vector3 cross; ab = vB - vA; ac = vC - vA; Vector3x.Cross(ref ac, ref ab, out cross); Vector3 offset = vA - centroid; float volume = Vector3.Dot(offset, cross); //This volume/cross product could also be used to check for degeneracy, but we already tested for that. if (Math.Abs(volume) < Toolbox.BigEpsilon) { throw new ArgumentException("Point set is degenerate; convex hulls must have volume."); } if (volume < 0) { //If the signed volume is negative, that means the triangle's winding is opposite of what we want. //Flip it around! var temp = triangleIndices.Elements[i]; triangleIndices.Elements[i] = triangleIndices.Elements[i + 1]; triangleIndices.Elements[i + 1] = temp; } } //Points which belong to the tetrahedra are guaranteed to be 'in' the convex hull. Do not allow them to be considered. var tetrahedronIndices = new QuickList<int>(BufferPools<int>.Locking); tetrahedronIndices.Add(a); tetrahedronIndices.Add(b); tetrahedronIndices.Add(c); tetrahedronIndices.Add(d); //Sort the indices to allow a linear time loop. Array.Sort(tetrahedronIndices.Elements, 0, 4); int tetrahedronIndex = 0; for (int i = 0; i < points.Count; ++i) { if (tetrahedronIndex < 4 && i == tetrahedronIndices[tetrahedronIndex]) { //Don't add a tetrahedron index. Now that we've found this index, though, move on to the next one. ++tetrahedronIndex; } else { outsidePointCandidates.Add(i); } } tetrahedronIndices.Dispose(); }
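//Worked example of the winding test above (illustrative, using value-returning
//vector operations for brevity): for a tetrahedron face A=(0,0,0), B=(1,0,0),
//C=(0,1,0) with centroid (0.25, 0.25, 0.25):
//  cross(C - A, B - A) = (0, 0, -1)
//  dot(A - centroid, cross) = 0.25 > 0
//so this winding is kept; swapping B and C flips the sign and triggers the index swap.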
/// <summary> /// Casts a convex shape against the collidable. /// </summary> /// <param name="castShape">Shape to cast.</param> /// <param name="startingTransform">Initial transform of the shape.</param> /// <param name="sweep">Sweep to apply to the shape.</param> /// <param name="hit">Hit data, if any.</param> /// <returns>Whether or not the cast hit anything.</returns> public override bool ConvexCast(CollisionShapes.ConvexShapes.ConvexShape castShape, ref RigidTransform startingTransform, ref System.Numerics.Vector3 sweep, out RayHit hit) { hit = new RayHit(); BoundingBox localSpaceBoundingBox; castShape.GetSweptLocalBoundingBox(ref startingTransform, ref worldTransform, ref sweep, out localSpaceBoundingBox); var tri = PhysicsThreadResources.GetTriangle(); var hitElements = new QuickList<int>(BufferPools<int>.Thread); if (Shape.GetOverlaps(localSpaceBoundingBox, ref hitElements)) { hit.T = float.MaxValue; for (int i = 0; i < hitElements.Count; i++) { Shape.GetTriangle(hitElements.Elements[i], ref worldTransform, out tri.vA, out tri.vB, out tri.vC); System.Numerics.Vector3 center; Vector3Ex.Add(ref tri.vA, ref tri.vB, out center); Vector3Ex.Add(ref center, ref tri.vC, out center); Vector3Ex.Multiply(ref center, 1f / 3f, out center); Vector3Ex.Subtract(ref tri.vA, ref center, out tri.vA); Vector3Ex.Subtract(ref tri.vB, ref center, out tri.vB); Vector3Ex.Subtract(ref tri.vC, ref center, out tri.vC); tri.MaximumRadius = tri.vA.LengthSquared(); float radius = tri.vB.LengthSquared(); if (tri.MaximumRadius < radius) tri.MaximumRadius = radius; radius = tri.vC.LengthSquared(); if (tri.MaximumRadius < radius) tri.MaximumRadius = radius; tri.MaximumRadius = (float)Math.Sqrt(tri.MaximumRadius); tri.collisionMargin = 0; var triangleTransform = new RigidTransform { Orientation = System.Numerics.Quaternion.Identity, Position = center }; RayHit tempHit; if (MPRToolbox.Sweep(castShape, tri, ref sweep, ref Toolbox.ZeroVector, ref startingTransform, ref triangleTransform, out tempHit) && tempHit.T < hit.T) { hit = tempHit; } } tri.MaximumRadius = 0; PhysicsThreadResources.GiveBack(tri); hitElements.Dispose(); return hit.T != float.MaxValue; } PhysicsThreadResources.GiveBack(tri); hitElements.Dispose(); return false; }
/// <summary> /// Executes one pass of bottom-up refinement. /// </summary> public unsafe void BottomUpAgglomerativeRefine() { //If this works out, should probably choose a more efficient flagging approach. //Note the size: it needs to contain all possible internal nodes. //TODO: This is actually bugged, because the refinement flags do not update if the nodes move. //And the nodes CAN move. var spareNodes = new QuickList<int>(BufferPools<int>.Thread, 8); var refinementFlags = new int[leafCount * 2 - 1]; for (int i = 0; i < nodeCount; ++i) { refinementFlags[i] = 0; } for (int i = 0; i < leafCount; ++i) { TryToBottomUpAgglomerativeRefine(refinementFlags, leaves[i].NodeIndex, ref spareNodes); //Validate(); } //Console.WriteLine($"root children: {nodes->ChildCount}"); RemoveUnusedInternalNodes(ref spareNodes); spareNodes.Dispose(); }
/// <summary> /// Identifies the indices of points in a set which are on the outer convex hull of the set. /// </summary> /// <param name="points">List of points in the set.</param> /// <param name="outputTriangleIndices">List of indices into the input point set composing the triangulated surface of the convex hull. /// Each group of 3 indices represents a triangle on the surface of the hull.</param> public static void GetConvexHull(ref QuickList<Vector3> points, ref QuickList<int> outputTriangleIndices) { if (points.Count == 0) { throw new ArgumentException("Point set must have volume."); } var outsidePoints = new QuickList<int>(BufferPools<int>.Locking, BufferPool.GetPoolIndex(points.Count - 4)); //Build the initial tetrahedron. //It will also give us the location of a point which is guaranteed to be within the //final convex hull. We can use this point to calibrate the winding of triangles. //A set of outside point candidates (all points other than those composing the tetrahedron) will be returned in the outsidePoints list. //That list will then be further pruned by the RemoveInsidePoints call. Vector3 insidePoint; ComputeInitialTetrahedron(ref points, ref outsidePoints, ref outputTriangleIndices, out insidePoint); //Compute outside points. RemoveInsidePoints(ref points, ref outputTriangleIndices, ref outsidePoints); var edges = new QuickList<int>(BufferPools<int>.Locking); var toRemove = new QuickList<int>(BufferPools<int>.Locking); var newTriangles = new QuickList<int>(BufferPools<int>.Locking); //We're now ready to begin the main loop. while (outsidePoints.Count > 0) { //While the convex hull is incomplete... for (int k = 0; k < outputTriangleIndices.Count; k += 3) { //Find the normal of the triangle Vector3 normal; FindNormal(ref outputTriangleIndices, ref points, k, out normal); //Get the furthest point in the direction of the normal. int maxIndexInOutsideList = GetExtremePoint(ref normal, ref points, ref outsidePoints); int maxIndex = outsidePoints.Elements[maxIndexInOutsideList]; Vector3 maximum = points.Elements[maxIndex]; //If the point is beyond the current triangle, continue. Vector3 offset = maximum - points.Elements[outputTriangleIndices.Elements[k]]; float dot = Vector3.Dot(normal, offset); if (dot > 0) { //It's been picked! Remove the maximum point from the outside. outsidePoints.FastRemoveAt(maxIndexInOutsideList); //Remove any triangles that can see the point, including itself! edges.Clear(); toRemove.Clear(); for (int n = outputTriangleIndices.Count - 3; n >= 0; n -= 3) { //Go through each triangle, if it can be seen, delete it and use maintainEdge on its edges. if (IsTriangleVisibleFromPoint(ref outputTriangleIndices, ref points, n, ref maximum)) { //This triangle can see it! //TODO: CONSIDER CONSISTENT WINDING HAPPYTIMES MaintainEdge(outputTriangleIndices[n], outputTriangleIndices[n + 1], ref edges); MaintainEdge(outputTriangleIndices[n], outputTriangleIndices[n + 2], ref edges); MaintainEdge(outputTriangleIndices[n + 1], outputTriangleIndices[n + 2], ref edges); //Because fast removals are being used, the order is very important. //It's pulling indices in from the end of the list in order, and also ensuring //that we never issue a removal order beyond the end of the list. outputTriangleIndices.FastRemoveAt(n + 2); outputTriangleIndices.FastRemoveAt(n + 1); outputTriangleIndices.FastRemoveAt(n); } } //Create new triangles. for (int n = 0; n < edges.Count; n += 2) { //For each edge, create a triangle with the extreme point. 
newTriangles.Add(edges[n]); newTriangles.Add(edges[n + 1]); newTriangles.Add(maxIndex); } //Only verify the windings of the new triangles. VerifyWindings(ref newTriangles, ref points, ref insidePoint); outputTriangleIndices.AddRange(ref newTriangles); newTriangles.Count = 0; //Remove all points from the outsidePoints if they are inside the polyhedron RemoveInsidePoints(ref points, ref outputTriangleIndices, ref outsidePoints); //The list has been significantly messed with, so restart the loop. break; } } } outsidePoints.Dispose(); edges.Dispose(); toRemove.Dispose(); newTriangles.Dispose(); }
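//Usage sketch (hypothetical point data; the allocation pattern mirrors the other
//hull entry points in this file):
var points = new QuickList<Vector3>(BufferPools<Vector3>.Locking);
points.Add(new Vector3(0, 0, 0));
points.Add(new Vector3(1, 0, 0));
points.Add(new Vector3(0, 1, 0));
points.Add(new Vector3(0, 0, 1));
points.Add(new Vector3(0.25f, 0.25f, 0.25f)); //Interior point; excluded from the hull.
var triangleIndices = new QuickList<int>(BufferPools<int>.Locking);
GetConvexHull(ref points, ref triangleIndices);
for (int i = 0; i < triangleIndices.Count; i += 3)
{
    //Each consecutive index triple is one triangle on the hull surface.
    var a = points.Elements[triangleIndices[i]];
    var b = points.Elements[triangleIndices[i + 1]];
    var c = points.Elements[triangleIndices[i + 2]];
}
triangleIndices.Dispose();
points.Dispose();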
public unsafe void TopDownAgglomerativeRefine()
{
    var spareNodes = new QuickList<int>(BufferPools<int>.Thread, 8);
    TopDownAgglomerativeRefine(0, ref spareNodes);
    RemoveUnusedInternalNodes(ref spareNodes);
    spareNodes.Dispose();
}
public unsafe void PartialRefine(int offset, int skip, ref QuickList<int> spareNodes, int maximumSubtrees, ref QuickList<int> treeletInternalNodes, ref BinnedResources binnedResources, out bool nodesInvalidated)
{
    //Refines a strided subset of the tree's refinement candidates: offset selects the
    //start and skip the stride, so refinement work can be split across workers or frames.
    QuickList<int> subtreeReferences = new QuickList<int>(BufferPools<int>.Thread, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    PartialRefine(0, 0, offset, skip, ref subtreeReferences, ref treeletInternalNodes, ref spareNodes, maximumSubtrees, ref binnedResources, out nodesInvalidated);
    subtreeReferences.Dispose();
}
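//Distribution sketch (an assumption based on the offset/skip signature): each worker
//handles an interleaved slice, with offset = workerIndex and skip = workerCount.
void RefineWorker(int workerIndex, int workerCount, int maximumSubtrees)
{
    var spareNodes = new QuickList<int>(BufferPools<int>.Thread, 8);
    var treeletInternalNodes = new QuickList<int>(BufferPools<int>.Thread, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    int[] buffer;
    MemoryRegion region;
    BinnedResources resources;
    CreateBinnedResources(BufferPools<int>.Thread, maximumSubtrees, out buffer, out region, out resources);
    bool nodesInvalidated;
    PartialRefine(workerIndex, workerCount, ref spareNodes, maximumSubtrees, ref treeletInternalNodes, ref resources, out nodesInvalidated);
    //Return unused internal nodes and pooled buffers.
    RemoveUnusedInternalNodes(ref spareNodes);
    region.Dispose();
    BufferPools<int>.Thread.GiveBack(buffer);
    spareNodes.Dispose();
    treeletInternalNodes.Dispose();
}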
/// <summary> /// Collects a limited set of subtrees hanging from the specified node and performs a local treelet rebuild using a bottom-up agglomerative approach. /// </summary> /// <param name="nodeIndex">Root of the refinement treelet.</param> /// <param name="nodesInvalidated">True if the refinement process invalidated node pointers, false otherwise.</param> public unsafe void AgglomerativeRefine(int nodeIndex, ref QuickList<int> spareNodes, out bool nodesInvalidated) { var maximumSubtrees = ChildrenCapacity * ChildrenCapacity; var poolIndex = BufferPool<int>.GetPoolIndex(maximumSubtrees); var subtrees = new QuickList<int>(BufferPools<int>.Thread, poolIndex); var treeletInternalNodes = new QuickList<int>(BufferPools<int>.Thread, poolIndex); float originalTreeletCost; var entries = stackalloc SubtreeHeapEntry[maximumSubtrees]; CollectSubtrees(nodeIndex, maximumSubtrees, entries, ref subtrees, ref treeletInternalNodes, out originalTreeletCost); //We're going to create a little binary tree via agglomeration, and then we'll collapse it into an n-ary tree. //Note the size: we first put every possible subtree in, so subtrees.Count. //Then, we add up subtrees.Count - 1 internal nodes without removing earlier slots. int tempNodesCapacity = subtrees.Count * 2 - 1; var tempNodes = stackalloc TempNode[tempNodesCapacity]; int tempNodeCount = subtrees.Count; int remainingNodesCapacity = subtrees.Count; var remainingNodes = stackalloc int[remainingNodesCapacity]; int remainingNodesCount = subtrees.Count; for (int i = 0; i < subtrees.Count; ++i) { var tempNode = tempNodes + i; tempNode->A = Encode(i); if (subtrees.Elements[i] >= 0) { //It's an internal node, so look at the parent. var subtreeNode = nodes + subtrees.Elements[i]; tempNode->BoundingBox = (&nodes[subtreeNode->Parent].A)[subtreeNode->IndexInParent]; tempNode->LeafCount = (&nodes[subtreeNode->Parent].LeafCountA)[subtreeNode->IndexInParent]; } else { //It's a leaf node, so grab the bounding box from the owning node. var leafIndex = Encode(subtrees.Elements[i]); var leaf = leaves + leafIndex; var parentNode = nodes + leaf->NodeIndex; tempNode->BoundingBox = (&parentNode->A)[leaf->ChildIndex]; tempNode->LeafCount = 1; } //Add a reference to the remaining list. remainingNodes[i] = i; } while (remainingNodesCount >= 2) { //Determine which pair of subtrees has the smallest cost. //(Smallest absolute cost is used instead of *increase* in cost because absolute tends to move bigger objects up the tree, which is desirable.) float bestCost = float.MaxValue; int bestA = 0, bestB = 0; for (int i = 0; i < remainingNodesCount; ++i) { for (int j = i + 1; j < remainingNodesCount; ++j) { var nodeIndexA = remainingNodes[i]; var nodeIndexB = remainingNodes[j]; BoundingBox merged; BoundingBox.Merge(ref tempNodes[nodeIndexA].BoundingBox, ref tempNodes[nodeIndexB].BoundingBox, out merged); var cost = ComputeBoundsMetric(ref merged); if (cost < bestCost) { bestCost = cost; bestA = i; bestB = j; } } } { //Create a new temp node based on the best pair. TempNode newTempNode; newTempNode.A = remainingNodes[bestA]; newTempNode.B = remainingNodes[bestB]; //Remerging here may or may not be faster than repeatedly caching 'best' candidates from above. It is a really, really cheap operation, after all, apart from cache issues. 
BoundingBox.Merge(ref tempNodes[newTempNode.A].BoundingBox, ref tempNodes[newTempNode.B].BoundingBox, out newTempNode.BoundingBox); newTempNode.LeafCount = tempNodes[newTempNode.A].LeafCount + tempNodes[newTempNode.B].LeafCount; //Remove the best options from the list. //BestA is always lower than bestB, so remove bestB first to avoid corrupting bestA index. TempNode.FastRemoveAt(bestB, remainingNodes, ref remainingNodesCount); TempNode.FastRemoveAt(bestA, remainingNodes, ref remainingNodesCount); //Add the reference to the new node. var newIndex = TempNode.Add(ref newTempNode, tempNodes, ref tempNodeCount); remainingNodes[remainingNodesCount++] = newIndex; } } //The 2-ary proto-treelet is ready. //Collapse it into an n-ary tree. const int collapseCount = ChildrenCapacity == 32 ? 4 : ChildrenCapacity == 16 ? 3 : ChildrenCapacity == 8 ? 2 : ChildrenCapacity == 4 ? 1 : 0; //Remember: All positive indices in the tempnodes array refer to other temp nodes: they are internal references. Encoded references point back to indices in the subtrees list. Debug.Assert(remainingNodesCount == 1); int parent = nodes[nodeIndex].Parent; int indexInParent = nodes[nodeIndex].IndexInParent; var stagingNodeCapacity = maximumSubtrees - 1; var stagingNodes = stackalloc Node[maximumSubtrees - 1]; int stagingNodeCount = 0; float newTreeletCost; var stagingRootIndex = BuildStagingChild(parent, indexInParent, tempNodes, tempNodeCount - 1, collapseCount, stagingNodes, ref stagingNodeCount, out newTreeletCost); Debug.Assert(stagingNodeCount < stagingNodeCapacity); if (newTreeletCost < originalTreeletCost) { //The refinement is an actual improvement. //Apply the staged nodes to real nodes! int nextInternalNodeIndexToUse = 0; ReifyStagingNodes(nodeIndex, stagingNodes, ref subtrees, ref treeletInternalNodes, ref nextInternalNodeIndexToUse, ref spareNodes, out nodesInvalidated); //If any nodes are left over, put them into the spares list for later reuse. for (int i = nextInternalNodeIndexToUse; i < treeletInternalNodes.Count; ++i) { spareNodes.Add(treeletInternalNodes.Elements[i]); } } else { nodesInvalidated = false; } subtrees.Dispose(); treeletInternalNodes.Dispose(); }
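//Worked illustration of the removal-order rule above (indices hypothetical):
//FastRemoveAt swaps the last element into the removed slot. Take
//remainingNodes = [n0, n1, n2] with bestA = 1, bestB = 2.
//  Removing bestB first: [n0, n1], then removing bestA leaves [n0]. Correct.
//  Removing bestA first swaps n2 into slot 1, giving [n0, n2]; index bestB = 2
//  is now out of range and the intended n2 has already moved. Removing the
//  higher index first can never disturb the element at the lower index.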
public unsafe void TopDownBinnedRefine(int maximumSubtrees)
{
    var pool = BufferPools<int>.Thread;
    var spareNodes = new QuickList<int>(pool, 8);
    var subtreeReferences = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    var treeletInternalNodes = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    int[] buffer;
    MemoryRegion region;
    BinnedResources resources;
    CreateBinnedResources(pool, maximumSubtrees, out buffer, out region, out resources);
    TopDownBinnedRefine(0, maximumSubtrees, ref subtreeReferences, ref treeletInternalNodes, ref spareNodes, ref resources);
    RemoveUnusedInternalNodes(ref spareNodes);
    region.Dispose();
    pool.GiveBack(buffer);
    spareNodes.Dispose();
    subtreeReferences.Dispose();
    //Return the treelet internal node list's buffer to the pool as well; the original omitted this.
    treeletInternalNodes.Dispose();
}
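//Usage sketch: the subtree budget bounds how large each rebuilt treelet can be.
//ChildrenCapacity * ChildrenCapacity matches the budget AgglomerativeRefine uses
//above; larger budgets rebuild bigger regions per call at higher cost.
//(Illustrative call only.)
tree.TopDownBinnedRefine(Tree.ChildrenCapacity * Tree.ChildrenCapacity);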
/// <summary> /// Identifies the points on the surface of hull. /// </summary> /// <param name="points">List of points in the set.</param> /// <param name="outputSurfacePoints">Unique points on the surface of the convex hull.</param> public static void GetConvexHull(ref QuickList<Vector3> points, IList<Vector3> outputSurfacePoints) { var indices = new QuickList<int>(BufferPools<int>.Locking, BufferPool.GetPoolIndex(points.Count * 3)); GetConvexHull(ref points, ref indices, outputSurfacePoints); indices.Dispose(); }
bool TryToStepUsingContact(ref ContactData contact, ref Vector3 down, out Vector3 newPosition) { Vector3 position = characterBody.Position; //The normal of the contact may not be facing perfectly out to the side. //The detection process allows a bit of slop. //Correct it by removing any component of the normal along the local up vector. Vector3 normal = contact.Normal; float dot; Vector3.Dot(ref normal, ref down, out dot); Vector3 error; Vector3.Multiply(ref down, dot, out error); Vector3.Subtract(ref normal, ref error, out normal); normal.Normalize(); //Now we need to ray cast out from the center of the character in the direction of this normal to check for obstructions. //Compute the ray origin location. Fire it out of the top of the character; if we're stepping, this must be a valid location. //Putting it as high as possible helps to reject more invalid step geometry. Ray ray; float downRayLength = characterBody.Height;// MaximumStepHeight + upStepMargin; Vector3.Multiply(ref down, characterBody.Height * .5f - downRayLength, out ray.Position); Vector3.Add(ref ray.Position, ref position, out ray.Position); ray.Direction = normal; //Include a little margin in the length. //Technically, the character only needs to teleport horizontally by the complicated commented expression. //That puts it just far enough to have traction on the new surface. //In practice, the current contact refreshing approach used for many pair types causes contacts to persist horizontally a bit, //which can cause side effects for the character. float horizontalOffsetAmount = characterBody.CollisionInformation.Shape.CollisionMargin;// (float)((1 - character.SupportFinder.sinMaximumSlope) * character.Body.CollisionInformation.Shape.CollisionMargin + 0); float length = characterBody.Radius + horizontalOffsetAmount;// -contact.PenetrationDepth; if (QueryManager.RayCastHitAnything(ray, length)) { //The step is obstructed! newPosition = new Vector3(); return false; } //The down-cast ray origin has been verified by the previous ray cast. //Let's look for a support! Vector3 horizontalOffset; Vector3.Multiply(ref normal, length, out horizontalOffset); Vector3.Add(ref ray.Position, ref horizontalOffset, out ray.Position); ray.Direction = down; //Find the earliest hit, if any. RayHit earliestHit; if (!QueryManager.RayCast(ray, downRayLength, out earliestHit) || //Can't do anything if it didn't hit. earliestHit.T <= 0 || //Can't do anything if the hit was invalid. earliestHit.T - downRayLength > -minimumUpStepHeight || //Don't bother doing anything if the step is too small. earliestHit.T - downRayLength < -maximumStepHeight - upStepMargin) //Can't do anything if the step is too tall. { //No valid hit was detected. newPosition = new Vector3(); return false; } //Ensure the candidate surface supports traction. Vector3 supportNormal; Vector3.Normalize(ref earliestHit.Normal, out supportNormal); //Calibrate the normal to face in the same direction as the down vector for consistency. Vector3.Dot(ref supportNormal, ref down, out dot); if (dot < 0) { Vector3.Negate(ref supportNormal, out supportNormal); dot = -dot; } //If the new surface does not have traction, do not attempt to step up. if (dot < ContactCategorizer.TractionThreshold) { newPosition = new Vector3(); return false; } //Since contact queries are frequently expensive compared to ray cast tests, //do one more ray cast test. This time, starting from the same position, cast upwards. 
//In order to step up, the previous down-ray hit must be at least a character height away from the result of the up-ray. Vector3.Negate(ref down, out ray.Direction); //Find the earliest hit, if any. //RayHit earliestHitUp = new RayHit(); //earliestHitUp.T = float.MaxValue; float upLength = characterBody.Height - earliestHit.T; //If the sum of the up and down distances is less than the height, the character can't fit. if (QueryManager.RayCastHitAnything(ray, upLength)) { newPosition = new Vector3(); return false; } //By now, a valid ray hit has been found. Now we need to validate it using contact queries. //This process is very similar in concept to the down step verification, but it has some extra //requirements. //Predict a hit location based on the time of impact and the normal at the intersection. //Take into account the radius of the character (don't forget the collision margin!) RigidTransform transform = characterBody.CollisionInformation.WorldTransform; //The transform must be modified to position the query body at the right location. //The horizontal offset of the queries ensures that a tractionable part of the character will be put onto the new support. Vector3.Multiply(ref normal, horizontalOffsetAmount, out horizontalOffset); Vector3.Add(ref transform.Position, ref horizontalOffset, out transform.Position); Vector3 verticalOffset; Vector3.Multiply(ref down, -downRayLength, out verticalOffset); Vector3.Add(ref transform.Position, ref verticalOffset, out transform.Position); //We know that the closest point to the plane will be the extreme point in the plane's direction. //Use it as the ray origin. Ray downRay; characterBody.CollisionInformation.Shape.GetExtremePoint(supportNormal, ref transform, out downRay.Position); downRay.Direction = down; //Intersect the ray against the plane defined by the support hit. Vector3 intersection; Vector3.Dot(ref earliestHit.Location, ref supportNormal, out dot); Plane plane = new Plane(supportNormal, dot); Vector3 candidatePosition; //Define the interval bounds to be used later. //The words 'highest' and 'lowest' here refer to the position relative to the character's body. //The ray cast points downward relative to the character's body. float highestBound = -maximumStepHeight; float lowestBound = characterBody.CollisionInformation.Shape.CollisionMargin - downRayLength + earliestHit.T; float currentOffset = lowestBound; float hintOffset; var tractionContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread); var supportContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread); var sideContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread); var headContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread); try { //This guess may either win immediately, or at least give us a better idea of where to search. float hitT; if (Toolbox.GetRayPlaneIntersection(ref downRay, ref plane, out hitT, out intersection)) { hitT = -downRayLength + hitT + CollisionDetectionSettings.AllowedPenetration; if (hitT < highestBound) { //Don't try a location known to be too high. 
hitT = highestBound; } currentOffset = hitT; if (currentOffset > lowestBound) lowestBound = currentOffset; candidatePosition = characterBody.Position + down * currentOffset + horizontalOffset; switch (TryUpStepPosition(ref normal, ref candidatePosition, ref down, ref tractionContacts, ref supportContacts, ref sideContacts, ref headContacts, out hintOffset)) { case CharacterContactPositionState.Accepted: currentOffset += hintOffset; //Only use the new position location if the movement distance was the right size. if (currentOffset < 0 && currentOffset > -maximumStepHeight - CollisionDetectionSettings.AllowedPenetration) { //It's possible that we let a just-barely-too-high step occur, limited by the allowed penetration. //Just clamp the overall motion and let it penetrate a bit. newPosition = characterBody.Position + Math.Max(-maximumStepHeight, currentOffset) * down + horizontalOffset; return true; } else { newPosition = new Vector3(); return false; } case CharacterContactPositionState.Rejected: newPosition = new Vector3(); return false; case CharacterContactPositionState.NoHit: highestBound = currentOffset + hintOffset; currentOffset = (lowestBound + currentOffset) * .5f; break; case CharacterContactPositionState.Obstructed: lowestBound = currentOffset; currentOffset = (highestBound + currentOffset) * .5f; break; case CharacterContactPositionState.HeadObstructed: highestBound = currentOffset + hintOffset; currentOffset = (lowestBound + currentOffset) * .5f; break; case CharacterContactPositionState.TooDeep: currentOffset += hintOffset; lowestBound = currentOffset; break; } } //TODO: If the ray cast doesn't hit, that could be used to early out... Then again, it pretty much can't happen. //Our guesses failed. //Begin the regular process. Start at the time of impact of the ray itself. //How about trying the time of impact of the ray itself? //Since we wouldn't be here unless there were no contacts at the body's current position, //testing the ray cast location gives us the second bound we need to do an informed binary search. int attempts = 0; //Don't keep querying indefinitely. If we fail to reach it in a few informed steps, it's probably not worth continuing. //The bound size check prevents the system from continuing to search a meaninglessly tiny interval. while (attempts++ < 5 && lowestBound - highestBound > Toolbox.BigEpsilon) { candidatePosition = characterBody.Position + currentOffset * down + horizontalOffset; switch (TryUpStepPosition(ref normal, ref candidatePosition, ref down, ref tractionContacts, ref supportContacts, ref sideContacts, ref headContacts, out hintOffset)) { case CharacterContactPositionState.Accepted: currentOffset += hintOffset; //Only use the new position location if the movement distance was the right size. if (currentOffset < 0 && currentOffset > -maximumStepHeight - CollisionDetectionSettings.AllowedPenetration) { //It's possible that we let a just-barely-too-high step occur, limited by the allowed penetration. //Just clamp the overall motion and let it penetrate a bit. 
newPosition = characterBody.Position + Math.Max(-maximumStepHeight, currentOffset) * down + horizontalOffset; return true; } else { newPosition = new Vector3(); return false; } case CharacterContactPositionState.Rejected: newPosition = new Vector3(); return false; case CharacterContactPositionState.NoHit: highestBound = currentOffset + hintOffset; currentOffset = (lowestBound + highestBound) * .5f; break; case CharacterContactPositionState.Obstructed: lowestBound = currentOffset; currentOffset = (highestBound + lowestBound) * .5f; break; case CharacterContactPositionState.HeadObstructed: highestBound = currentOffset + hintOffset; currentOffset = (lowestBound + currentOffset) * .5f; break; case CharacterContactPositionState.TooDeep: currentOffset += hintOffset; lowestBound = currentOffset; break; } } } finally { tractionContacts.Dispose(); supportContacts.Dispose(); sideContacts.Dispose(); headContacts.Dispose(); } //Couldn't find a candidate. newPosition = new Vector3(); return false; }
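//Distilled sketch of the hint-guided bisection used above (hypothetical helper;
//'Probe' stands in for TryUpStepPosition, collapsed to three outcomes). The real
//loop distinguishes more states, but the interval-narrowing structure is the same.
enum ProbeState { Accepted, TooHigh, TooLow }
delegate ProbeState Probe(float offset, out float hint);

static bool FindSupportOffset(Probe query, float highestBound, float lowestBound, float epsilon, out float result)
{
    float offset = lowestBound;
    int attempts = 0;
    //Stop once the interval is negligible or the attempt budget is spent.
    while (attempts++ < 5 && lowestBound - highestBound > epsilon)
    {
        float hint;
        switch (query(offset, out hint))
        {
            case ProbeState.Accepted:
                result = offset + hint;
                return true;
            case ProbeState.TooHigh: //Like NoHit/HeadObstructed: pull the ceiling down.
                highestBound = offset + hint;
                offset = (lowestBound + highestBound) * 0.5f;
                break;
            case ProbeState.TooLow: //Like Obstructed/TooDeep: raise the floor.
                lowestBound = offset;
                offset = (highestBound + lowestBound) * 0.5f;
                break;
        }
    }
    result = 0;
    return false;
}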
public override void Update(float dt)
{
    //Refresh the contact manifold for this frame.
    var transform = new RigidTransform(voxelBlob.Position);
    var convexTransform = convex.WorldTransform;
    ContactRefresher.ContactRefresh(contacts, supplementData, ref convexTransform, ref transform, contactIndicesToRemove);
    RemoveQueuedContacts();

    //Collect the set of overlapped cell indices.
    //Not the fastest way to do this, but it's relatively simple and easy.
    var overlaps = new QuickList<Int3>(BufferPools<Int3>.Thread);
    voxelBlob.Shape.GetOverlaps(voxelBlob.Position, convex.BoundingBox, ref overlaps);
    var candidatesToAdd = new QuickList<ContactData>(BufferPools<ContactData>.Thread, BufferPool<int>.GetPoolIndex(overlaps.Count));
    for (int i = 0; i < overlaps.Count; ++i)
    {
        GeneralConvexPairTester manifold;
        if (!ActivePairs.TryGetValue(overlaps.Elements[i], out manifold))
        {
            //This manifold did not previously exist.
            manifold = GetPair(ref overlaps.Elements[i]);
        }
        else
        {
            //It did previously exist.
            ActivePairs.FastRemove(overlaps.Elements[i]);
        }
        activePairsBackBuffer.Add(overlaps.Elements[i], manifold);
        ContactData contactCandidate;
        if (manifold.GenerateContactCandidate(out contactCandidate))
        {
            candidatesToAdd.Add(ref contactCandidate);
        }
    }
    overlaps.Dispose();

    //Any pairs remaining in the activePairs set no longer exist. Clean them up.
    for (int i = ActivePairs.Count - 1; i >= 0; --i)
    {
        ReturnPair(ActivePairs.Values[i]);
        ActivePairs.FastRemove(ActivePairs.Keys[i]);
    }
    //Swap the pair sets.
    var temp = ActivePairs;
    ActivePairs = activePairsBackBuffer;
    activePairsBackBuffer = temp;

    //Check if adding the new contacts would overflow the manifold.
    if (contacts.Count + candidatesToAdd.Count > 4)
    {
        //Adding all the contacts would overflow the manifold. Reduce to the best subset.
        var reducedCandidates = new QuickList<ContactData>(BufferPools<ContactData>.Thread, 3);
        ContactReducer.ReduceContacts(contacts, ref candidatesToAdd, contactIndicesToRemove, ref reducedCandidates);
        RemoveQueuedContacts();
        for (int i = reducedCandidates.Count - 1; i >= 0; i--)
        {
            Add(ref reducedCandidates.Elements[i]);
            reducedCandidates.RemoveAt(i);
        }
        reducedCandidates.Dispose();
    }
    else if (candidatesToAdd.Count > 0)
    {
        //Won't overflow the manifold, so just toss it in.
        for (int i = 0; i < candidatesToAdd.Count; i++)
        {
            Add(ref candidatesToAdd.Elements[i]);
        }
    }
    candidatesToAdd.Dispose();
}
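//The pair-caching pattern above in miniature (illustrative; the engine uses
//QuickDictionary-style containers, a plain Dictionary is used here for brevity):
//pairs seen this frame migrate to a back buffer, leftovers are recycled, and the
//caller swaps the two buffers afterward.
static void UpdatePairCache<TKey, TPair>(
    Dictionary<TKey, TPair> active,
    Dictionary<TKey, TPair> backBuffer,
    IEnumerable<TKey> currentOverlaps,
    Func<TKey, TPair> create,
    Action<TPair> recycle)
{
    foreach (var key in currentOverlaps)
    {
        TPair pair;
        if (active.TryGetValue(key, out pair))
            active.Remove(key); //Still alive; keep it out of the cleanup sweep below.
        else
            pair = create(key); //New overlap this frame.
        backBuffer[key] = pair;
    }
    foreach (var stale in active.Values)
        recycle(stale); //Anything not refreshed no longer overlaps.
    active.Clear();
}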
/// <summary> /// Checks if a transition from the current stance to the target stance is possible given the current environment. /// </summary> /// <param name="targetStance">Stance to check for transition safety.</param> /// <param name="newHeight">If the transition is safe, the new height of the character. Zero otherwise.</param> /// <param name="newPosition">If the transition is safe, the new location of the character body if the transition occurred. Zero vector otherwise.</param> /// <returns>True if the target stance is different than the current stance and the transition is valid, false otherwise.</returns> public bool CheckTransition(Stance targetStance, out float newHeight, out Vector3 newPosition) { var currentPosition = characterBody.position; var down = characterBody.orientationMatrix.Down; newPosition = new Vector3(); newHeight = 0; if (CurrentStance != targetStance) { float currentHeight; switch (CurrentStance) { case Stance.Prone: currentHeight = proneHeight; break; case Stance.Crouching: currentHeight = crouchingHeight; break; default: currentHeight = standingHeight; break; } float targetHeight; switch (targetStance) { case Stance.Prone: targetHeight = proneHeight; break; case Stance.Crouching: targetHeight = crouchingHeight; break; default: targetHeight = standingHeight; break; } if (currentHeight >= targetHeight) { //The character is getting smaller, so no validation queries are required. if (SupportFinder.HasSupport) { //On the ground, so need to move the position down. newPosition = currentPosition + down * ((currentHeight - targetHeight) * 0.5f); } else { //We're in the air, so we don't have to change the position at all- just change the height. newPosition = currentPosition; } newHeight = targetHeight; return true; } //The character is getting bigger, so validation is required. ConvexCollidable<CylinderShape> queryObject; switch (targetStance) { case Stance.Prone: queryObject = proneQueryObject; break; case Stance.Crouching: queryObject = crouchingQueryObject; break; default: queryObject = standingQueryObject; break; } var tractionContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread); var supportContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread); var sideContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread); var headContacts = new QuickList<CharacterContact>(BufferPools<CharacterContact>.Thread); try { if (SupportFinder.HasSupport) { //Increasing in size requires a query to verify that the new state is safe. //TODO: State queries can be expensive if the character is crouching beneath something really detailed. //There are some situations where you may want to do an upwards-pointing ray cast first. If it hits something, //there's no need to do the full query. newPosition = currentPosition - down * ((targetHeight - currentHeight) * .5f); PrepareQueryObject(queryObject, ref newPosition); QueryManager.QueryContacts(queryObject, ref tractionContacts, ref supportContacts, ref sideContacts, ref headContacts); if (IsObstructed(ref sideContacts, ref headContacts)) { //Can't stand up if something is in the way! return false; } newHeight = targetHeight; return true; } else { //This is a complicated case. We must perform a semi-downstep query. //It's different than a downstep because the head may be obstructed as well. 
float highestBound = 0; float lowestBound = (targetHeight - currentHeight) * .5f; float currentOffset = lowestBound; float maximum = lowestBound; int attempts = 0; //Don't keep querying indefinitely. If we fail to reach it in a few informed steps, it's probably not worth continuing. //The bound size check prevents the system from continuing to search a meaninglessly tiny interval. while (attempts++ < 5 && lowestBound - highestBound > Toolbox.BigEpsilon) { Vector3 candidatePosition = currentPosition + currentOffset * down; float hintOffset; switch (TrySupportLocation(queryObject, ref candidatePosition, out hintOffset, ref tractionContacts, ref supportContacts, ref sideContacts, ref headContacts)) { case CharacterContactPositionState.Accepted: currentOffset += hintOffset; //Only use the new position location if the movement distance was the right size. if (currentOffset > 0 && currentOffset < maximum) { newPosition = currentPosition + currentOffset * down; newHeight = targetHeight; return true; } else { return false; } case CharacterContactPositionState.NoHit: highestBound = currentOffset + hintOffset; currentOffset = (lowestBound + highestBound) * .5f; break; case CharacterContactPositionState.Obstructed: lowestBound = currentOffset; currentOffset = (highestBound + lowestBound) * .5f; break; case CharacterContactPositionState.TooDeep: currentOffset += hintOffset; lowestBound = currentOffset; break; } } //Couldn't find a hit. Go ahead and get bigger! newPosition = currentPosition; newHeight = targetHeight; return true; } } finally { tractionContacts.Dispose(); supportContacts.Dispose(); sideContacts.Dispose(); headContacts.Dispose(); } } return false; }
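//Usage sketch (illustrative; the stance controller instance is hypothetical, and how
//the engine commits the new state may differ):
float newHeight;
Vector3 newPosition;
if (stanceManager.CheckTransition(Stance.Crouching, out newHeight, out newPosition))
{
    //The transition is safe: resize and reposition the body (assumed mutable here).
    characterBody.Height = newHeight;
    characterBody.Position = newPosition;
}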
private static void RemoveInsidePoints(ref QuickList<Vector3> points, ref QuickList<int> triangleIndices, ref QuickList<int> outsidePoints)
{
    var insidePoints = new QuickList<int>(BufferPools<int>.Locking);
    //We're going to remove points from this list as we go to prune it down to the truly inner points.
    insidePoints.AddRange(outsidePoints);
    outsidePoints.Clear();
    for (int i = 0; i < triangleIndices.Count && insidePoints.Count > 0; i += 3)
    {
        //Compute the triangle's plane in point-normal representation to test other points against.
        Vector3 normal;
        FindNormal(ref triangleIndices, ref points, i, out normal);
        Vector3 p = points.Elements[triangleIndices.Elements[i]];
        for (int j = insidePoints.Count - 1; j >= 0; --j)
        {
            //Offset from the triangle to the current point, tested against the normal, determines if the current point is visible
            //from the triangle face.
            Vector3 offset = points.Elements[insidePoints.Elements[j]] - p;
            float dot = Vector3.Dot(offset, normal);
            //If it's visible, then it's outside!
            if (dot > 0)
            {
                //This point is known to be on the outside; put it on the outside!
                outsidePoints.Add(insidePoints.Elements[j]);
                insidePoints.FastRemoveAt(j);
            }
        }
    }
    insidePoints.Dispose();
}
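//Worked example of the visibility test above (illustrative): for a triangle lying in
//the z = 0 plane with normal (0, 0, 1) and reference vertex p = (0, 0, 0), the point
//(0.5, 0.5, 1) gives dot((0.5, 0.5, 1) - p, normal) = 1 > 0, so it is outside
//(visible from the face); the point (0.5, 0.5, -1) gives -1 <= 0, so it stays inside.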
public override void Update(double dt)
{
    //Refresh the contact manifold for this frame.
    RigidTransform transform = new RigidTransform(mesh.Position);
    RigidTransform convexTransform = convex.WorldTransform;
    ContactRefresher.ContactRefresh(contacts, supplementData, ref convexTransform, ref transform, contactIndicesToRemove);
    RemoveQueuedContacts();

    //Collect the set of overlapped cell indices.
    var overlaps = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
    mesh.ChunkShape.GetOverlaps(mesh.Position, convex.BoundingBox, ref overlaps);
    var candidatesToAdd = new QuickList<ContactData>(BufferPools<ContactData>.Thread, BufferPool<int>.GetPoolIndex(overlaps.Count));
    for (int i = 0; i < overlaps.Count; i++)
    {
        GeneralConvexPairTester manifold;
        if (!ActivePairs.TryGetValue(overlaps.Elements[i], out manifold))
        {
            //This pair did not previously exist.
            manifold = GetPair(ref overlaps.Elements[i]);
        }
        else
        {
            //It did previously exist; pull it out of the stale set.
            ActivePairs.FastRemove(overlaps.Elements[i]);
        }
        activePairsBackBuffer.Add(overlaps.Elements[i], manifold);
        ContactData contactCandidate;
        if (manifold.GenerateContactCandidate(out contactCandidate))
        {
            candidatesToAdd.Add(ref contactCandidate);
        }
    }
    overlaps.Dispose();

    //Any pairs remaining in ActivePairs were not refreshed this frame; clean them up.
    for (int i = ActivePairs.Count - 1; i >= 0; i--)
    {
        ReturnPair(ActivePairs.Values[i]);
        ActivePairs.FastRemove(ActivePairs.Keys[i]);
    }
    //Swap the pair sets.
    var temp = ActivePairs;
    ActivePairs = activePairsBackBuffer;
    activePairsBackBuffer = temp;

    //Check if adding the new contacts would overflow the four-contact manifold.
    if (contacts.Count + candidatesToAdd.Count > 4)
    {
        //Reduce to the best subset.
        var reducedCandidates = new QuickList<ContactData>(BufferPools<ContactData>.Thread, 3);
        ContactReducer.ReduceContacts(contacts, ref candidatesToAdd, contactIndicesToRemove, ref reducedCandidates);
        RemoveQueuedContacts();
        for (int i = reducedCandidates.Count - 1; i >= 0; i--)
        {
            Add(ref reducedCandidates.Elements[i]);
            reducedCandidates.RemoveAt(i);
        }
        reducedCandidates.Dispose();
    }
    else if (candidatesToAdd.Count > 0)
    {
        //Won't overflow the manifold, so add them all.
        for (int i = 0; i < candidatesToAdd.Count; i++)
        {
            Add(ref candidatesToAdd.Elements[i]);
        }
    }
    candidatesToAdd.Dispose();
}
public static void TestBaseline(TestCollidable[] leaves, BoundingBox[] queries, int queryCount, int selfTestCount, int refitCount) { { var warmLeaves = GetLeaves(10, 10, 10, 10, 10); BaselineTree tree = new BaselineTree(); //for (int i = 0; i < leaves.Length; ++i) //{ // tree.Insert(warmLeaves[i]); //} //tree.BuildMedianSplit(warmLeaves); tree.BuildVolumeHeuristic(warmLeaves); Console.WriteLine($"Baseline Cachewarm Build: {tree.LeafCount}"); tree.RefitLeaves(); tree.Refit(); var list = new QuickList<int>(new BufferPool<int>()); BoundingBox aabb = new BoundingBox { Min = new Vector3(0, 0, 0), Max = new Vector3(1, 1, 1) }; tree.QueryRecursive(ref aabb, ref list); list.Dispose(); var overlaps = new QuickList<Overlap<TestCollidable>>(new BufferPool<Overlap<TestCollidable>>()); tree.GetSelfOverlaps(ref overlaps); Console.WriteLine($"Cachewarm overlaps: {overlaps.Count}"); overlaps = new QuickList<Overlap<TestCollidable>>(new BufferPool<Overlap<TestCollidable>>()); tree.GetSelfOverlapsViaQueries(ref overlaps); Console.WriteLine($"Cachewarm overlaps: {overlaps.Count}"); tree.Dispose(); } { Console.WriteLine($"Baseline arity: {BaselineTree.ChildrenCapacity}"); BaselineTree tree = new BaselineTree(leaves.Length, 32); var startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; //for (int i = 0; i < leaves.Length; ++i) //{ // tree.Insert(leaves[(int)((982451653L * i) % leaves.Length)]); // //tree.InsertGlobal(leaves[(int)((982451653L * i) % leaves.Length)]); // //tree.Insert(leaves[i]); // //tree.InsertGlobal(leaves[i]); //} //tree.BuildMedianSplit(leaves); tree.BuildVolumeHeuristic(leaves); var endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"Baseline Build Time: {endTime - startTime}, depth: {tree.MaximumDepth}"); Console.WriteLine($"Cost heuristic: {tree.MeasureCostHeuristic()}"); tree.Validate(); //var leafCount = tree.LeafCount; //for (int i = 0; i < leafCount; ++i) //{ // tree.RemoveAt(0); // tree.Validate(); //} int nodeCount, childCount; tree.MeasureNodeOccupancy(out nodeCount, out childCount); Console.WriteLine($"Baseline Occupancy: {childCount / (double)nodeCount}"); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < refitCount; ++i) { //tree.RefitLeaves(); tree.Refit(); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"Baseline Refit Time: {endTime - startTime}"); var list = new QuickList<int>(new BufferPool<int>()); var queryMask = queries.Length - 1; startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < queryCount; ++i) { list.Count = 0; //tree.Query(ref queries[i & queryMask], ref list); tree.QueryRecursive(ref queries[i & queryMask], ref list); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"Baseline Query Time: {endTime - startTime}, overlaps: {list.Count}"); Array.Clear(list.Elements, 0, list.Elements.Length); list.Dispose(); var overlaps = new QuickList<Overlap<TestCollidable>>(new BufferPool<Overlap<TestCollidable>>()); startTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; for (int i = 0; i < selfTestCount; ++i) { overlaps.Count = 0; tree.GetSelfOverlaps(ref overlaps); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"Baseline SelfTree Time: {endTime - startTime}, overlaps: {overlaps.Count}"); overlaps = new QuickList<Overlap<TestCollidable>>(new BufferPool<Overlap<TestCollidable>>()); startTime = Stopwatch.GetTimestamp() / 
(double)Stopwatch.Frequency; for (int i = 0; i < selfTestCount; ++i) { overlaps.Count = 0; tree.GetSelfOverlapsViaQueries(ref overlaps); } endTime = Stopwatch.GetTimestamp() / (double)Stopwatch.Frequency; Console.WriteLine($"Baseline SelfQuery Time: {endTime - startTime}, overlaps: {overlaps.Count}"); tree.Dispose(); } }
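//Note on the query loops in these harnesses: indexing with (i & queryMask) where
//queryMask = queries.Length - 1 only cycles through the whole array when
//queries.Length is a power of two; otherwise the mask skips entries. A guard one
//might add when building the query set (illustrative):
static BoundingBox[] AllocateQueries(int count)
{
    if (count <= 0 || (count & (count - 1)) != 0)
        throw new ArgumentException("Query count must be a positive power of two for mask-based indexing.");
    return new BoundingBox[count];
}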