private void _processMessageData()
{
    Debugger.Assert(__nMsgLength != 0);
    bool bNeedFurtherProcessing = _pConnectionHost.ReceivingFilter(__receiveBuffer, __nMsgLength);
    if (!bNeedFurtherProcessing)
    {
        return;
    }
    // Mono has a bug in AES: the transform object must be recreated each time.
    if (_pDecryptor != null)
    {
        _pDecryptor.Dispose();
    }
    _pDecryptor = _pAESMgr.CreateDecryptor();
    // Check the AES block size.
    Debugger.Assert((__nMsgLength - MsgBase.HEAD_LENGTH) % 16 == 0);
    byte[] bytesDecoded = _pDecryptor.TransformFinalBlock(__receiveBuffer, MsgBase.HEAD_LENGTH, __nMsgLength - MsgBase.HEAD_LENGTH);
    ByteBuffer pByteBuffer = new ByteBuffer(bytesDecoded);
    // Read the message metadata.
    byte encodeType = pByteBuffer.FReadByte();
    int nMessageID = pByteBuffer.FReadInt();
    ushort sCheckCode = pByteBuffer.FReadUShort();
    int nTimestamp = pByteBuffer.FReadInt();
    // Read the message body.
    byte[] bodyBytes = pByteBuffer.FReadBytes(pByteBuffer.size - MsgBase.META_DATA_LENGTH);
    Debugger.Assert(_pConnectionHost.ContainsServerMsgID(nMessageID));
    //...Debugger.Assert(_isValidMsgFormat());
    if (nMessageID == SERVER_SESSION_UPDATE_KEY)
    {
        // Update the local timestamps.
        _nLastSvrTime = nTimestamp;
        _nLastLocalTime = NetworkStub.CurrentTimeSeconds();
        // Process the session key.
        _onUpdateSessionKey(bodyBytes);
    }
    else
    {
        // Process a normal message.
        MsgBase message = new MsgBase(nMessageID);
        message.timeStamp = nTimestamp;
        Debugger.Assert(message.type != SERVER_SESSION_UPDATE_KEY);
        message.DeserializeFrom(bodyBytes);
        __receiveMsgQueue.Add(message);
    }
}
protected override void OnAdded(int collectionIndex, int index, TSource value)
{
    var key = _keySelector.Invoke(value);
    _rightKeys.Add(index, new SourcePair(key, value));
    Add(key, value, _rightCount, _leftCount, OnAddedToRight);
}
public unsafe bool Allocate(ulong id, int vertexCount, out int start, out Buffer<Vector3> vertices)
{
    if (TryGetExistingMesh(id, out start, out vertices))
    {
        return false;
    }
    if (allocator.Allocate(id, vertexCount, out var longStart))
    {
        start = (int)longStart;
        vertices = this.vertices.Slice(start, vertexCount);
        pendingUploads.Add(new UploadRequest { Start = start, Count = vertexCount }, Pool);
        return true;
    }
    // Didn't fit. We need to resize.
    var copyCount = TriangleBuffer.Capacity + vertexCount;
    var newSize = 1 << SpanHelper.GetContainingPowerOf2(copyCount);
    Pool.ResizeToAtLeast(ref this.vertices, newSize, copyCount);
    allocator.Capacity = newSize;
    allocator.Allocate(id, vertexCount, out longStart);
    start = (int)longStart;
    vertices = this.vertices.Slice(start, vertexCount);
    // A resize forces an upload of everything, so any previous pending uploads are unnecessary.
    pendingUploads.Count = 0;
    pendingUploads.Add(new UploadRequest { Start = 0, Count = copyCount }, Pool);
    return true;
}
private static void MaintainEdge(int a, int b, ref QuickList<int> edges)
{
    bool contained = false;
    int index = 0;
    for (int k = 0; k < edges.Count; k += 2)
    {
        if ((edges[k] == a && edges[k + 1] == b) || (edges[k] == b && edges[k + 1] == a))
        {
            contained = true;
            index = k;
        }
    }
    // If it isn't present, add it to the edge list.
    if (!contained)
    {
        edges.Add(a);
        edges.Add(b);
    }
    else
    {
        // If it is present, both edge-connected triangles have now been deleted, so get rid of it.
        edges.FastRemoveAt(index + 1);
        edges.FastRemoveAt(index);
    }
}
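// Hypothetical usage sketch (not from the original source): MaintainEdge toggles an undirected edge's
// membership, so an edge survives in the list only while exactly one of its two adjacent triangles has
// been deleted. Assumes the QuickList<int>/BufferPools API used throughout these snippets.
private static void MaintainEdgeExample()
{
    var edges = new QuickList<int>(BufferPools<int>.Locking);
    MaintainEdge(0, 1, ref edges); // Not present yet: appended, edges = [0, 1].
    MaintainEdge(2, 0, ref edges); // Also new: edges = [0, 1, 2, 0].
    MaintainEdge(1, 0, ref edges); // Matches (0, 1) in either order: pair removed, edges = [2, 0].
    edges.Dispose();
}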
public unsafe bool Allocate(ulong id, int vertexCount, out int start, out Buffer<Vector3> vertices)
{
    if (allocator.TryGetAllocationRegion(id, out var allocation))
    {
        Debug.Assert(allocation.End - allocation.Start == vertexCount,
            "If you're trying to allocate room for a bunch of triangles and we found it already, it better match the expected size.");
        start = (int)allocation.Start;
        vertices = this.vertices.Slice(start, vertexCount);
        return false;
    }
    if (allocator.Allocate(id, vertexCount, out var longStart))
    {
        start = (int)longStart;
        vertices = this.vertices.Slice(start, vertexCount);
        pendingUploads.Add(new UploadRequest { Start = start, Count = vertexCount }, Pool.SpecializeFor<UploadRequest>());
        return true;
    }
    // Didn't fit. We need to resize.
    var copyCount = TriangleBuffer.Capacity + vertexCount;
    var newSize = SpanHelper.GetContainingPowerOf2(copyCount);
    Pool.Resize(ref this.vertices, newSize, copyCount);
    allocator.Capacity = newSize;
    allocator.Allocate(id, vertexCount, out longStart);
    start = (int)longStart;
    vertices = this.vertices.Slice(start, vertexCount);
    // A resize forces an upload of everything, so any previous pending uploads are unnecessary.
    pendingUploads.Count = 0;
    pendingUploads.Add(new UploadRequest { Start = 0, Count = copyCount }, Pool.SpecializeFor<UploadRequest>());
    return true;
}
private void _onConnectedCallback(IAsyncResult pAsyncResult)
{
    lock (__lock)
    {
        try
        {
            TcpClient pTCPClient = pAsyncResult.AsyncState as TcpClient;
            Debugger.Assert(System.Object.ReferenceEquals(pTCPClient, _tcpClient));
            pTCPClient.EndConnect(pAsyncResult);
            _tcpClient.NoDelay = true;
            _networkStream = pTCPClient.GetStream();
            Debugger.Assert(_networkStream.CanRead);
            __bIsReceiveHeader = true;
            _networkStream.BeginRead(__receiveBuffer, 0, HEAD_SIZE, _onReadCallback, 0);
            _connectionState = ConnectionState.CONNECTED;
            _InternalMsg sysMsg = new _InternalMsg(_InternalMsgType.Connected);
            __sysMsgQueue.Add(sysMsg);
        }
        catch (Exception e)
        {
            _InternalMsg errMsg = new _InternalMsg(_InternalMsgType.ConnectFailed, e);
            __sysMsgQueue.Add(errMsg);
        }
    }
}
void AddDigit(ref double value, ref double multiplier, ref PassthroughArrayPool<char> pool)
{
    var digit = (int)((value * multiplier) % 10);
    characters.Add(GetCharForDigit(digit), pool);
    value -= digit / multiplier;
    multiplier *= 10;
}
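// Worked trace for AddDigit (illustrative; assumes the caller starts multiplier at 10 so the first call
// extracts the first fractional digit - the caller's convention is not shown in these snippets):
//   value = 0.25, multiplier = 10:  digit = (int)((0.25 * 10) % 10) = 2; value -> 0.25 - 2 / 10.0 = 0.05; multiplier -> 100
//   value = 0.05, multiplier = 100: digit = (int)((0.05 * 100) % 10) = 5; value -> 0.05 - 5 / 100.0 = 0.0; multiplier -> 1000
// Note that digit / multiplier is double division (multiplier is a double), so no integer truncation occurs.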
private static void ReturnToPool(SpatialPartition partition)
{
    partitions.Remove(partition);
    // Return the partition to the pool, if there's enough room.
    if (partitionPool.Count + 1 < MaxPoolSize)
    {
        partitionPool.Add(partition);
    }
    partition.IsActive = false;
}
protected override void OnAdded(int collectionIndex, int index, int value)
{
    if (value >= 0 && value < SourceList.Count)
    {
        var item = GetItem(index, true);
        _indexes.Add(index, item);
        RecalculateIndexes(index + 1, _indexes.Count - 1);
        ResultList.Add(item.Index, SourceList[value]);
    }
    else
    {
        _indexes.Add(index, GetItem(index, false));
    }
}
protected override void OnAdded(int index, TSource value)
{
    var item = new ItemData(_keySelector.Invoke(value), value);
    item.SourceIndex = index;
    _sourceData.Add(index, item);
    for (int i = index + 1; i < _sourceData.Count; ++i)
    {
        _sourceData[i].SourceIndex = i;
    }
    AddToGroup(item);
}
public void Add(ref StreamingTarget parent, int childGroupIndex, int childIndexInGroup, Vector<int>[] masks)
{
    if (LastLeavesCount < Vector<float>.Count)
    {
        LeafGroups.Elements[LeafGroups.Count - 1].Add(ref parent.LeafGroups.Elements[childGroupIndex], childIndexInGroup, LastLeavesCount, masks);
        ++LastLeavesCount;
    }
    else
    {
        var newLeaves = new StreamingLeafGroup();
        newLeaves.Add(ref parent.LeafGroups.Elements[childGroupIndex], childIndexInGroup, 0, masks);
        LeafGroups.Add(ref newLeaves);
        LastLeavesCount = 1;
    }
}
void AddShape(Shapes shapes, TypedIndex shapeIndex, ref RigidPose pose, ref Vector3 color)
{
    switch (shapeIndex.Type)
    {
        case Sphere.Id:
            {
                SphereInstance instance;
                instance.Position = pose.Position;
                instance.Radius = shapes.GetShape<Sphere>(shapeIndex.Index).Radius;
                Helpers.PackOrientation(ref pose.Orientation, out instance.PackedOrientation);
                instance.PackedColor = Helpers.PackColor(ref color);
                spheres.Add(ref instance, new PassthroughArrayPool<SphereInstance>());
            }
            break;
        case Capsule.Id:
            {
                CapsuleInstance instance;
                instance.Position = pose.Position;
                ref var capsule = ref shapes.GetShape<Capsule>(shapeIndex.Index);
                instance.Radius = capsule.Radius;
                instance.HalfLength = capsule.HalfLength;
                instance.PackedOrientation = Helpers.PackOrientationU64(ref pose.Orientation);
                instance.PackedColor = Helpers.PackColor(ref color);
                capsules.Add(ref instance, new PassthroughArrayPool<CapsuleInstance>());
            }
            break;
/// <summary>
/// Registers a GameObject with the engine.
/// </summary>
/// <param name="go">GameObject which was created.</param>
internal static void AddGameObject(GameObject go)
{
    lock (gameObjectHandlerLock)
    {
        if (go.AddedToGameManager)
        {
            return;
        }
        AllGameObjects.Add(go);
        CurrentScene.GameObjectsToRemove.Remove(go);
        if (go.IgnoreCulling)
        {
            SpatialPartitionManager.AddIgnoredObject(go);
        }
        if (go.IsDynamic)
        {
            DynamicGameObjects.Add(go);
        }
        else if (!go.IgnoreCulling)
        {
            SpatialPartitionManager.Insert(go);
        }
        if (go.DestroyOnLoad)
        {
            CurrentScene.AttachedGameObjects.Add(go);
        }
        go.AddedToGameManager = true;
        EngineUtility.TransformHierarchyDirty = true;
    }
}
private static void RemoveInsidePoints(ref QuickList<Vector3> points, ref QuickList<int> triangleIndices, ref QuickList<int> outsidePoints)
{
    // We're going to remove points from this list as we go to prune it down to the truly inner points.
    var insidePoints = new QuickList<int>(BufferPools<int>.Locking);
    insidePoints.AddRange(outsidePoints);
    outsidePoints.Clear();
    for (int i = 0; i < triangleIndices.Count && insidePoints.Count > 0; i += 3)
    {
        // Compute the triangle's plane in point-normal representation to test other points against.
        Vector3 normal;
        FindNormal(ref triangleIndices, ref points, i, out normal);
        Vector3 p = points.Elements[triangleIndices.Elements[i]];
        for (int j = insidePoints.Count - 1; j >= 0; --j)
        {
            // The offset from the triangle to the current point, tested against the normal, determines
            // whether the current point is visible from the triangle face.
            Vector3 offset = points.Elements[insidePoints.Elements[j]] - p;
            float dot = Vector3.Dot(offset, normal);
            // If it's visible, then it's outside!
            if (dot > 0)
            {
                // This point is known to be on the outside; put it on the outside!
                outsidePoints.Add(insidePoints.Elements[j]);
                insidePoints.FastRemoveAt(j);
            }
        }
    }
    insidePoints.Dispose();
}
internal void _AddToQueue(Coroutine pCoroutine)
{
    Debugger.DebugSection(() =>
    {
        if (pCoroutine._bPooled)
        {
            Debugger.Assert(pCoroutine.state == CoroutineState.InUse);
        }
        else
        {
            Debugger.Assert(pCoroutine.state == CoroutineState.Stopped);
        }
    });
    Debugger.Assert(!_IsCoroutineInQueue(pCoroutine));
    pCoroutine._bInQueue = true;
    if (_InUpdating)
    {
        __arrAddedCoroutines.Add(pCoroutine);
    }
    else
    {
        _arrCoroutines.Add(pCoroutine);
    }
}
protected override void OnAdded(int index, TSource value)
{
    var key = _keySelector.Invoke(value);
    var targetIndex = FindByKey(key, index);
    var itemSet = new ItemSet(key, value) { SourceIndex = index, TargetIndex = targetIndex };
    _sourceList.Add(index, itemSet);
    ResultList.Add(targetIndex, itemSet);
    UpdateSourceIndexes(index + 1);
    UpdateTargetIndexes(targetIndex + 1);
}
unsafe void ValidateStaging(Node* stagingNodes, int stagingNodeIndex, ref QuickList<int> subtreeNodePointers,
    ref QuickList<int> collectedSubtreeReferences, ref QuickList<int> internalReferences,
    out int foundSubtrees, out int foundLeafCount)
{
    var stagingNode = stagingNodes + stagingNodeIndex;
    var children = &stagingNode->ChildA;
    var leafCounts = &stagingNode->LeafCountA;
    foundSubtrees = foundLeafCount = 0;
    for (int i = 0; i < stagingNode->ChildCount; ++i)
    {
        if (children[i] >= 0)
        {
            int childFoundSubtrees, childFoundLeafCount;
            if (internalReferences.Contains(children[i]))
            {
                throw new Exception("A child points to an internal node that was already visited. Possible loop, or just generally invalid.");
            }
            internalReferences.Add(children[i]);
            ValidateStaging(stagingNodes, children[i], ref subtreeNodePointers, ref collectedSubtreeReferences,
                ref internalReferences, out childFoundSubtrees, out childFoundLeafCount);
            if (childFoundLeafCount != leafCounts[i])
            {
                throw new Exception("Bad leaf count.");
            }
            foundSubtrees += childFoundSubtrees;
            foundLeafCount += childFoundLeafCount;
        }
        else
        {
            var subtreeNodePointerIndex = Encode(children[i]);
            var subtreeNodePointer = subtreeNodePointers.Elements[subtreeNodePointerIndex];
            // Rather than looking up the shuffled SweepSubtree for information, just go back to the source.
            if (subtreeNodePointer >= 0)
            {
                var node = nodes + subtreeNodePointer;
                var totalLeafCount = 0;
                for (int childIndex = 0; childIndex < node->ChildCount; ++childIndex)
                {
                    totalLeafCount += (&node->LeafCountA)[childIndex];
                }
                if (leafCounts[i] != totalLeafCount)
                {
                    throw new Exception("Bad leaf count.");
                }
                foundLeafCount += totalLeafCount;
            }
            else
            {
                var leafIndex = Encode(subtreeNodePointer);
                if (leafCounts[i] != 1)
                {
                    throw new Exception("Bad leaf count.");
                }
                foundLeafCount += 1;
            }
            ++foundSubtrees;
            collectedSubtreeReferences.Add(subtreeNodePointer);
        }
    }
}
internal void GetTilesFromRegion(QuickList<PartitionTile> tileList, Rectangle regionBounds)
{
    int X = regionBounds.X / partitionTileSize;
    int Y = regionBounds.Y / partitionTileSize;
    int width = RoundPrecise(regionBounds.Width) / partitionTileSize;
    int height = RoundPrecise(regionBounds.Height) / partitionTileSize;
    regionBounds = new Rectangle(X, Y, width, height);
    for (int x = regionBounds.X; x < regionBounds.Width + regionBounds.X; x++)
    {
        for (int y = regionBounds.Y; y < regionBounds.Height + regionBounds.Y; y++)
        {
            // Skip tiles outside the partition grid.
            if (x < 0 || y < 0 || x > partitionTilesX - 1 || y > partitionTilesY - 1)
            {
                continue;
            }
            tileList.Add(GridPartitionTiles[x][y]);
        }
    }
}
// TODO: Optimize me!
public void GetOverlaps(Vector3 gridPosition, BoundingBox boundingBox, ref QuickList<Vector3i> overlaps)
{
    BoundingBox b2 = new BoundingBox();
    Vector3.Subtract(ref boundingBox.Min, ref gridPosition, out b2.Min);
    Vector3.Subtract(ref boundingBox.Max, ref gridPosition, out b2.Max);
    var min = new Vector3i { X = Math.Max(0, (int)b2.Min.X), Y = Math.Max(0, (int)b2.Min.Y), Z = Math.Max(0, (int)b2.Min.Z) };
    var max = new Vector3i { X = Math.Min(CHUNK_SIZE - 1, (int)b2.Max.X), Y = Math.Min(CHUNK_SIZE - 1, (int)b2.Max.Y), Z = Math.Min(CHUNK_SIZE - 1, (int)b2.Max.Z) };
    for (int x = min.X; x <= max.X; x++)
    {
        for (int y = min.Y; y <= max.Y; y++)
        {
            for (int z = min.Z; z <= max.Z; z++)
            {
                if (Blocks[BlockIndex(x, y, z)].Material.GetSolidity() == MaterialSolidity.FULLSOLID)
                {
                    overlaps.Add(new Vector3i { X = x, Y = y, Z = z });
                }
            }
        }
    }
}
/// <summary>
/// Creates a single object instance for the pool.
/// </summary>
/// <returns>The key of the created object, or -1 if creation failed.</returns>
private int CreateOneObject()
{
    object obj = null;
    try
    {
        obj = Activator.CreateInstance(m_destType, m_ctorArgs);
    }
    catch (Exception)
    {
        m_maxObjCount = CurrentObjCount;
        if (m_minObjCount > CurrentObjCount)
        {
            m_minObjCount = CurrentObjCount;
        }
        if (MemoryUseOut != null)
        {
            MemoryUseOut();
        }
        return -1;
    }
    int key = obj.GetHashCode();
    m_hashTableObjs.Add(key, obj);
    m_hashTableStatus.Add(key, true);
    m_keyList.Add(key);
    m_idleObjCount++;
    return key;
}
public void LoopBody(int bodyIndex)
{
    if (bodyIndex != SourceIndex)
    {
        ConstraintBodyIndices.Add(bodyIndex, IntPool);
    }
}
// TODO: Optimize me!
public void GetOverlaps(ref RigidTransform transform, BoundingBox boundingBox, ref QuickList<Vector3i> overlaps)
{
    RigidTransform.TransformByInverse(ref boundingBox.Min, ref transform, out Vector3 tmin);
    RigidTransform.TransformByInverse(ref boundingBox.Max, ref transform, out Vector3 tmax);
    BoundingBox b2 = new BoundingBox(Vector3.Min(tmin, tmax), Vector3.Max(tmin, tmax));
    var min = new Vector3i { X = Math.Max(0, (int)b2.Min.X), Y = Math.Max(0, (int)b2.Min.Y), Z = Math.Max(0, (int)b2.Min.Z) };
    var max = new Vector3i { X = Math.Min(ChunkSize.X - 1, (int)b2.Max.X), Y = Math.Min(ChunkSize.Y - 1, (int)b2.Max.Y), Z = Math.Min(ChunkSize.Z - 1, (int)b2.Max.Z) };
    for (int x = min.X; x <= max.X; x++)
    {
        for (int y = min.Y; y <= max.Y; y++)
        {
            for (int z = min.Z; z <= max.Z; z++)
            {
                if (Blocks[BlockIndex(x, y, z)].Material.GetSolidity() == MaterialSolidity.FULLSOLID)
                {
                    overlaps.Add(new Vector3i { X = x, Y = y, Z = z });
                }
            }
        }
    }
}
public void GetOverlaps(Vector3 gridPosition, BoundingBox boundingBox, ref QuickList<Int3> overlaps)
{
    Vector3.Subtract(ref boundingBox.Min, ref gridPosition, out boundingBox.Min);
    Vector3.Subtract(ref boundingBox.Max, ref gridPosition, out boundingBox.Max);
    var inverseWidth = 1f / CellWidth;
    var min = new Int3
    {
        X = Math.Max(0, (int)(boundingBox.Min.X * inverseWidth)),
        Y = Math.Max(0, (int)(boundingBox.Min.Y * inverseWidth)),
        Z = Math.Max(0, (int)(boundingBox.Min.Z * inverseWidth))
    };
    var max = new Int3
    {
        X = Math.Min(Cells.GetLength(0) - 1, (int)(boundingBox.Max.X * inverseWidth)),
        Y = Math.Min(Cells.GetLength(1) - 1, (int)(boundingBox.Max.Y * inverseWidth)),
        Z = Math.Min(Cells.GetLength(2) - 1, (int)(boundingBox.Max.Z * inverseWidth))
    };
    for (int i = min.X; i <= max.X; ++i)
    {
        for (int j = min.Y; j <= max.Y; ++j)
        {
            for (int k = min.Z; k <= max.Z; ++k)
            {
                if (Cells[i, j, k])
                {
                    overlaps.Add(new Int3 { X = i, Y = j, Z = k });
                }
            }
        }
    }
}
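// Hypothetical call site for the cell query above (the receiver name 'grid' and the surrounding variables
// are assumed for illustration; this mirrors how the narrow-phase Update methods in these snippets consume
// the overlap list):
var overlaps = new QuickList<Int3>(BufferPools<Int3>.Thread);
grid.GetOverlaps(gridPosition, convex.BoundingBox, ref overlaps);
for (int i = 0; i < overlaps.Count; ++i)
{
    // overlaps.Elements[i] holds the (X, Y, Z) index of an occupied cell touching the query box.
}
overlaps.Dispose();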
unsafe void PushSame(int index, int leafCount, ref PriorityQueue queue, ref QuickList<TestPair2> pairsToTest)
{
    queue.Insert(pairsToTest.Count, (float)(Math.Log(leafCount) * leafCount));
    //queue.Insert(pairsToTest.Count, leafCount);
    pairsToTest.Add(new TestPair2 { A = index, Type = PairType.SameNode });
}
unsafe void CollectNodePairsInNode<TResultList>(int nodeIndex, int leafCount, int collisionTestThreshold,
    ref QuickList<Overlap> nodePairsToTest, ref TResultList results) where TResultList : IList<Overlap>
{
    if (leafCount <= collisionTestThreshold)
    {
        nodePairsToTest.Add(new Overlap { A = nodeIndex, B = nodeIndex });
        return;
    }
    var node = nodes + nodeIndex;
    var nodeChildA = node->ChildA;
    var nodeChildB = node->ChildB;
    var ab = BoundingBox.Intersects(ref node->A, ref node->B);
    if (nodeChildA >= 0)
    {
        CollectNodePairsInNode(nodeChildA, node->LeafCountA, collisionTestThreshold, ref nodePairsToTest, ref results);
    }
    if (nodeChildB >= 0)
    {
        CollectNodePairsInNode(nodeChildB, node->LeafCountB, collisionTestThreshold, ref nodePairsToTest, ref results);
    }
    // Test all different nodes.
    if (ab)
    {
        TestForCollectNodePairs(ref node->A, ref node->B, nodeChildA, nodeChildB, node->LeafCountA, node->LeafCountB,
            collisionTestThreshold, ref nodePairsToTest, ref results);
    }
}
unsafe void ValidateStaging(Node* stagingNodes, ref QuickList<int> subtreeNodePointers, int treeletParent, int treeletIndexInParent)
{
    int foundSubtrees, foundLeafCount;
    QuickList<int> collectedSubtreeReferences = new QuickList<int>(BufferPools<int>.Thread);
    QuickList<int> internalReferences = new QuickList<int>(BufferPools<int>.Thread);
    internalReferences.Add(0);
    ValidateStaging(stagingNodes, 0, ref subtreeNodePointers, ref collectedSubtreeReferences, ref internalReferences, out foundSubtrees, out foundLeafCount);
    if (treeletParent < -1 || treeletParent >= nodeCount)
    {
        throw new Exception("Bad treelet parent.");
    }
    if (treeletIndexInParent < -1 || (treeletParent >= 0 && treeletIndexInParent >= nodes[treeletParent].ChildCount))
    {
        throw new Exception("Bad treelet index in parent.");
    }
    if (treeletParent >= 0 && (&nodes[treeletParent].LeafCountA)[treeletIndexInParent] != foundLeafCount)
    {
        throw new Exception("Bad leaf count.");
    }
    if (subtreeNodePointers.Count != foundSubtrees)
    {
        throw new Exception("Bad subtree found count.");
    }
    for (int i = 0; i < collectedSubtreeReferences.Count; ++i)
    {
        if (!subtreeNodePointers.Contains(collectedSubtreeReferences[i]) || !collectedSubtreeReferences.Contains(subtreeNodePointers[i]))
        {
            throw new Exception("Bad subtree reference.");
        }
    }
    collectedSubtreeReferences.Dispose();
    internalReferences.Dispose();
}
public override void Update(double dt)
{
    // Refresh the contact manifold for this frame.
    RigidTransform transform = new RigidTransform(mesh.Position);
    RigidTransform convexTransform = convex.WorldTransform;
    ContactRefresher.ContactRefresh(contacts, supplementData, ref convexTransform, ref transform, contactIndicesToRemove);
    RemoveQueuedContacts();
    // Collect the set of overlapped cell indices.
    var overlaps = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
    mesh.ChunkShape.GetOverlaps(mesh.Position, convex.BoundingBox, ref overlaps);
    var candidatesToAdd = new QuickList<ContactData>(BufferPools<ContactData>.Thread, BufferPool<int>.GetPoolIndex(overlaps.Count));
    for (int i = 0; i < overlaps.Count; i++)
    {
        if (!ActivePairs.TryGetValue(overlaps.Elements[i], out GeneralConvexPairTester manifold))
        {
            // This manifold did not previously exist.
            manifold = GetPair(ref overlaps.Elements[i]);
        }
        else
        {
            // It did previously exist.
            ActivePairs.FastRemove(overlaps.Elements[i]);
        }
        activePairsBackBuffer.Add(overlaps.Elements[i], manifold);
        if (manifold.GenerateContactCandidate(out ContactData contactCandidate))
        {
            candidatesToAdd.Add(ref contactCandidate);
        }
    }
    overlaps.Dispose();
    // Any pairs remaining in the ActivePairs set no longer exist. Clean them up.
    for (int i = ActivePairs.Count - 1; i >= 0; i--)
    {
        ReturnPair(ActivePairs.Values[i]);
        ActivePairs.FastRemove(ActivePairs.Keys[i]);
    }
    // Swap the pair sets.
    var temp = ActivePairs;
    ActivePairs = activePairsBackBuffer;
    activePairsBackBuffer = temp;
    // Check if adding the new contacts would overflow the four-contact manifold.
    if (contacts.Count + candidatesToAdd.Count > 4)
    {
        // Adding all the contacts would overflow the manifold. Reduce to the best subset.
        var reducedCandidates = new QuickList<ContactData>(BufferPools<ContactData>.Thread, 3);
        ContactReducer.ReduceContacts(contacts, ref candidatesToAdd, contactIndicesToRemove, ref reducedCandidates);
        RemoveQueuedContacts();
        for (int i = reducedCandidates.Count - 1; i >= 0; i--)
        {
            Add(ref reducedCandidates.Elements[i]);
            reducedCandidates.RemoveAt(i);
        }
        reducedCandidates.Dispose();
    }
    else if (candidatesToAdd.Count > 0)
    {
        // Won't overflow the manifold, so just toss the candidates in.
        for (int i = 0; i < candidatesToAdd.Count; i++)
        {
            Add(ref candidatesToAdd.Elements[i]);
        }
    }
    candidatesToAdd.Dispose();
}
public void Setup()
{
    for (int i = 0; i < 100_000; i++)
    {
        normalList.Add(i);
        quickList.Add(i);
    }
}
private void _onFatalError(Exception e)
{
    _InternalMsg errMsg = new _InternalMsg(_InternalMsgType.Disconnected, e);
    __sysMsgQueue.Add(errMsg);
    this.Disconnect();
}
unsafe void PushDifferent(int a, int b, int leafCountA, int leafCountB, ref PriorityQueue queue, ref QuickList<TestPair2> pairsToTest)
{
    //queue.Insert(pairsToTest.Count, (float)(Math.Log(leafCountA) * leafCountA + Math.Log(leafCountB) * leafCountB));
    var max = Math.Max(leafCountA, leafCountB);
    queue.Insert(pairsToTest.Count, (float)(Math.Log(max) * max));
    //queue.Insert(pairsToTest.Count, Math.Max(leafCountA, leafCountB));
    pairsToTest.Add(new TestPair2 { A = a, B = b, Type = PairType.InternalInternal });
}
unsafe void CollectSubtreesForNodeDirect(int nodeIndex, int remainingDepth,
    ref QuickList<int> subtrees, ref QuickQueue<int> internalNodes, out float treeletCost)
{
    internalNodes.Enqueue(nodeIndex);
    treeletCost = 0;
    var node = nodes + nodeIndex;
    var children = &node->ChildA;
    var bounds = &node->A;
    --remainingDepth;
    if (remainingDepth >= 0)
    {
        for (int i = 0; i < node->ChildCount; ++i)
        {
            if (children[i] >= 0)
            {
                treeletCost += ComputeBoundsMetric(ref bounds[i]);
                float childCost;
                CollectSubtreesForNodeDirect(children[i], remainingDepth, ref subtrees, ref internalNodes, out childCost);
                treeletCost += childCost;
            }
            else
            {
                // It's a leaf; immediately add it to the subtrees.
                subtrees.Add(children[i]);
            }
        }
    }
    else
    {
        // Recursion has bottomed out. Add every child.
        // Once again, note that the treelet costs of these nodes are not considered, even if they are internal.
        // That's because the subtree internal nodes cannot change size due to the refinement.
        for (int i = 0; i < node->ChildCount; ++i)
        {
            subtrees.Add(children[i]);
        }
    }
}
public unsafe void Insert(Node* node, Node* nodes, ref QuickList<int> subtrees)
{
    var children = &node->ChildA;
    var bounds = &node->A;
    for (int childIndex = 0; childIndex < node->ChildCount; ++childIndex)
    {
        if (children[childIndex] >= 0)
        {
            int index = Count;
            var cost = Tree.ComputeBoundsMetric(ref bounds[childIndex]); // - node->PreviousMetric;
            ++Count;
            // Sift up.
            while (index > 0)
            {
                var parentIndex = (index - 1) >> 1;
                var parent = Entries + parentIndex;
                if (parent->Cost < cost)
                {
                    // Pull the parent down.
                    Entries[index] = *parent;
                    index = parentIndex;
                }
                else
                {
                    // Found the insertion spot.
                    break;
                }
            }
            var entry = Entries + index;
            entry->Index = children[childIndex];
            entry->Cost = cost;
        }
        else
        {
            // Immediately add leaf nodes.
            subtrees.Add(children[childIndex]);
        }
    }
}
public static void TestListResizing()
{
    Random random = new Random(5);
    UnsafeBufferPool<int> pool = new UnsafeBufferPool<int>();
    QuickList<int> list = new QuickList<int>(pool, 2);
    List<int> controlList = new List<int>();
    for (int iterationIndex = 0; iterationIndex < 100000; ++iterationIndex)
    {
        if (random.NextDouble() < 0.7)
        {
            list.Add(iterationIndex);
            controlList.Add(iterationIndex);
        }
        // Guard against removing from an empty list (random.Next(0) would return 0 and RemoveAt(0) would throw).
        if (random.NextDouble() < 0.2 && list.Count > 0)
        {
            var indexToRemove = random.Next(list.Count);
            list.RemoveAt(indexToRemove);
            controlList.RemoveAt(indexToRemove);
        }
        if (iterationIndex % 1000 == 0)
        {
            list.EnsureCapacity(list.Count * 3);
        }
        else if (iterationIndex % 7777 == 0)
        {
            list.Compact();
        }
    }
    Assert.IsTrue(list.Count == controlList.Count);
    for (int i = 0; i < list.Count; ++i)
    {
        Assert.IsTrue(list[i] == controlList[i]);
    }
}
public override void Update(float dt)
{
    // Refresh the contact manifold for this frame.
    var transform = new RigidTransform(voxelBlob.Position);
    var convexTransform = convex.WorldTransform;
    ContactRefresher.ContactRefresh(contacts, supplementData, ref convexTransform, ref transform, contactIndicesToRemove);
    RemoveQueuedContacts();
    // Collect the set of overlapped cell indices.
    // Not the fastest way to do this, but it's relatively simple and easy.
    var overlaps = new QuickList<Int3>(BufferPools<Int3>.Thread);
    voxelBlob.Shape.GetOverlaps(voxelBlob.Position, convex.BoundingBox, ref overlaps);
    var candidatesToAdd = new QuickList<ContactData>(BufferPools<ContactData>.Thread, BufferPool<int>.GetPoolIndex(overlaps.Count));
    for (int i = 0; i < overlaps.Count; ++i)
    {
        GeneralConvexPairTester manifold;
        if (!ActivePairs.TryGetValue(overlaps.Elements[i], out manifold))
        {
            // This manifold did not previously exist.
            manifold = GetPair(ref overlaps.Elements[i]);
        }
        else
        {
            // It did previously exist.
            ActivePairs.FastRemove(overlaps.Elements[i]);
        }
        activePairsBackBuffer.Add(overlaps.Elements[i], manifold);
        ContactData contactCandidate;
        if (manifold.GenerateContactCandidate(out contactCandidate))
        {
            candidatesToAdd.Add(ref contactCandidate);
        }
    }
    overlaps.Dispose();
    // Any pairs remaining in the activePairs set no longer exist. Clean them up.
    for (int i = ActivePairs.Count - 1; i >= 0; --i)
    {
        ReturnPair(ActivePairs.Values[i]);
        ActivePairs.FastRemove(ActivePairs.Keys[i]);
    }
    // Swap the pair sets.
    var temp = ActivePairs;
    ActivePairs = activePairsBackBuffer;
    activePairsBackBuffer = temp;
    // Check if adding the new contacts would overflow the manifold.
    if (contacts.Count + candidatesToAdd.Count > 4)
    {
        // Adding all the contacts would overflow the manifold. Reduce to the best subset.
        var reducedCandidates = new QuickList<ContactData>(BufferPools<ContactData>.Thread, 3);
        ContactReducer.ReduceContacts(contacts, ref candidatesToAdd, contactIndicesToRemove, ref reducedCandidates);
        RemoveQueuedContacts();
        for (int i = reducedCandidates.Count - 1; i >= 0; i--)
        {
            Add(ref reducedCandidates.Elements[i]);
            reducedCandidates.RemoveAt(i);
        }
        reducedCandidates.Dispose();
    }
    else if (candidatesToAdd.Count > 0)
    {
        // Won't overflow the manifold, so just toss it in.
        for (int i = 0; i < candidatesToAdd.Count; i++)
        {
            Add(ref candidatesToAdd.Elements[i]);
        }
    }
    candidatesToAdd.Dispose();
}
/// <summary>
/// Identifies the indices of points in a set which are on the outer convex hull of the set.
/// </summary>
/// <param name="points">List of points in the set.</param>
/// <param name="outputTriangleIndices">List of indices into the input point set composing the triangulated surface of the convex hull.
/// Each group of 3 indices represents a triangle on the surface of the hull.</param>
public static void GetConvexHull(ref QuickList<Vector3> points, ref QuickList<int> outputTriangleIndices)
{
    if (points.Count == 0)
    {
        throw new ArgumentException("Point set must have volume.");
    }
    var outsidePoints = new QuickList<int>(BufferPools<int>.Locking, BufferPool.GetPoolIndex(points.Count - 4));
    // Build the initial tetrahedron.
    // It will also give us the location of a point which is guaranteed to be within the
    // final convex hull. We can use this point to calibrate the winding of triangles.
    // A set of outside point candidates (all points other than those composing the tetrahedron) will be returned in the outsidePoints list.
    // That list will then be further pruned by the RemoveInsidePoints call.
    Vector3 insidePoint;
    ComputeInitialTetrahedron(ref points, ref outsidePoints, ref outputTriangleIndices, out insidePoint);
    // Compute outside points.
    RemoveInsidePoints(ref points, ref outputTriangleIndices, ref outsidePoints);
    var edges = new QuickList<int>(BufferPools<int>.Locking);
    var toRemove = new QuickList<int>(BufferPools<int>.Locking);
    var newTriangles = new QuickList<int>(BufferPools<int>.Locking);
    // We're now ready to begin the main loop: while the convex hull is incomplete...
    while (outsidePoints.Count > 0)
    {
        for (int k = 0; k < outputTriangleIndices.Count; k += 3)
        {
            // Find the normal of the triangle.
            Vector3 normal;
            FindNormal(ref outputTriangleIndices, ref points, k, out normal);
            // Get the furthest point in the direction of the normal.
            int maxIndexInOutsideList = GetExtremePoint(ref normal, ref points, ref outsidePoints);
            int maxIndex = outsidePoints.Elements[maxIndexInOutsideList];
            Vector3 maximum = points.Elements[maxIndex];
            // If the point is beyond the current triangle, continue.
            Vector3 offset = maximum - points.Elements[outputTriangleIndices.Elements[k]];
            float dot = Vector3.Dot(normal, offset);
            if (dot > 0)
            {
                // It's been picked! Remove the maximum point from the outside.
                outsidePoints.FastRemoveAt(maxIndexInOutsideList);
                // Remove any triangles that can see the point, including itself!
                edges.Clear();
                toRemove.Clear();
                for (int n = outputTriangleIndices.Count - 3; n >= 0; n -= 3)
                {
                    // Go through each triangle; if it can be seen, delete it and use MaintainEdge on its edges.
                    if (IsTriangleVisibleFromPoint(ref outputTriangleIndices, ref points, n, ref maximum))
                    {
                        // This triangle can see it!
                        //TODO: CONSIDER CONSISTENT WINDING HAPPYTIMES
                        MaintainEdge(outputTriangleIndices[n], outputTriangleIndices[n + 1], ref edges);
                        MaintainEdge(outputTriangleIndices[n], outputTriangleIndices[n + 2], ref edges);
                        MaintainEdge(outputTriangleIndices[n + 1], outputTriangleIndices[n + 2], ref edges);
                        // Because fast removals are being used, the order is very important.
                        // It's pulling indices in from the end of the list in order, and also ensuring
                        // that we never issue a removal order beyond the end of the list.
                        outputTriangleIndices.FastRemoveAt(n + 2);
                        outputTriangleIndices.FastRemoveAt(n + 1);
                        outputTriangleIndices.FastRemoveAt(n);
                    }
                }
                // Create new triangles: for each boundary edge, create a triangle with the extreme point.
                for (int n = 0; n < edges.Count; n += 2)
                {
                    newTriangles.Add(edges[n]);
                    newTriangles.Add(edges[n + 1]);
                    newTriangles.Add(maxIndex);
                }
                // Only verify the windings of the new triangles.
                VerifyWindings(ref newTriangles, ref points, ref insidePoint);
                outputTriangleIndices.AddRange(ref newTriangles);
                newTriangles.Count = 0;
                // Remove all points from outsidePoints if they are inside the polyhedron.
                RemoveInsidePoints(ref points, ref outputTriangleIndices, ref outsidePoints);
                // The list has been significantly messed with, so restart the loop.
                break;
            }
        }
    }
    outsidePoints.Dispose();
    edges.Dispose();
    toRemove.Dispose();
    newTriangles.Dispose();
}
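// Minimal usage sketch for GetConvexHull (hypothetical; the containing static class is not shown in these
// snippets, so the call is left unqualified as if made from within the same class):
var points = new QuickList<Vector3>(BufferPools<Vector3>.Locking);
// ... fill points with at least four non-coplanar vertices ...
var hullIndices = new QuickList<int>(BufferPools<int>.Locking);
GetConvexHull(ref points, ref hullIndices);
// Every consecutive triple in hullIndices indexes one triangle of the hull surface in points.
for (int i = 0; i < hullIndices.Count; i += 3)
{
    var a = points.Elements[hullIndices[i]];
    var b = points.Elements[hullIndices[i + 1]];
    var c = points.Elements[hullIndices[i + 2]];
}
points.Dispose();
hullIndices.Dispose();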
/// <summary>
/// Collects a limited set of subtrees hanging from the specified node and performs a local treelet rebuild using a bottom-up agglomerative approach.
/// </summary>
/// <param name="nodeIndex">Root of the refinement treelet.</param>
/// <param name="spareNodes">Pool of spare node indices available for reuse.</param>
/// <param name="nodesInvalidated">True if the refinement process invalidated node pointers, false otherwise.</param>
public unsafe void AgglomerativeRefine(int nodeIndex, ref QuickList<int> spareNodes, out bool nodesInvalidated)
{
    var maximumSubtrees = ChildrenCapacity * ChildrenCapacity;
    var poolIndex = BufferPool<int>.GetPoolIndex(maximumSubtrees);
    var subtrees = new QuickList<int>(BufferPools<int>.Thread, poolIndex);
    var treeletInternalNodes = new QuickList<int>(BufferPools<int>.Thread, poolIndex);
    float originalTreeletCost;
    var entries = stackalloc SubtreeHeapEntry[maximumSubtrees];
    CollectSubtrees(nodeIndex, maximumSubtrees, entries, ref subtrees, ref treeletInternalNodes, out originalTreeletCost);

    // We're going to create a little binary tree via agglomeration, and then we'll collapse it into an n-ary tree.
    // Note the size: we first put every possible subtree in, so subtrees.Count.
    // Then, we add up subtrees.Count - 1 internal nodes without removing earlier slots.
    int tempNodesCapacity = subtrees.Count * 2 - 1;
    var tempNodes = stackalloc TempNode[tempNodesCapacity];
    int tempNodeCount = subtrees.Count;
    int remainingNodesCapacity = subtrees.Count;
    var remainingNodes = stackalloc int[remainingNodesCapacity];
    int remainingNodesCount = subtrees.Count;
    for (int i = 0; i < subtrees.Count; ++i)
    {
        var tempNode = tempNodes + i;
        tempNode->A = Encode(i);
        if (subtrees.Elements[i] >= 0)
        {
            // It's an internal node, so look at the parent.
            var subtreeNode = nodes + subtrees.Elements[i];
            tempNode->BoundingBox = (&nodes[subtreeNode->Parent].A)[subtreeNode->IndexInParent];
            tempNode->LeafCount = (&nodes[subtreeNode->Parent].LeafCountA)[subtreeNode->IndexInParent];
        }
        else
        {
            // It's a leaf node, so grab the bounding box from the owning node.
            var leafIndex = Encode(subtrees.Elements[i]);
            var leaf = leaves + leafIndex;
            var parentNode = nodes + leaf->NodeIndex;
            tempNode->BoundingBox = (&parentNode->A)[leaf->ChildIndex];
            tempNode->LeafCount = 1;
        }
        // Add a reference to the remaining list.
        remainingNodes[i] = i;
    }
    while (remainingNodesCount >= 2)
    {
        // Determine which pair of subtrees has the smallest cost.
        // (Smallest absolute cost is used instead of *increase* in cost because absolute tends to move bigger objects up the tree, which is desirable.)
        float bestCost = float.MaxValue;
        int bestA = 0, bestB = 0;
        for (int i = 0; i < remainingNodesCount; ++i)
        {
            for (int j = i + 1; j < remainingNodesCount; ++j)
            {
                var nodeIndexA = remainingNodes[i];
                var nodeIndexB = remainingNodes[j];
                BoundingBox merged;
                BoundingBox.Merge(ref tempNodes[nodeIndexA].BoundingBox, ref tempNodes[nodeIndexB].BoundingBox, out merged);
                var cost = ComputeBoundsMetric(ref merged);
                if (cost < bestCost)
                {
                    bestCost = cost;
                    bestA = i;
                    bestB = j;
                }
            }
        }
        {
            // Create a new temp node based on the best pair.
            TempNode newTempNode;
            newTempNode.A = remainingNodes[bestA];
            newTempNode.B = remainingNodes[bestB];
            // Remerging here may or may not be faster than repeatedly caching 'best' candidates from above. It is a really, really cheap operation, after all, apart from cache issues.
            BoundingBox.Merge(ref tempNodes[newTempNode.A].BoundingBox, ref tempNodes[newTempNode.B].BoundingBox, out newTempNode.BoundingBox);
            newTempNode.LeafCount = tempNodes[newTempNode.A].LeafCount + tempNodes[newTempNode.B].LeafCount;
            // Remove the best options from the list.
            // bestA is always lower than bestB, so remove bestB first to avoid corrupting the bestA index.
            TempNode.FastRemoveAt(bestB, remainingNodes, ref remainingNodesCount);
            TempNode.FastRemoveAt(bestA, remainingNodes, ref remainingNodesCount);
            // Add the reference to the new node.
            var newIndex = TempNode.Add(ref newTempNode, tempNodes, ref tempNodeCount);
            remainingNodes[remainingNodesCount++] = newIndex;
        }
    }
    // The 2-ary proto-treelet is ready. Collapse it into an n-ary tree.
    const int collapseCount = ChildrenCapacity == 32 ? 4 : ChildrenCapacity == 16 ? 3 : ChildrenCapacity == 8 ? 2 : ChildrenCapacity == 4 ? 1 : 0;
    // Remember: all positive indices in the tempNodes array refer to other temp nodes: they are internal references. Encoded references point back to indices in the subtrees list.
    Debug.Assert(remainingNodesCount == 1);
    int parent = nodes[nodeIndex].Parent;
    int indexInParent = nodes[nodeIndex].IndexInParent;
    var stagingNodeCapacity = maximumSubtrees - 1;
    var stagingNodes = stackalloc Node[maximumSubtrees - 1];
    int stagingNodeCount = 0;
    float newTreeletCost;
    var stagingRootIndex = BuildStagingChild(parent, indexInParent, tempNodes, tempNodeCount - 1, collapseCount, stagingNodes, ref stagingNodeCount, out newTreeletCost);
    Debug.Assert(stagingNodeCount < stagingNodeCapacity);
    if (newTreeletCost < originalTreeletCost)
    {
        // The refinement is an actual improvement. Apply the staged nodes to real nodes!
        int nextInternalNodeIndexToUse = 0;
        ReifyStagingNodes(nodeIndex, stagingNodes, ref subtrees, ref treeletInternalNodes, ref nextInternalNodeIndexToUse, ref spareNodes, out nodesInvalidated);
        // If any nodes are left over, put them into the spares list for later reuse.
        for (int i = nextInternalNodeIndexToUse; i < treeletInternalNodes.Count; ++i)
        {
            spareNodes.Add(treeletInternalNodes.Elements[i]);
        }
    }
    else
    {
        nodesInvalidated = false;
    }
    subtrees.Dispose();
    treeletInternalNodes.Dispose();
}
private static void ComputeInitialTetrahedron(ref QuickList<Vector3> points, ref QuickList<int> outsidePointCandidates,
    ref QuickList<int> triangleIndices, out Vector3 centroid)
{
    // Find four points on the hull.
    // We'll start with using the x axis to identify two points on the hull.
    int a, b, c, d;
    Vector3 direction;
    // Find the extreme points along the x axis.
    float minimumX = float.MaxValue, maximumX = -float.MaxValue;
    int minimumXIndex = 0, maximumXIndex = 0;
    for (int i = 0; i < points.Count; ++i)
    {
        var v = points.Elements[i];
        if (v.X > maximumX)
        {
            maximumX = v.X;
            maximumXIndex = i;
        }
        else if (v.X < minimumX)
        {
            minimumX = v.X;
            minimumXIndex = i;
        }
    }
    a = minimumXIndex;
    b = maximumXIndex;
    // Check for redundancies...
    if (a == b)
        throw new ArgumentException("Point set is degenerate; convex hulls must have volume.");
    // Now, use a second axis perpendicular to the two points we found.
    Vector3 ab = points.Elements[b] - points.Elements[a];
    Vector3x.Cross(ref ab, ref Toolbox.UpVector, out direction);
    if (direction.LengthSquared() < Toolbox.Epsilon)
        Vector3x.Cross(ref ab, ref Toolbox.RightVector, out direction);
    float minimumDot, maximumDot;
    int minimumIndex, maximumIndex;
    GetExtremePoints(ref direction, ref points, out maximumDot, out minimumDot, out maximumIndex, out minimumIndex);
    // Compare the location of the extreme points to the location of the axis.
    float dot = Vector3.Dot(direction, points.Elements[a]);
    // Use the point further from the axis.
    if (Math.Abs(dot - minimumDot) > Math.Abs(dot - maximumDot))
    {
        // In this case, we should use the minimum index.
        c = minimumIndex;
    }
    else
    {
        // In this case, we should use the maximum index.
        c = maximumIndex;
    }
    // Check for redundancies...
    if (a == c || b == c)
        throw new ArgumentException("Point set is degenerate; convex hulls must have volume.");
    // Use a third axis perpendicular to the plane defined by the three unique points a, b, and c.
    Vector3 ac = points.Elements[c] - points.Elements[a];
    Vector3x.Cross(ref ab, ref ac, out direction);
    GetExtremePoints(ref direction, ref points, out maximumDot, out minimumDot, out maximumIndex, out minimumIndex);
    // Compare the location of the extreme points to the location of the plane.
    dot = Vector3.Dot(direction, points.Elements[a]);
    // Use the point further from the plane.
    if (Math.Abs(dot - minimumDot) > Math.Abs(dot - maximumDot))
    {
        // In this case, we should use the minimum index.
        d = minimumIndex;
    }
    else
    {
        // In this case, we should use the maximum index.
        d = maximumIndex;
    }
    // Check for redundancies...
    if (a == d || b == d || c == d)
        throw new ArgumentException("Point set is degenerate; convex hulls must have volume.");
    // Add the triangles.
    triangleIndices.Add(a);
    triangleIndices.Add(b);
    triangleIndices.Add(c);
    triangleIndices.Add(a);
    triangleIndices.Add(b);
    triangleIndices.Add(d);
    triangleIndices.Add(a);
    triangleIndices.Add(c);
    triangleIndices.Add(d);
    triangleIndices.Add(b);
    triangleIndices.Add(c);
    triangleIndices.Add(d);
    // The centroid is guaranteed to be within the convex hull. It will be used to verify the windings of triangles throughout the hull process.
    centroid = (points.Elements[a] + points.Elements[b] + points.Elements[c] + points.Elements[d]) * 0.25f;
    for (int i = 0; i < triangleIndices.Count; i += 3)
    {
        var vA = points.Elements[triangleIndices.Elements[i]];
        var vB = points.Elements[triangleIndices.Elements[i + 1]];
        var vC = points.Elements[triangleIndices.Elements[i + 2]];
        // Check the signed volume of a parallelepiped with the edges of this triangle and the centroid.
        Vector3 cross;
        ab = vB - vA;
        ac = vC - vA;
        Vector3x.Cross(ref ac, ref ab, out cross);
        Vector3 offset = vA - centroid;
        float volume = Vector3.Dot(offset, cross);
        // This volume/cross product could also be used to check for degeneracy, but we already tested for that.
        if (Math.Abs(volume) < Toolbox.BigEpsilon)
        {
            throw new ArgumentException("Point set is degenerate; convex hulls must have volume.");
        }
        if (volume < 0)
        {
            // If the signed volume is negative, the triangle's winding is opposite of what we want. Flip it around!
            var temp = triangleIndices.Elements[i];
            triangleIndices.Elements[i] = triangleIndices.Elements[i + 1];
            triangleIndices.Elements[i + 1] = temp;
        }
    }
    // Points which belong to the tetrahedron are guaranteed to be 'in' the convex hull. Do not allow them to be considered.
    var tetrahedronIndices = new QuickList<int>(BufferPools<int>.Locking);
    tetrahedronIndices.Add(a);
    tetrahedronIndices.Add(b);
    tetrahedronIndices.Add(c);
    tetrahedronIndices.Add(d);
    // Sort the indices to allow a linear time loop.
    Array.Sort(tetrahedronIndices.Elements, 0, 4);
    int tetrahedronIndex = 0;
    for (int i = 0; i < points.Count; ++i)
    {
        if (tetrahedronIndex < 4 && i == tetrahedronIndices[tetrahedronIndex])
        {
            // Don't add a tetrahedron index. Now that we've found this index, though, move on to the next one.
            ++tetrahedronIndex;
        }
        else
        {
            outsidePointCandidates.Add(i);
        }
    }
    tetrahedronIndices.Dispose();
}
public bool TryPop(Node* nodes, ref int remainingSubtreeSpace, ref QuickList<int> subtrees, out int index, out float cost)
{
    while (Count > 0)
    {
        // Repeatedly pop the minimum until you find one that can fit.
        // Given the unique access nature, the fact that you're destroying the heap when there's not much space left doesn't matter.
        // In the event that you consume all the nodes, that just means there aren't any entries which would fit in the subtree set anymore.
        SubtreeHeapEntry entry;
        Pop(out entry);
        var node = nodes + entry.Index;
        // Choose to expand this node, or not. Only choose to expand if its children will fit.
        // Any time a node is expanded, the existing node is removed from the set of potential subtrees stored in the priorityQueue.
        // So, the change in remainingSubtreeSpace = maximumSubtreesCount - (priorityQueue.Count + subtrees.Count) is childCount - 1.
        // This is ALWAYS the case.
        var changeInChildCount = node->ChildCount - 1;
        if (remainingSubtreeSpace >= changeInChildCount && node->RefineFlag == 0)
        {
            //Debug.Fail("don't forget to reenable the refine flag condition");
            // This node's children can be included successfully in the remaining space.
            index = entry.Index;
            cost = entry.Cost;
            remainingSubtreeSpace -= changeInChildCount;
            return true;
        }
        else
        {
            // Either this node's children did not fit, or it was a refinement target. Refinement targets cannot be expanded.
            // Since we won't be able to find this later, it needs to be added now.
            // We popped the previous entry off the queue, so the remainingSubtreeSpace does not change by re-adding it.
            // (remainingSubtreeSpace = maximumSubtreesCount - (priorityQueue.Count + subtrees.Count))
            subtrees.Add(entry.Index);
        }
    }
    index = -1;
    cost = -1;
    return false;
}
public static void TestChurnStability()
{
    var allocator = new Allocator(2048);
    var random = new Random(5);
    ulong idCounter = 0;
    var allocatedIds = new QuickList<ulong>(BufferPools<ulong>.Locking);
    var unallocatedIds = new QuickList<ulong>(BufferPools<ulong>.Locking);
    for (int i = 0; i < 512; ++i)
    {
        long start;
        var id = idCounter++;
        //allocator.ValidatePointers();
        if (allocator.Allocate(id, 1 + random.Next(5), out start))
        {
            allocatedIds.Add(id);
        }
        else
        {
            unallocatedIds.Add(id);
        }
        //allocator.ValidatePointers();
    }
    for (int timestepIndex = 0; timestepIndex < 100000; ++timestepIndex)
    {
        // First, add and remove a bunch randomly.
        for (int i = random.Next(Math.Min(allocatedIds.Count, 15)); i >= 0; --i)
        {
            var indexToRemove = random.Next(allocatedIds.Count);
            //allocator.ValidatePointers();
            Assert.IsTrue(allocator.Deallocate(allocatedIds.Elements[indexToRemove]));
            //allocator.ValidatePointers();
            unallocatedIds.Add(allocatedIds.Elements[indexToRemove]);
            allocatedIds.FastRemoveAt(indexToRemove);
        }
        for (int i = random.Next(Math.Min(unallocatedIds.Count, 15)); i >= 0; --i)
        {
            var indexToAllocate = random.Next(unallocatedIds.Count);
            long start;
            //allocator.ValidatePointers();
            if (allocator.Allocate(unallocatedIds.Elements[indexToAllocate], random.Next(3), out start))
            {
                //allocator.ValidatePointers();
                allocatedIds.Add(unallocatedIds.Elements[indexToAllocate]);
                unallocatedIds.FastRemoveAt(indexToAllocate);
            }
            //allocator.ValidatePointers();
        }
        // Check to ensure that everything's still coherent.
        for (int i = 0; i < allocatedIds.Count; ++i)
        {
            Assert.IsTrue(allocator.Contains(allocatedIds.Elements[i]));
        }
        for (int i = 0; i < unallocatedIds.Count; ++i)
        {
            Assert.IsFalse(allocator.Contains(unallocatedIds.Elements[i]));
        }
    }
    // Wind it down.
    for (int i = 0; i < allocatedIds.Count; ++i)
    {
        Assert.IsTrue(allocator.Deallocate(allocatedIds.Elements[i]));
    }
    // Confirm cleanup.
    for (int i = 0; i < allocatedIds.Count; ++i)
    {
        Assert.IsFalse(allocator.Contains(allocatedIds.Elements[i]));
    }
    for (int i = 0; i < unallocatedIds.Count; ++i)
    {
        Assert.IsFalse(allocator.Contains(unallocatedIds.Elements[i]));
    }
}
// TODO: Optimize me!
public void GetOverlaps(ref RigidTransform transform, BoundingBox boundingBox, ref QuickList<Vector3i> overlaps)
{
    Vector3 tmin, tmax;
    RigidTransform.TransformByInverse(ref boundingBox.Min, ref transform, out tmin);
    RigidTransform.TransformByInverse(ref boundingBox.Max, ref transform, out tmax);
    BoundingBox b2 = new BoundingBox(Vector3.Min(tmin, tmax), Vector3.Max(tmin, tmax));
    var min = new Vector3i { X = Math.Max(0, (int)b2.Min.X), Y = Math.Max(0, (int)b2.Min.Y), Z = Math.Max(0, (int)b2.Min.Z) };
    var max = new Vector3i { X = Math.Min(ChunkSize.X - 1, (int)b2.Max.X), Y = Math.Min(ChunkSize.Y - 1, (int)b2.Max.Y), Z = Math.Min(ChunkSize.Z - 1, (int)b2.Max.Z) };
    for (int x = min.X; x <= max.X; x++)
    {
        for (int y = min.Y; y <= max.Y; y++)
        {
            for (int z = min.Z; z <= max.Z; z++)
            {
                if (Blocks[BlockIndex(x, y, z)].Material.GetSolidity() == MaterialSolidity.FULLSOLID)
                {
                    overlaps.Add(new Vector3i { X = x, Y = y, Z = z });
                }
            }
        }
    }
}
// This works in the general case where there can be any number of contacts and candidates.
// Could specialize it as an optimization for single-contact added incremental manifolds.
/// <summary>
/// Reduces the contact manifold to a good subset.
/// </summary>
/// <param name="contacts">Contacts to reduce.</param>
/// <param name="contactCandidates">Contact candidates to include in the reduction process.</param>
/// <param name="contactsToRemove">Contacts that need to be removed to reach the reduced state.</param>
/// <param name="toAdd">Contact candidates that should be added to reach the reduced state.</param>
/// <exception cref="ArgumentException">Thrown when the set being reduced is empty.</exception>
public static void ReduceContacts(RawList<Contact> contacts, ref QuickList<ContactData> contactCandidates, RawList<int> contactsToRemove, ref QuickList<ContactData> toAdd)
{
    // Find the deepest point of all contacts/candidates, as well as a compounded 'normal' vector.
    float maximumDepth = -float.MaxValue;
    int deepestIndex = -1;
    Vector3 normal = Toolbox.ZeroVector;
    for (int i = 0; i < contacts.Count; i++)
    {
        Vector3.Add(ref normal, ref contacts.Elements[i].Normal, out normal);
        if (contacts.Elements[i].PenetrationDepth > maximumDepth)
        {
            deepestIndex = i;
            maximumDepth = contacts.Elements[i].PenetrationDepth;
        }
    }
    for (int i = 0; i < contactCandidates.Count; i++)
    {
        Vector3.Add(ref normal, ref contactCandidates.Elements[i].Normal, out normal);
        if (contactCandidates.Elements[i].PenetrationDepth > maximumDepth)
        {
            deepestIndex = contacts.Count + i;
            maximumDepth = contactCandidates.Elements[i].PenetrationDepth;
        }
    }
    // If the normals oppose each other, this can happen. It doesn't need to be normalized, but having SOME normal is necessary.
    if (normal.LengthSquared() < Toolbox.Epsilon)
    {
        if (contacts.Count > 0)
            normal = contacts.Elements[0].Normal;
        else if (contactCandidates.Count > 0)
            // This method is only called when there are too many contacts, so if contacts is empty, the candidates must NOT be empty.
            normal = contactCandidates.Elements[0].Normal;
        else
            // This method should not have been called at all if it gets here.
            throw new ArgumentException("Cannot reduce an empty contact set.");
    }
    // Find the contact (candidate) that is furthest away from the deepest contact (candidate).
    Vector3 deepestPosition;
    if (deepestIndex < contacts.Count)
        deepestPosition = contacts.Elements[deepestIndex].Position;
    else
        deepestPosition = contactCandidates.Elements[deepestIndex - contacts.Count].Position;
    float distanceSquared;
    float furthestDistance = 0;
    int furthestIndex = -1;
    for (int i = 0; i < contacts.Count; i++)
    {
        Vector3.DistanceSquared(ref contacts.Elements[i].Position, ref deepestPosition, out distanceSquared);
        if (distanceSquared > furthestDistance)
        {
            furthestDistance = distanceSquared;
            furthestIndex = i;
        }
    }
    for (int i = 0; i < contactCandidates.Count; i++)
    {
        Vector3.DistanceSquared(ref contactCandidates.Elements[i].Position, ref deepestPosition, out distanceSquared);
        if (distanceSquared > furthestDistance)
        {
            furthestDistance = distanceSquared;
            furthestIndex = contacts.Count + i;
        }
    }
    if (furthestIndex == -1)
    {
        // Either this method was called when it shouldn't have been, or all contacts and contact candidates are at the same location.
        if (contacts.Count > 0)
        {
            for (int i = 1; i < contacts.Count; i++)
            {
                contactsToRemove.Add(i);
            }
            return;
        }
        if (contactCandidates.Count > 0)
        {
            toAdd.Add(ref contactCandidates.Elements[0]);
            return;
        }
        throw new ArgumentException("Cannot reduce an empty contact set.");
    }
    Vector3 furthestPosition;
    if (furthestIndex < contacts.Count)
        furthestPosition = contacts.Elements[furthestIndex].Position;
    else
        furthestPosition = contactCandidates.Elements[furthestIndex - contacts.Count].Position;
    Vector3 xAxis;
    Vector3.Subtract(ref deepestPosition, ref furthestPosition, out xAxis);
    // Create the second axis of the 2d 'coordinate system' of the manifold.
    Vector3 yAxis;
    Vector3.Cross(ref xAxis, ref normal, out yAxis);
    // Determine the furthest points along the axis.
    float minYAxisDot = float.MaxValue, maxYAxisDot = -float.MaxValue;
    int minYAxisIndex = -1, maxYAxisIndex = -1;
    for (int i = 0; i < contacts.Count; i++)
    {
        float dot;
        Vector3.Dot(ref contacts.Elements[i].Position, ref yAxis, out dot);
        if (dot < minYAxisDot)
        {
            minYAxisIndex = i;
            minYAxisDot = dot;
        }
        if (dot > maxYAxisDot)
        {
            maxYAxisIndex = i;
            maxYAxisDot = dot;
        }
    }
    for (int i = 0; i < contactCandidates.Count; i++)
    {
        float dot;
        Vector3.Dot(ref contactCandidates.Elements[i].Position, ref yAxis, out dot);
        if (dot < minYAxisDot)
        {
            minYAxisIndex = i + contacts.Count;
            minYAxisDot = dot;
        }
        if (dot > maxYAxisDot)
        {
            maxYAxisIndex = i + contacts.Count;
            maxYAxisDot = dot;
        }
    }
    // The deepestIndex, furthestIndex, minYAxisIndex, and maxYAxisIndex are the extremal points.
    // Cycle through the existing contacts. If any DO NOT MATCH one of the extremal indices, add them to the contactsToRemove list.
    // Cycle through the candidates. If any match, add them to the toAdd list.
    // Repeated entries in the reduced manifold aren't a problem.
    // -The contacts list does not include repeats with itself.
    // -A contact is only removed if it doesn't match anything.
    // -Contact candidates do not repeat with themselves.
    // -Contact candidates do not repeat with contacts.
    // -Contact candidates are added if they match any of the indices.
    for (int i = 0; i < contactCandidates.Count; i++)
    {
        int totalIndex = i + contacts.Count;
        if (totalIndex == deepestIndex || totalIndex == furthestIndex || totalIndex == minYAxisIndex || totalIndex == maxYAxisIndex)
        {
            // This contact is present in the new manifold. Add it.
            toAdd.Add(ref contactCandidates.Elements[i]);
        }
    }
    for (int i = 0; i < contacts.Count; i++)
    {
        if (!(i == deepestIndex || i == furthestIndex || i == minYAxisIndex || i == maxYAxisIndex))
        {
            // This contact is not present in the new manifold. Remove it.
            contactsToRemove.Add(i);
        }
    }
}
public void GetOverlaps(Vector3 gridPosition, BoundingBox boundingBox, ref QuickList<Int3> overlaps)
{
    Vector3.Subtract(ref boundingBox.Min, ref gridPosition, out boundingBox.Min);
    Vector3.Subtract(ref boundingBox.Max, ref gridPosition, out boundingBox.Max);
    var inverseWidth = 1f / CellWidth;
    var min = new Int3
    {
        X = Math.Max(0, (uint)(boundingBox.Min.X * inverseWidth)),
        Y = Math.Max(0, (uint)(boundingBox.Min.Y * inverseWidth)),
        Z = Math.Max(0, (uint)(boundingBox.Min.Z * inverseWidth))
    };
    var max = new Int3
    {
        X = Math.Min(VoxelSector.ZVOXELBLOCSIZE_X - 1, (uint)(boundingBox.Max.X * inverseWidth)),
        Y = Math.Min(VoxelSector.ZVOXELBLOCSIZE_Y - 1, (uint)(boundingBox.Max.Y * inverseWidth)),
        Z = Math.Min(VoxelSector.ZVOXELBLOCSIZE_Z - 1, (uint)(boundingBox.Max.Z * inverseWidth))
    };
    for (uint i = min.X; i <= max.X; ++i)
    {
        for (uint j = min.Y; j <= max.Y; ++j)
        {
            for (uint k = min.Z; k <= max.Z; ++k)
            {
                uint offset = i * VoxelSector.ZVOXELBLOCSIZE_Y + j + k * VoxelSector.ZVOXELBLOCSIZE_X * VoxelSector.ZVOXELBLOCSIZE_Y;
                if (Cells[offset] != VoxelShape.Empty)
                {
                    overlaps.Add(new Int3 { X = i, Y = j, Z = k });
                }
            }
        }
    }
}
protected override void ProcessCandidates(ref QuickList<ContactData> candidates)
{
    // If the candidates list is empty, check whether the convex is in the 'thickness' of the terrain.
    if (candidates.Count == 0 && terrain.thickness > 0)
    {
        RayHit rayHit;
        Ray ray = new Ray { Position = convex.worldTransform.Position, Direction = terrain.worldTransform.LinearTransform.Up };
        ray.Direction.Normalize();
        // The raycast has to use double-sidedness, since we're casting from the bottom up.
        if (terrain.Shape.RayCast(ref ray, terrain.thickness, ref terrain.worldTransform, TriangleSidedness.DoubleSided, out rayHit))
        {
            // Found a hit!
            rayHit.Normal.Normalize();
            float dot;
            Vector3.Dot(ref ray.Direction, ref rayHit.Normal, out dot);
            var newContact = new ContactData
            {
                Normal = rayHit.Normal,
                Position = convex.worldTransform.Position,
                Id = 2,
                PenetrationDepth = -rayHit.T * dot + convex.Shape.MinimumRadius
            };
            newContact.Validate();
            bool found = false;
            for (int i = 0; i < contacts.Count; i++)
            {
                if (contacts.Elements[i].Id == 2)
                {
                    // As set above, an id of 2 corresponds to a contact created from this raycast process.
                    contacts.Elements[i].Normal = newContact.Normal;
                    contacts.Elements[i].Position = newContact.Position;
                    contacts.Elements[i].PenetrationDepth = newContact.PenetrationDepth;
                    supplementData.Elements[i].BasePenetrationDepth = newContact.PenetrationDepth;
                    supplementData.Elements[i].LocalOffsetA = new Vector3();
                    supplementData.Elements[i].LocalOffsetB = ray.Position; // Convex local position in mesh.
                    found = true;
                    break;
                }
            }
            if (!found)
                candidates.Add(ref newContact);
        }
    }
}
unsafe void CollectNodesForMultithreadedRefit(int nodeIndex, int multithreadingLeafCountThreshold, ref QuickList<int> refitAndMarkTargets,
    int refinementLeafCountThreshold, ref QuickList<int> refinementCandidates)
{
    var node = nodes + nodeIndex;
    var children = &node->ChildA;
    var leafCounts = &node->LeafCountA;
    Debug.Assert(node->RefineFlag == 0);
    for (int i = 0; i < node->ChildCount; ++i)
    {
        if (children[i] >= 0)
        {
            // Each node stores how many children are involved in the multithreaded refit.
            // This allows the postphase to climb the tree in a thread safe way.
            ++node->RefineFlag;
            if (leafCounts[i] <= multithreadingLeafCountThreshold)
            {
                if (leafCounts[i] <= refinementLeafCountThreshold)
                {
                    // It's possible that a wavefront node is this high in the tree, so it has to be captured here because the postpass won't find it.
                    refinementCandidates.Add(children[i]);
                    // Encoding the child index tells the thread to use RefitAndMeasure instead of RefitAndMark since this was a wavefront node.
                    refitAndMarkTargets.Add(Encode(children[i]));
                }
                else
                {
                    refitAndMarkTargets.Add(children[i]);
                }
            }
            else
            {
                CollectNodesForMultithreadedRefit(children[i], multithreadingLeafCountThreshold, ref refitAndMarkTargets, refinementLeafCountThreshold, ref refinementCandidates);
            }
        }
    }
}
public unsafe void BinnedRefine(int nodeIndex, ref QuickList<int> subtreeReferences, int maximumSubtrees,
    ref QuickList<int> treeletInternalNodes, ref QuickList<int> spareNodes, ref BinnedResources resources, out bool nodesInvalidated)
{
    Debug.Assert(subtreeReferences.Count == 0, "The subtree references list should be empty since it's about to get filled.");
    Debug.Assert(subtreeReferences.Elements.Length >= maximumSubtrees, "Subtree references list should have a backing array large enough to hold all possible subtrees.");
    Debug.Assert(treeletInternalNodes.Count == 0, "The treelet internal nodes list should be empty since it's about to get filled.");
    Debug.Assert(treeletInternalNodes.Elements.Length >= maximumSubtrees - 1, "Internal nodes queue should have a backing array large enough to hold all possible treelet internal nodes.");
    float originalTreeletCost;
    CollectSubtrees(nodeIndex, maximumSubtrees, resources.SubtreeHeapEntries, ref subtreeReferences, ref treeletInternalNodes, out originalTreeletCost);
    Debug.Assert(subtreeReferences.Count <= maximumSubtrees);

    //Gather necessary information from nodes.
    for (int i = 0; i < subtreeReferences.Count; ++i)
    {
        resources.IndexMap[i] = i;
        if (subtreeReferences.Elements[i] >= 0)
        {
            //It's an internal node.
            var subtreeNode = nodes + subtreeReferences.Elements[i];
            var parentNode = nodes + subtreeNode->Parent;
            resources.BoundingBoxes[i] = (&parentNode->A)[subtreeNode->IndexInParent];
            //Min + Max is an unnormalized centroid; binning only needs relative positions, so the 0.5 scale is omitted.
            resources.Centroids[i] = resources.BoundingBoxes[i].Min + resources.BoundingBoxes[i].Max;
            resources.LeafCounts[i] = (&parentNode->LeafCountA)[subtreeNode->IndexInParent];
        }
        else
        {
            //It's a leaf node.
            var leaf = leaves + Encode(subtreeReferences.Elements[i]);
            resources.BoundingBoxes[i] = (&nodes[leaf->NodeIndex].A)[leaf->ChildIndex];
            resources.Centroids[i] = resources.BoundingBoxes[i].Min + resources.BoundingBoxes[i].Max;
            resources.LeafCounts[i] = 1;
        }
    }

    var node = nodes + nodeIndex;
    int parent = node->Parent;
    int indexInParent = node->IndexInParent;

    //Now perform a top-down sweep build.
    //TODO: this staging creation section is really the only part that is sweep-specific. The rest is common to any other kind of subtree-collection based refinement.
    //If you end up making others, keep this in mind.
    int stagingNodeCount = 0;
    float newTreeletCost;
    CreateStagingNodeBinned(ref resources, 0, subtreeReferences.Count, ref stagingNodeCount, out newTreeletCost);
    //Copy the refine flag over from the treelet root so that it persists.
    resources.StagingNodes[0].RefineFlag = node->RefineFlag;

    //ValidateStaging(stagingNodes, sweepSubtrees, ref subtreeReferences, parent, indexInParent);

    //TODO: the treelet cost comparison is currently disabled; every refinement is applied
    //even when newTreeletCost does not beat originalTreeletCost.
    if (true) //newTreeletCost < originalTreeletCost
    {
        //The refinement is an actual improvement.
        //Apply the staged nodes to real nodes!
        int nextInternalNodeIndexToUse = 0;
        ReifyStagingNodes(nodeIndex, resources.StagingNodes, ref subtreeReferences, ref treeletInternalNodes, ref nextInternalNodeIndexToUse, ref spareNodes, out nodesInvalidated);
        //If any nodes are left over, put them into the spares list for later reuse.
        for (int i = nextInternalNodeIndexToUse; i < treeletInternalNodes.Count; ++i)
        {
            spareNodes.Add(treeletInternalNodes.Elements[i]);
        }
    }
    else
    {
        nodesInvalidated = false;
    }
}
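// A quick check of the "unnormalized centroid" shortcut above (Min + Max, no 0.5 factor):
// bin classification only depends on where a centroid falls relative to the centroid bounds,
// so scaling every centroid by the same constant lands each box in the same bin. The helper
// below is a hypothetical stand-in for the binning step, not the engine's actual code.
using System;

static class CentroidScaleSketch
{
    static int BinIndex(float centroid, float min, float max, int binCount)
    {
        //Maps a centroid in [min, max] to a bin; scaling all three inputs by the
        //same constant leaves the normalized position, and thus the bin, unchanged.
        var normalized = (centroid - min) / (max - min);
        return Math.Min(binCount - 1, (int)(normalized * binCount));
    }

    static void Main()
    {
        Console.WriteLine(BinIndex(3f, 0f, 10f, 8)); //2
        Console.WriteLine(BinIndex(6f, 0f, 20f, 8)); //2: everything doubled, same bin
    }
}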
unsafe void ValidateStaging(Node* stagingNodes, int stagingNodeIndex, ref QuickList<int> subtreeNodePointers, ref QuickList<int> collectedSubtreeReferences,
    ref QuickList<int> internalReferences, out int foundSubtrees, out int foundLeafCount)
{
    var stagingNode = stagingNodes + stagingNodeIndex;
    var children = &stagingNode->ChildA;
    var leafCounts = &stagingNode->LeafCountA;
    foundSubtrees = foundLeafCount = 0;
    for (int i = 0; i < stagingNode->ChildCount; ++i)
    {
        if (children[i] >= 0)
        {
            int childFoundSubtrees, childFoundLeafCount;
            if (internalReferences.Contains(children[i]))
                throw new Exception("A child points to an internal node that was already visited. Possible loop, or just generally invalid.");
            internalReferences.Add(children[i]);
            ValidateStaging(stagingNodes, children[i], ref subtreeNodePointers, ref collectedSubtreeReferences, ref internalReferences, out childFoundSubtrees, out childFoundLeafCount);
            if (childFoundLeafCount != leafCounts[i])
                throw new Exception("Bad leaf count.");
            foundSubtrees += childFoundSubtrees;
            foundLeafCount += childFoundLeafCount;
        }
        else
        {
            var subtreeNodePointerIndex = Encode(children[i]);
            var subtreeNodePointer = subtreeNodePointers.Elements[subtreeNodePointerIndex];
            //Rather than looking up the shuffled SweepSubtree for information, just go back to the source.
            if (subtreeNodePointer >= 0)
            {
                var node = nodes + subtreeNodePointer;
                var totalLeafCount = 0;
                for (int childIndex = 0; childIndex < node->ChildCount; ++childIndex)
                {
                    totalLeafCount += (&node->LeafCountA)[childIndex];
                }
                if (leafCounts[i] != totalLeafCount)
                    throw new Exception("Bad leaf count.");
                foundLeafCount += totalLeafCount;
            }
            else
            {
                //The subtree is a single leaf.
                if (leafCounts[i] != 1)
                    throw new Exception("Bad leaf count.");
                foundLeafCount += 1;
            }
            ++foundSubtrees;
            collectedSubtreeReferences.Add(subtreeNodePointer);
        }
    }
}
unsafe float RefitAndMark(int index, int leafCountThreshold, ref QuickList<int> refinementCandidates, ref BoundingBox boundingBox)
{
    Debug.Assert(leafCountThreshold > 1);
    var node = nodes + index;
    Debug.Assert(node->ChildCount >= 2);
    Debug.Assert(node->RefineFlag == 0);
    float childChange = 0;
    var premetric = ComputeBoundsMetric(ref boundingBox);
    //The wavefront of internal nodes is defined by the transition from more than threshold to less than threshold.
    //Add them to a list of refinement candidates.
    //Note that leaves are not included, since they can't be refinement candidates.
#if NODE2
    //This specialized two-child path must be mutually exclusive with the generic loop below;
    //running both would refit each child twice and duplicate refinement candidates.
    if (node->ChildA >= 0)
    {
        if (node->LeafCountA <= leafCountThreshold)
        {
            refinementCandidates.Add(node->ChildA);
            childChange += RefitAndMeasure(node->ChildA, ref node->A);
        }
        else
        {
            childChange += RefitAndMark(node->ChildA, leafCountThreshold, ref refinementCandidates, ref node->A);
        }
    }
    if (node->ChildB >= 0)
    {
        if (node->LeafCountB <= leafCountThreshold)
        {
            refinementCandidates.Add(node->ChildB);
            childChange += RefitAndMeasure(node->ChildB, ref node->B);
        }
        else
        {
            childChange += RefitAndMark(node->ChildB, leafCountThreshold, ref refinementCandidates, ref node->B);
        }
    }
#endif
    //Initializes boundingBox for both paths; the generic loop below re-merges A and B harmlessly.
    BoundingBox.Merge(ref node->A, ref node->B, out boundingBox);
#if !NODE2
    var bounds = &node->A;
    var children = &node->ChildA;
    var leafCounts = &node->LeafCountA;
    for (int i = 0; i < node->ChildCount; ++i)
    {
        if (children[i] >= 0)
        {
            if (leafCounts[i] <= leafCountThreshold)
            {
                //The wavefront of internal nodes is defined by the transition from more than threshold to less than threshold.
                //Since we don't traverse into these children, there is no need to check the parent's leaf count.
                refinementCandidates.Add(children[i]);
                childChange += RefitAndMeasure(children[i], ref bounds[i]);
            }
            else
            {
                childChange += RefitAndMark(children[i], leafCountThreshold, ref refinementCandidates, ref bounds[i]);
            }
        }
        //Leaves are not considered members of the wavefront. They're not *refinement candidates* since they're not internal nodes.
        BoundingBox.Merge(ref bounds[i], ref boundingBox, out boundingBox);
    }
#endif
    var postmetric = ComputeBoundsMetric(ref boundingBox);
    return postmetric - premetric + childChange; //TODO: Would clamp provide better results?
}
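// RefitAndMark accumulates postmetric - premetric per node. ComputeBoundsMetric isn't shown
// in this file; a typical choice for this kind of tree, and an assumption here, is the box's
// surface area, the usual surface area heuristic (SAH) quantity. A minimal sketch:
using System;
using System.Numerics;

static class BoundsMetricSketch
{
    //Surface area of an axis-aligned box: 2 * (xy + yz + zx).
    static float ComputeBoundsMetric(Vector3 min, Vector3 max)
    {
        var span = max - min;
        return 2 * (span.X * span.Y + span.Y * span.Z + span.Z * span.X);
    }

    static void Main()
    {
        //A box that grows during refit produces a positive metric change.
        float before = ComputeBoundsMetric(Vector3.Zero, new Vector3(1, 1, 1));
        float after = ComputeBoundsMetric(Vector3.Zero, new Vector3(2, 1, 1));
        Console.WriteLine(after - before); //4: cost increase attributed to this node
    }
}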
unsafe void ValidateStaging(Node* stagingNodes, ref QuickList<int> subtreeNodePointers, int treeletParent, int treeletIndexInParent)
{
    int foundSubtrees, foundLeafCount;
    QuickList<int> collectedSubtreeReferences = new QuickList<int>(BufferPools<int>.Thread);
    QuickList<int> internalReferences = new QuickList<int>(BufferPools<int>.Thread);
    internalReferences.Add(0);
    ValidateStaging(stagingNodes, 0, ref subtreeNodePointers, ref collectedSubtreeReferences, ref internalReferences, out foundSubtrees, out foundLeafCount);
    if (treeletParent < -1 || treeletParent >= nodeCount)
        throw new Exception("Bad treelet parent.");
    if (treeletIndexInParent < -1 || (treeletParent >= 0 && treeletIndexInParent >= nodes[treeletParent].ChildCount))
        throw new Exception("Bad treelet index in parent.");
    if (treeletParent >= 0 && (&nodes[treeletParent].LeafCountA)[treeletIndexInParent] != foundLeafCount)
    {
        throw new Exception("Bad leaf count.");
    }
    if (subtreeNodePointers.Count != foundSubtrees)
    {
        throw new Exception("Bad subtree found count.");
    }
    //The two sets must match exactly: every pointer was collected, and everything collected is a pointer.
    for (int i = 0; i < collectedSubtreeReferences.Count; ++i)
    {
        if (!subtreeNodePointers.Contains(collectedSubtreeReferences[i]) || !collectedSubtreeReferences.Contains(subtreeNodePointers[i]))
            throw new Exception("Bad subtree reference.");
    }
    collectedSubtreeReferences.Dispose();
    internalReferences.Dispose();
}
unsafe float RefitAndMark(int leafCountThreshold, ref QuickList<int> refinementCandidates)
{
    if (nodes->ChildCount < 2)
    {
        Debug.Assert(nodes->ChildA < 0, "If there's only one child, it should be a leaf.");
        //If there's only a leaf (or no children), then there's no internal nodes capable of changing in volume, so there's no relevant change in cost.
        return 0;
    }
    var bounds = &nodes->A;
    var children = &nodes->ChildA;
    var leafCounts = &nodes->LeafCountA;
    float childChange = 0;
    BoundingBox merged = new BoundingBox { Min = new Vector3(float.MaxValue), Max = new Vector3(float.MinValue) };
    for (int i = 0; i < nodes->ChildCount; ++i)
    {
        //Note: these conditions mean the root will never be considered a wavefront node. That's acceptable;
        //it will be included regardless.
        if (children[i] >= 0)
        {
            if (leafCounts[i] <= leafCountThreshold)
            {
                //The wavefront of internal nodes is defined by the transition from more than threshold to less than threshold.
                //Since we don't traverse into these children, there is no need to check the parent's leaf count.
                refinementCandidates.Add(children[i]);
                childChange += RefitAndMeasure(children[i], ref bounds[i]);
            }
            else
            {
                childChange += RefitAndMark(children[i], leafCountThreshold, ref refinementCandidates, ref bounds[i]);
            }
        }
        BoundingBox.Merge(ref bounds[i], ref merged, out merged);
    }
    var postmetric = ComputeBoundsMetric(ref merged);
    //Note that the root's own change is not included.
    //This cost change is used to determine whether or not to refine.
    //Since refines are unable to change the volume of the root, there's
    //no point in including it in the volume change.
    //It does, however, normalize the child volume changes into a cost metric.
    if (postmetric >= 1e-10)
    {
        return childChange / postmetric;
    }
    return 0;
}
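// A quick numeric sketch of the normalization on the return path above: the summed child
// volume change is divided by the root's bounds metric (with the same 1e-10 guard), so the
// same absolute growth is significant in a small tree and negligible in a huge one.
using System;

static class NormalizedCostSketch
{
    static float CostChange(float childChange, float rootMetric) =>
        rootMetric >= 1e-10f ? childChange / rootMetric : 0;

    static void Main()
    {
        Console.WriteLine(CostChange(5f, 10f));    //0.5: significant relative growth
        Console.WriteLine(CostChange(5f, 10000f)); //0.0005: same growth, negligible here
    }
}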
public unsafe void CollectSubtrees(int nodeIndex, int maximumSubtrees, SubtreeHeapEntry* entries, ref QuickList<int> subtrees, ref QuickList<int> internalNodes, out float treeletCost)
{
    //Collect subtrees iteratively by choosing the highest surface area subtree repeatedly.
    //This collects every child of a given node at once- the set of subtrees must not include only SOME of the children of a node.
    //(You could lift this restriction and only take some nodes, but it would complicate things. You could not simply remove
    //the parent and add its children to go deeper; it would require doing some post-fixup on the results of the construction
    //or perhaps constraining the generation process to leave room for the unaffected nodes.)
    var node = nodes + nodeIndex;
    Debug.Assert(maximumSubtrees >= node->ChildCount, "Can't only consider some of a node's children, but specified maximumSubtrees precludes the treelet root's children.");
    //All of the treelet root's children are included immediately. (Follows from the above requirement.)
    var priorityQueue = new SubtreeBinaryHeap(entries);
    priorityQueue.Insert(node, nodes, ref subtrees);
    //Note that the treelet root is NOT added to the internal nodes list.
    //Note that the treelet root's cost is excluded from the treeletCost.
    //That's because the treelet root cannot change.
    treeletCost = 0;
    int highestIndex;
    float highestCost;
    int remainingSubtreeSpace = maximumSubtrees - priorityQueue.Count - subtrees.Count;
    while (priorityQueue.TryPop(nodes, ref remainingSubtreeSpace, ref subtrees, out highestIndex, out highestCost))
    {
        treeletCost += highestCost;
        internalNodes.Add(highestIndex);
        //Add all the children to the set of subtrees.
        //This is safe because we pre-validated the number of children in the node.
        var expandedNode = nodes + highestIndex;
        priorityQueue.Insert(expandedNode, nodes, ref subtrees);
    }
    //Anything left in the queue when the budget runs out is frozen as a subtree.
    for (int i = 0; i < priorityQueue.Count; ++i)
    {
        subtrees.Add(priorityQueue.Entries[i].Index);
    }
    //Sort the internal nodes so that the depth first builder will tend to produce less cache-scrambled results.
    Array.Sort(internalNodes.Elements, 0, internalNodes.Count);
}
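// A toy analogy for the greedy collection above, not the engine's pooled SubtreeBinaryHeap:
// repeatedly expand the frontier node with the highest cost while its children still fit the
// subtree budget, otherwise freeze it as a subtree. Node and its float Cost are hypothetical.
using System;
using System.Collections.Generic;

static class CollectSubtreesSketch
{
    sealed class Node
    {
        public float Cost;
        public Node[] Children = Array.Empty<Node>();
    }

    static List<Node> Collect(Node root, int maximumSubtrees)
    {
        var subtrees = new List<Node>();
        var frontier = new PriorityQueue<Node, float>();
        //All of the root's children are included immediately, mirroring the code above.
        foreach (var child in root.Children)
            frontier.Enqueue(child, -child.Cost); //negate: PriorityQueue pops the minimum
        while (frontier.Count > 0)
        {
            var node = frontier.Peek();
            //Expanding replaces one frontier node with its children; only do so if the result still fits.
            if (node.Children.Length > 0 &&
                subtrees.Count + frontier.Count + node.Children.Length - 1 <= maximumSubtrees)
            {
                frontier.Dequeue();
                foreach (var child in node.Children)
                    frontier.Enqueue(child, -child.Cost);
            }
            else
            {
                subtrees.Add(frontier.Dequeue()); //frozen: becomes a subtree leaf
            }
        }
        return subtrees;
    }

    static void Main()
    {
        var root = new Node
        {
            Children = new[]
            {
                new Node { Cost = 5, Children = new[] { new Node { Cost = 2 }, new Node { Cost = 1 } } },
                new Node { Cost = 3 }
            }
        };
        Console.WriteLine(Collect(root, 3).Count); //3: the costliest child was expanded first
    }
}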
public unsafe int RefitAndRefine(int frameIndex, float refineAggressivenessScale = 1, float cacheOptimizeAggressivenessScale = 1)
{
    //Don't proceed if the tree is empty.
    if (leafCount == 0)
        return 0;
    var pool = BufferPools<int>.Locking;

    int maximumSubtrees, estimatedRefinementCandidateCount, leafCountThreshold;
    GetRefitAndMarkTuning(out maximumSubtrees, out estimatedRefinementCandidateCount, out leafCountThreshold);
    var refinementCandidates = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(estimatedRefinementCandidateCount));
    //Collect the refinement candidates.
    var costChange = RefitAndMark(leafCountThreshold, ref refinementCandidates);

    int targetRefinementCount, period, offset;
    GetRefineTuning(frameIndex, refinementCandidates.Count, refineAggressivenessScale, costChange, 1, out targetRefinementCount, out period, out offset);

    var refinementTargets = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(targetRefinementCount));
    int actualRefinementTargetsCount = 0;
    int index = offset;
    //Pick candidates by striding through the list with wraparound; the last slot is reserved for the root below.
    for (int i = 0; i < targetRefinementCount - 1; ++i)
    {
        index += period;
        if (index >= refinementCandidates.Count)
            index -= refinementCandidates.Count;
        Debug.Assert(index < refinementCandidates.Count && index >= 0);
        refinementTargets.Elements[actualRefinementTargetsCount++] = refinementCandidates.Elements[index];
        nodes[refinementCandidates.Elements[index]].RefineFlag = 1;
    }
    refinementTargets.Count = actualRefinementTargetsCount;
    refinementCandidates.Count = 0;
    refinementCandidates.Dispose();
    //Always refine the root if it isn't already claimed.
    if (nodes->RefineFlag == 0)
    {
        refinementTargets.Add(0);
        nodes->RefineFlag = 1;
        ++actualRefinementTargetsCount;
    }

    //Refine all marked targets.
    var spareNodes = new QuickList<int>(pool, 8);
    var subtreeReferences = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    var treeletInternalNodes = new QuickList<int>(pool, BufferPool<int>.GetPoolIndex(maximumSubtrees));
    int[] buffer;
    MemoryRegion region;
    BinnedResources resources;
    CreateBinnedResources(pool, maximumSubtrees, out buffer, out region, out resources);
    for (int i = 0; i < refinementTargets.Count; ++i)
    {
        subtreeReferences.Count = 0;
        treeletInternalNodes.Count = 0;
        bool nodesInvalidated;
        BinnedRefine(refinementTargets.Elements[i], ref subtreeReferences, maximumSubtrees, ref treeletInternalNodes, ref spareNodes, ref resources, out nodesInvalidated);
        //TODO: Should this be moved into a post-loop? It could permit some double work, but that's not terrible.
        //It's not invalid from a multithreading perspective, either- setting the refine flag to zero is essentially an unlock.
        //If other threads don't see it updated due to cache issues, it doesn't really matter- it's not a signal or anything like that.
        nodes[refinementTargets.Elements[i]].RefineFlag = 0;
    }

    RemoveUnusedInternalNodes(ref spareNodes);
    region.Dispose();
    pool.GiveBack(buffer);
    spareNodes.Dispose();
    subtreeReferences.Count = 0;
    subtreeReferences.Dispose();
    treeletInternalNodes.Count = 0;
    treeletInternalNodes.Dispose();
    refinementTargets.Count = 0;
    refinementTargets.Dispose();

    var cacheOptimizeCount = GetCacheOptimizeTuning(maximumSubtrees, costChange, cacheOptimizeAggressivenessScale);
    //We could wrap around. But we could also not do that because it doesn't really matter!
    var startIndex = (int)(((long)frameIndex * cacheOptimizeCount) % nodeCount);
    var end = Math.Min(NodeCount, startIndex + cacheOptimizeCount);
    for (int i = startIndex; i < end; ++i)
    {
        IncrementalCacheOptimize(i);
    }
    return actualRefinementTargetsCount;
}
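// A small sketch of the period/offset target selection above: walk the candidate list with a
// fixed stride, wrapping around, so refinement work spreads across frames instead of always
// hitting the same region of the tree. (Like the original, a single subtraction assumes the
// stride stays below twice the candidate count.)
using System;

static class StridedSelectionSketch
{
    static int[] SelectTargets(int candidateCount, int targetCount, int period, int offset)
    {
        var targets = new int[targetCount];
        int index = offset;
        for (int i = 0; i < targetCount; ++i)
        {
            index += period;
            if (index >= candidateCount)
                index -= candidateCount; //wrap, same as the loop above
            targets[i] = index;
        }
        return targets;
    }

    static void Main()
    {
        //10 candidates, 4 targets, stride 3 starting at offset 0.
        Console.WriteLine(string.Join(", ", SelectTargets(10, 4, 3, 0))); //3, 6, 9, 2
    }
}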
public override void Update(double dt)
{
    //Refresh the existing contacts against the current transforms.
    RigidTransform transform = new RigidTransform(mesh.Position);
    RigidTransform convexTransform = convex.WorldTransform;
    ContactRefresher.ContactRefresh(contacts, supplementData, ref convexTransform, ref transform, contactIndicesToRemove);
    RemoveQueuedContacts();

    //Collect the voxel cells overlapped by the convex's bounding box.
    var overlaps = new QuickList<Vector3i>(BufferPools<Vector3i>.Thread);
    mesh.ChunkShape.GetOverlaps(mesh.Position, convex.BoundingBox, ref overlaps);

    var candidatesToAdd = new QuickList<ContactData>(BufferPools<ContactData>.Thread, BufferPool<int>.GetPoolIndex(overlaps.Count));
    for (int i = 0; i < overlaps.Count; i++)
    {
        GeneralConvexPairTester manifold;
        if (!ActivePairs.TryGetValue(overlaps.Elements[i], out manifold))
        {
            //This pair is new this frame.
            manifold = GetPair(ref overlaps.Elements[i]);
        }
        else
        {
            //The pair was already active; pull it out so the stale sweep below skips it.
            ActivePairs.FastRemove(overlaps.Elements[i]);
        }
        activePairsBackBuffer.Add(overlaps.Elements[i], manifold);
        ContactData contactCandidate;
        if (manifold.GenerateContactCandidate(out contactCandidate))
        {
            candidatesToAdd.Add(ref contactCandidate);
        }
    }
    overlaps.Dispose();

    //Any pair still in ActivePairs was not touched this frame; return it, then swap buffers.
    for (int i = ActivePairs.Count - 1; i >= 0; i--)
    {
        ReturnPair(ActivePairs.Values[i]);
        ActivePairs.FastRemove(ActivePairs.Keys[i]);
    }
    var temp = ActivePairs;
    ActivePairs = activePairsBackBuffer;
    activePairsBackBuffer = temp;

    //The manifold caps out at four contacts; reduce if the candidates would overflow it.
    if (contacts.Count + candidatesToAdd.Count > 4)
    {
        var reducedCandidates = new QuickList<ContactData>(BufferPools<ContactData>.Thread, 3);
        ContactReducer.ReduceContacts(contacts, ref candidatesToAdd, contactIndicesToRemove, ref reducedCandidates);
        RemoveQueuedContacts();
        for (int i = reducedCandidates.Count - 1; i >= 0; i--)
        {
            Add(ref reducedCandidates.Elements[i]);
            reducedCandidates.RemoveAt(i);
        }
        reducedCandidates.Dispose();
    }
    else if (candidatesToAdd.Count > 0)
    {
        for (int i = 0; i < candidatesToAdd.Count; i++)
        {
            Add(ref candidatesToAdd.Elements[i]);
        }
    }
    candidatesToAdd.Dispose();
}
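// The pair bookkeeping above is a double-buffer sweep: pairs touched this frame move to a back
// buffer, anything left in the front buffer is stale and gets returned, then the buffers swap.
// A dictionary-based toy version of that pattern; the string "pair" values stand in for the
// pooled GeneralConvexPairTester objects.
using System;
using System.Collections.Generic;

static class PairCacheSketch
{
    static Dictionary<int, string> active = new();
    static Dictionary<int, string> backBuffer = new();

    static void Frame(int[] overlaps)
    {
        foreach (var key in overlaps)
        {
            if (!active.Remove(key, out var pair))
                pair = $"pair{key}"; //new this frame; the real code pulls from a pool
            backBuffer.Add(key, pair);
        }
        //Anything still in 'active' was not touched this frame: stale.
        Console.WriteLine($"stale: {active.Count}");
        active.Clear();
        (active, backBuffer) = (backBuffer, active);
    }

    static void Main()
    {
        Frame(new[] { 1, 2, 3 }); //stale: 0
        Frame(new[] { 2, 3, 4 }); //stale: 1 (pair 1 expired)
    }
}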
// TODO: Optimize me!
public void GetOverlaps(Vector3 gridPosition, BoundingBox boundingBox, ref QuickList<Vector3i> overlaps)
{
    //Bring the bounding box into chunk-local space.
    BoundingBox b2 = new BoundingBox();
    Vector3.Subtract(ref boundingBox.Min, ref gridPosition, out b2.Min);
    Vector3.Subtract(ref boundingBox.Max, ref gridPosition, out b2.Max);
    //Note the order: cast to int first (truncating toward zero), then clamp into the chunk.
    var min = new Vector3i { X = Math.Max(0, (int)b2.Min.X), Y = Math.Max(0, (int)b2.Min.Y), Z = Math.Max(0, (int)b2.Min.Z) };
    var max = new Vector3i { X = Math.Min(CHUNK_SIZE - 1, (int)b2.Max.X), Y = Math.Min(CHUNK_SIZE - 1, (int)b2.Max.Y), Z = Math.Min(CHUNK_SIZE - 1, (int)b2.Max.Z) };
    for (int x = min.X; x <= max.X; x++)
    {
        for (int y = min.Y; y <= max.Y; y++)
        {
            for (int z = min.Z; z <= max.Z; z++)
            {
                //Only fully solid blocks generate collision pairs.
                if (Blocks[BlockIndex(x, y, z)].Material.GetSolidity() == MaterialSolidity.FULLSOLID)
                {
                    overlaps.Add(new Vector3i { X = x, Y = y, Z = z });
                }
            }
        }
    }
}
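// The clamp order here (signed (int) cast first, then Math.Max) is the safe one, and the reason
// the uint grid version above needed fixing: (int) truncates toward zero and the clamp catches
// negatives afterward, while casting a negative float straight to uint is an overflowing
// conversion whose unchecked result the C# spec leaves unspecified. A tiny demonstration:
using System;

static class ClampCastSketch
{
    static void Main()
    {
        float f = -2.7f;
        Console.WriteLine(Math.Max(0, (int)f)); //0: truncates to -2, the clamp rescues it
        unchecked
        {
            Console.WriteLine((uint)f); //overflowing conversion: result unspecified by the spec
        }
    }
}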