/// <summary>
/// Ensures a vertex region exists in the cache for the given mesh id.
/// Returns true if a new region (and pending upload) was created, false if the mesh was already cached.
/// </summary>
/// <param name="id">Identifier of the mesh to allocate room for.</param>
/// <param name="vertexCount">Number of vertices to allocate room for.</param>
/// <param name="start">Start index of the allocated region in the vertex buffer.</param>
/// <param name="vertices">Slice of the vertex buffer covering the allocated region.</param>
public unsafe bool Allocate(ulong id, int vertexCount, out int start, out Buffer<Vector3> vertices)
{
    //A cached mesh needs no new allocation; the existing region is handed back directly.
    if (TryGetExistingMesh(id, out start, out vertices))
    {
        return false;
    }
    if (allocator.Allocate(id, vertexCount, out var allocationStart))
    {
        //The allocation fit within the current buffer; just queue an upload for the new region.
        start = (int)allocationStart;
        vertices = this.vertices.Slice(start, vertexCount);
        pendingUploads.Add(new UploadRequest { Start = start, Count = vertexCount }, Pool);
        return true;
    }
    //Didn't fit; grow the buffer to the next power of 2 able to hold the existing data plus the new mesh.
    var preservedCount = TriangleBuffer.Capacity + vertexCount;
    var newCapacity = 1 << SpanHelper.GetContainingPowerOf2(preservedCount);
    Pool.ResizeToAtLeast(ref this.vertices, newCapacity, preservedCount);
    allocator.Capacity = newCapacity;
    allocator.Allocate(id, vertexCount, out allocationStart);
    start = (int)allocationStart;
    vertices = this.vertices.Slice(start, vertexCount);
    //A resize forces an upload of everything, so any previous pending uploads are unnecessary.
    pendingUploads.Count = 0;
    pendingUploads.Add(new UploadRequest { Start = 0, Count = preservedCount }, Pool);
    return true;
}
//Registers each of a constraint's body references in the body -> constraint-reference mapping.
//NOTE(review): typeId is unused in the visible body — presumably consumed elsewhere or vestigial; confirm against the full file.
//NOTE(review): the method's closing brace lies outside this excerpt; only the for loop is closed here.
unsafe void Allocate<TBodyReferenceGetter>(int constraintHandle, ref int constraintBodyHandles, int bodyCount, Bodies bodies, int typeId, BufferPool pool,
    TBodyReferenceGetter bodyReferenceGetter, int minimumBodyCapacity, int minimumReferenceCapacity)
    where TBodyReferenceGetter : struct, IBodyReferenceGetter
{
    var fallbackPool = pool.SpecializeFor<FallbackReference>();
    var intPool = pool.SpecializeFor<int>();
    //GetContainingPowerOf2 yields a power of 2 exponent; QuickSet.Create takes its initial capacity in that form.
    var minimumReferencePower = SpanHelper.GetContainingPowerOf2(minimumReferenceCapacity);
    //Make room for every body in this constraint on top of whatever is already stored.
    EnsureCapacity(Math.Max(bodyConstraintReferences.Count + bodyCount, minimumBodyCapacity), pool);
    for (int i = 0; i < bodyCount; ++i)
    {
        var bodyReference = bodyReferenceGetter.GetBodyReference(bodies, Unsafe.Add(ref constraintBodyHandles, i));
        var bodyAlreadyListed = bodyConstraintReferences.GetTableIndices(ref bodyReference, out var tableIndex, out var elementIndex);
        //If an entry for this body does not yet exist, we'll create one.
        if (!bodyAlreadyListed)
        {
            elementIndex = bodyConstraintReferences.Count;
        }
        ref var constraintReferences = ref bodyConstraintReferences.Values[elementIndex];
        if (!bodyAlreadyListed)
        {
            //The body is not already contained. Create a list for it.
            QuickSet<FallbackReference, Buffer<FallbackReference>, Buffer<int>, FallbackReferenceComparer>.Create(fallbackPool, intPool, minimumReferencePower, 2, out constraintReferences);
            bodyConstraintReferences.Keys[elementIndex] = bodyReference;
            //Table entries appear to be stored with a +1 offset (zero meaning empty) — confirm against GetTableIndices' contract.
            bodyConstraintReferences.Table[tableIndex] = elementIndex + 1;
            ++bodyConstraintReferences.Count;
        }
        var fallbackReference = new FallbackReference { ConstraintHandle = constraintHandle, IndexInConstraint = i };
        constraintReferences.Add(ref fallbackReference, fallbackPool, intPool);
    }
/// <summary>
/// Appends a single character, growing the backing array when it is full.
/// </summary>
/// <param name="character">Character to append.</param>
void Add(char character)
{
    if (characters.Length == count)
    {
        //BUGFIX: GetContainingPowerOf2 returns a power of 2 *exponent* (see its other uses in this source,
        //e.g. '1 << GetContainingPowerOf2(...)'). Passing the raw exponent to Array.Resize would shrink the
        //array to ~log2(count * 2) elements; shift to get the actual capacity.
        Array.Resize(ref characters, 1 << SpanHelper.GetContainingPowerOf2(count * 2));
    }
    characters[count++] = character;
}
/// <summary>
/// Resizes the dictionary's backing spans to hold at least the given number of elements.
/// </summary>
/// <param name="newSize">Minimum number of elements the resized spans must hold.</param>
/// <param name="keyPool">Pool used for key spans.</param>
/// <param name="valuePool">Pool used for value spans.</param>
/// <param name="tablePool">Pool used for table spans.</param>
public void Resize<TKeyPool, TValuePool, TTablePool>(int newSize, TKeyPool keyPool, TValuePool valuePool, TTablePool tablePool)
    where TKeyPool : IMemoryPool<TKey, TKeySpan>
    where TValuePool : IMemoryPool<TValue, TValueSpan>
    where TTablePool : IMemoryPool<int, TTableSpan>
{
    //ResizeForPower works in power of 2 exponents; round the requested element count up.
    var targetPower = SpanHelper.GetContainingPowerOf2(newSize);
    ResizeForPower(targetPower, keyPool, valuePool, tablePool);
}
/// <summary>
/// Ensures a vertex region exists for the given mesh id.
/// Returns true if a new region (and pending upload) was created, false if the allocation already existed.
/// </summary>
/// <param name="id">Identifier of the mesh to allocate room for.</param>
/// <param name="vertexCount">Number of vertices to allocate room for.</param>
/// <param name="start">Start index of the allocated region in the vertex buffer.</param>
/// <param name="vertices">Slice of the vertex buffer covering the allocated region.</param>
public unsafe bool Allocate(ulong id, int vertexCount, out int start, out Buffer<Vector3> vertices)
{
    if (allocator.TryGetAllocationRegion(id, out var allocation))
    {
        Debug.Assert(allocation.End - allocation.Start == vertexCount,
            "If you're trying to allocate room for a bunch of triangles and we found it already, it better match the expected size.");
        start = (int)allocation.Start;
        vertices = this.vertices.Slice(start, vertexCount);
        return false;
    }
    if (allocator.Allocate(id, vertexCount, out var longStart))
    {
        start = (int)longStart;
        vertices = this.vertices.Slice(start, vertexCount);
        pendingUploads.Add(new UploadRequest { Start = start, Count = vertexCount }, Pool.SpecializeFor<UploadRequest>());
        return true;
    }
    //Didn't fit. We need to resize.
    var copyCount = TriangleBuffer.Capacity + vertexCount;
    //BUGFIX: GetContainingPowerOf2 returns a power of 2 *exponent*, not an element count. Using the raw
    //exponent as the new size would shrink the buffer to ~log2(copyCount) elements; shift to get the actual
    //capacity, matching the other Allocate variant in this source.
    var newSize = 1 << SpanHelper.GetContainingPowerOf2(copyCount);
    Pool.Resize(ref this.vertices, newSize, copyCount);
    allocator.Capacity = newSize;
    allocator.Allocate(id, vertexCount, out longStart);
    start = (int)longStart;
    vertices = this.vertices.Slice(start, vertexCount);
    //A resize forces an upload of everything, so any previous pending uploads are unnecessary.
    pendingUploads.Count = 0;
    pendingUploads.Add(new UploadRequest { Start = 0, Count = copyCount }, Pool.SpecializeFor<UploadRequest>());
    return true;
}
/// <summary>
/// Constructs a profiler with stage and start-timestamp dictionaries preallocated for the given stage count.
/// </summary>
/// <param name="initialStageCount">Number of stages to preallocate room for.</param>
public SimulationProfiler(int initialStageCount = 8)
{
    //Both dictionaries share the same initial capacity, expressed as a power of 2 exponent.
    var initialPower = SpanHelper.GetContainingPowerOf2(initialStageCount);
    QuickDictionary<object, double, Array<object>, Array<double>, Array<int>, ReferenceComparer<object>>.Create(
        objectPool, doublePool, intPool, initialPower, 3, out stages);
    QuickDictionary<object, long, Array<object>, Array<long>, Array<int>, ReferenceComparer<object>>.Create(
        objectPool, longPool, intPool, initialPower, 3, out startTimeStamps);
}
/// <summary>
/// Constructs a pair cache and initializes the overlap mapping.
/// </summary>
/// <param name="pool">Pool to pull cache resources from.</param>
/// <param name="minimumMappingSize">Minimum capacity of the pair-to-pointers mapping.</param>
/// <param name="minimumPendingSize">Minimum capacity of pending caches.</param>
/// <param name="minimumPerTypeCapacity">Minimum capacity of per-type caches.</param>
public PairCache(BufferPool pool, int minimumMappingSize = 2048, int minimumPendingSize = 128, int minimumPerTypeCapacity = 128)
{
    this.minimumPendingSize = minimumPendingSize;
    this.minimumPerTypeCapacity = minimumPerTypeCapacity;
    this.pool = pool;
    //The mapping's initial capacity is given as a power of 2 exponent; round the requested size up.
    var mappingPower = SpanHelper.GetContainingPowerOf2(minimumMappingSize);
    var pairPool = pool.SpecializeFor<CollidablePair>();
    var pointerPool = pool.SpecializeFor<CollidablePairPointers>();
    var tablePool = pool.SpecializeFor<int>();
    OverlapMapping.Create(pairPool, pointerPool, tablePool, mappingPower, 3, out Mapping);
}
/// <summary>
/// Compacts the internal buffer to the minimum size required for the number of elements in the queue.
/// </summary>
/// <typeparam name="TPool">Type of the pool to pull from if necessary.</typeparam>
/// <param name="pool">Pool to pull from if necessary.</param>
public void Compact<TPool>(TPool pool) where TPool : IMemoryPool<T, TSpan>
{
    Validate();
    //Only bother resizing when the current span isn't already at the minimal containing power of 2 size.
    var targetPower = SpanHelper.GetContainingPowerOf2(Count);
    if (Span.Length != (1 << targetPower))
    {
        ResizeForPower(targetPower, pool);
    }
}
/// <summary>
/// Resizes the set's backing spans to hold at least the given number of elements,
/// preserving the current ratio between the element span and table span sizes.
/// </summary>
/// <param name="newSize">Minimum number of elements the resized spans must hold.</param>
/// <param name="pool">Pool used for element spans.</param>
/// <param name="tablePool">Pool used for table spans.</param>
public void Resize<TPool, TTablePool>(int newSize, TPool pool, TTablePool tablePool)
    where TPool : IMemoryPool<T, TSpan>
    where TTablePool : IMemoryPool<int, TTableSpan>
{
    //The table-to-element size ratio is expressed as a difference of power of 2 exponents and carried through the resize.
    var elementPower = SpanHelper.GetContainingPowerOf2(Span.Length);
    var tablePower = SpanHelper.GetContainingPowerOf2(Table.Length);
    ResizeForPower(SpanHelper.GetContainingPowerOf2(newSize), tablePower - elementPower, pool, tablePool);
}
/// <summary>
/// Resizes the dictionary's backing spans to hold at least the given number of elements,
/// preserving the current ratio between the key span and table span sizes.
/// </summary>
/// <param name="newSize">Minimum number of elements the resized spans must hold.</param>
/// <param name="keyPool">Pool used for key spans.</param>
/// <param name="valuePool">Pool used for value spans.</param>
/// <param name="tablePool">Pool used for table spans.</param>
public void Resize<TKeyPool, TValuePool, TTablePool>(int newSize, TKeyPool keyPool, TValuePool valuePool, TTablePool tablePool)
    where TKeyPool : IMemoryPool<TKey, TKeySpan>
    where TValuePool : IMemoryPool<TValue, TValueSpan>
    where TTablePool : IMemoryPool<int, TTableSpan>
{
    //The table-to-key size ratio is expressed as a difference of power of 2 exponents and carried through the resize.
    var keyPower = SpanHelper.GetContainingPowerOf2(Keys.Length);
    var tablePower = SpanHelper.GetContainingPowerOf2(Table.Length);
    ResizeForPower(SpanHelper.GetContainingPowerOf2(newSize), tablePower - keyPower, keyPool, valuePool, tablePool);
}
/// <summary>
/// Constructs a ray batcher for the broad phase and initializes its backing resources.
/// </summary>
/// <param name="pool">Pool to pull resources from.</param>
/// <param name="broadPhase">Broad phase whose active and static trees will be tested by batched rays.</param>
/// <param name="rayTester">Ray tester used to test leaves found by the broad phase tree traversals.</param>
/// <param name="batcherRayCapacity">Maximum number of rays to execute in each traversal.
/// This should typically be chosen as the highest value which avoids spilling data out of L2 cache.</param>
public BroadPhaseRayBatcher(BufferPool pool, BroadPhase broadPhase, TRayTester rayTester, int batcherRayCapacity = 2048)
{
    activeTester = new LeafTester { Leaves = broadPhase.activeLeaves, RayTester = rayTester };
    staticTester = new LeafTester { Leaves = broadPhase.staticLeaves, RayTester = rayTester };
    this.broadPhase = broadPhase;
    //2 * log2(larger tree's leaf count), floored at 8, is passed as the batcher's preallocation estimate —
    //presumably an expected tree depth; confirm against RayBatcher's constructor documentation.
    var largerLeafCount = Math.Max(broadPhase.StaticTree.LeafCount, broadPhase.ActiveTree.LeafCount);
    var depthEstimate = Math.Max(8, 2 * SpanHelper.GetContainingPowerOf2(largerLeafCount));
    batcher = new RayBatcher(pool, batcherRayCapacity, depthEstimate);
}
/// <summary>
/// Shrinks the internal buffers to the smallest acceptable size and releases the old buffers to the pools.
/// </summary>
/// <param name="pool">Pool used for element spans.</param>
/// <param name="tablePool">Pool used for table spans.</param>
/// <typeparam name="TPool">Type of the pool used for element spans.</typeparam>
/// <typeparam name="TTablePool">Type of the pool used for table spans.</typeparam>
public void Compact<TPool, TTablePool>(TPool pool, TTablePool tablePool)
    where TPool : IMemoryPool<T, TSpan>
    where TTablePool : IMemoryPool<int, TTableSpan>
{
    Validate();
    //Only resize when the current span isn't already at the minimal containing power of 2 size.
    var targetPower = SpanHelper.GetContainingPowerOf2(Count);
    if (Span.Length != (1 << targetPower))
    {
        Resize(Count, pool, tablePool);
    }
}
/// <summary>
/// Appends a substring of the given text to the builder, growing the backing array if needed.
/// </summary>
/// <param name="text">Text to copy characters from.</param>
/// <param name="start">Index of the first character to copy.</param>
/// <param name="count">Number of characters to copy.</param>
/// <returns>This builder, for chaining.</returns>
public TextBuilder Append(string text, int start, int count)
{
    var newCount = this.count + count;
    if (newCount > characters.Length)
    {
        //BUGFIX: GetContainingPowerOf2 returns a power of 2 *exponent* (see its other uses in this source,
        //e.g. '1 << GetContainingPowerOf2(...)'). Passing the raw exponent to Array.Resize would shrink the
        //array to ~log2(newCount) elements; shift to get a capacity that actually contains newCount.
        Array.Resize(ref characters, 1 << SpanHelper.GetContainingPowerOf2(newCount));
    }
    int end = start + count;
    for (int i = start; i < end; ++i)
    {
        characters[this.count++] = text[i];
    }
    return this;
}
/// <summary>
/// Shrinks the internal buffers to the smallest acceptable size and releases the old buffers to the pools.
/// </summary>
/// <param name="keyPool">Pool used for key spans.</param>
/// <param name="valuePool">Pool used for value spans.</param>
/// <param name="tablePool">Pool used for table spans.</param>
/// <typeparam name="TKeyPool">Type of the pool used for key spans.</typeparam>
/// <typeparam name="TValuePool">Type of the pool used for value spans.</typeparam>
/// <typeparam name="TTablePool">Type of the pool used for table spans.</typeparam>
public void Compact<TKeyPool, TValuePool, TTablePool>(TKeyPool keyPool, TValuePool valuePool, TTablePool tablePool)
    where TKeyPool : IMemoryPool<TKey, TKeySpan>
    where TValuePool : IMemoryPool<TValue, TValueSpan>
    where TTablePool : IMemoryPool<int, TTableSpan>
{
    Validate();
    //Only resize when the key span isn't already at the minimal containing power of 2 size.
    var targetPower = SpanHelper.GetContainingPowerOf2(Count);
    if (Keys.Length != (1 << targetPower))
    {
        Resize(Count, keyPool, valuePool, tablePool);
    }
}
//Microbenchmark comparing QuickSort against LSB and MSB radix sorts over pooled int buffers.
//Timings are written to the console; VerifySort (defined elsewhere in this file) checks the results are ordered.
//NOTE(review): unlike the other sort Test() in this source, keys/keys3/keys4 are not restored from the
//pristine copies before each of the 16 timing passes, so passes after the first sort already-sorted data —
//likely unintended; confirm against the sibling benchmark.
public static void Test()
{
    const int elementCount = 65536;
    const int elementExclusiveUpperBound = 1 << 16;
    var bufferPool = new BufferPool();
    bufferPool.Take<int>(elementCount, out var keys);
    bufferPool.Take<int>(elementCount, out var indexMap);
    bufferPool.Take<int>(elementCount, out var keys2);
    bufferPool.Take<int>(elementCount, out var indexMap2);
    bufferPool.Take<int>(elementCount, out var keys3);
    bufferPool.Take<int>(elementCount, out var indexMap3);
    bufferPool.Take<int>(elementCount, out var keys4);
    bufferPool.Take<int>(elementCount, out var indexMap4);
    //Fixed seed keeps runs comparable.
    Random random = new Random(5);
    for (int iteration = 0; iteration < 4; ++iteration)
    {
        for (int i = 0; i < elementCount; ++i)
        {
            indexMap[i] = i;
            //keys[i] = i / (elementCount / elementExclusiveUpperBound);
            //keys[i] = i % elementExclusiveUpperBound;
            //keys[i] = i;
            keys[i] = random.Next(elementExclusiveUpperBound);
        }
        //Each sort variant gets its own copy of the same data so the comparison is apples to apples.
        keys.CopyTo(0, keys2, 0, elementCount);
        keys.CopyTo(0, keys3, 0, elementCount);
        keys.CopyTo(0, keys4, 0, elementCount);
        indexMap.CopyTo(0, indexMap2, 0, elementCount);
        indexMap.CopyTo(0, indexMap3, 0, elementCount);
        indexMap.CopyTo(0, indexMap4, 0, elementCount);
        var timer = Stopwatch.StartNew();
        var keysScratch = new int[elementCount];
        var valuesScratch = new int[elementCount];
        var bucketCounts = new int[1024];
        for (int t = 0; t < 16; ++t)
        {
            var comparer = new Comparer();
            timer.Restart();
            QuickSort.Sort(ref keys[0], ref indexMap[0], 0, elementCount - 1, ref comparer);
            //QuickSort.Sort2(ref keys[0], ref indexMap[0], 0, elementCount - 1, ref comparer);
            timer.Stop();
            VerifySort(ref keys);
            Console.WriteLine($"QuickSort time (ms): {timer.Elapsed.TotalSeconds * 1e3}");
            //timer.Restart();
            //Array.Sort(keys2.Memory, indexMap2.Memory, 0, elementCount);
            //timer.Stop();
            //VerifySort(ref keys2);
            //Console.WriteLine($"Array.Sort time (ms): {timer.Elapsed.TotalSeconds * 1e3}");
            timer.Restart();
            Array.Clear(bucketCounts, 0, bucketCounts.Length);
            LSBRadixSort.SortU16(ref keys3[0], ref indexMap3[0], ref keysScratch[0], ref valuesScratch[0], ref bucketCounts[0], elementCount);
            timer.Stop();
            VerifySort(ref keys3);
            Console.WriteLine($"{t} LSBRadixSort time (ms): {timer.Elapsed.TotalSeconds * 1e3}");
            var originalIndices = new int[256];
            timer.Restart();
            //MSBRadixSort.SortU32(ref keys4[0], ref indexMap4[0], ref bucketCounts[0], ref originalIndices[0], elementCount, 24);
            //GetContainingPowerOf2 supplies the key bit count (16 here) as the sort's starting bit level.
            MSBRadixSort.SortU32(ref keys4[0], ref indexMap4[0], elementCount, SpanHelper.GetContainingPowerOf2(elementExclusiveUpperBound));
            timer.Stop();
            VerifySort(ref keys4);
            Console.WriteLine($"{t} MSBRadixSort time (ms): {timer.Elapsed.TotalSeconds * 1e3}");
        }
    }
    bufferPool.Clear();
}
/// <summary>
/// Resizes the backing spans to hold at least the given number of elements.
/// </summary>
/// <param name="newSize">Minimum number of elements the resized spans must hold.</param>
/// <param name="pool">Pool used for element spans.</param>
/// <param name="tablePool">Pool used for table spans.</param>
public void Resize<TPool, TTablePool>(int newSize, TPool pool, TTablePool tablePool)
    where TPool : IMemoryPool<T, TSpan>
    where TTablePool : IMemoryPool<int, TTableSpan>
{
    //ResizeForPower works in power of 2 exponents; round the requested element count up.
    var targetPower = SpanHelper.GetContainingPowerOf2(newSize);
    ResizeForPower(targetPower, pool, tablePool);
}
//Microbenchmark comparing QuickSort (over fixed Buffer views) against LSB and MSB radix sorts.
//NOTE(review): threadDispatcher is created but never used or disposed in the visible body — confirm intent.
public static void Test()
{
    //Local correctness check: asserts the keys are in nondecreasing order.
    void VerifySort<TSpan>(TSpan keys) where TSpan : ISpan<int>
    {
        for (int i = 1; i < keys.Length; ++i)
        {
            Debug.Assert(keys[i] >= keys[i - 1]);
        }
    }
    const int elementCount = 65536;
    const int elementExclusiveUpperBound = int.MaxValue;// 1 << 5;
    var bufferPool = new BufferPool();
    var threadDispatcher = new SimpleThreadDispatcher(8);
    for (int iteration = 0; iteration < 4; ++iteration)
    {
        //Force a full collection so GC noise doesn't land inside the timed regions.
        GC.Collect(3, GCCollectionMode.Forced, true);
        var keys = new Array<int>(new int[elementCount]);
        var indexMap = new Array<int>(new int[elementCount]);
        //Fixed seed keeps runs comparable.
        Random random = new Random(5);
        for (int i = 0; i < elementCount; ++i)
        {
            indexMap[i] = i;
            //keys[i] = i / (elementCount / elementExclusiveUpperBound);
            //keys[i] = i % elementExclusiveUpperBound;
            //keys[i] = i;
            keys[i] = random.Next(elementExclusiveUpperBound);
        }
        var keys2 = new Array<int>(new int[elementCount]);
        var indexMap2 = new Array<int>(new int[elementCount]);
        var keys3 = new Array<int>(new int[elementCount]);
        var indexMap3 = new Array<int>(new int[elementCount]);
        var keys4 = new Array<int>(new int[elementCount]);
        var indexMap4 = new Array<int>(new int[elementCount]);
        keys.CopyTo(0, ref keys2, 0, elementCount);
        keys.CopyTo(0, ref keys3, 0, elementCount);
        keys.CopyTo(0, ref keys4, 0, elementCount);
        indexMap.CopyTo(0, ref indexMap2, 0, elementCount);
        indexMap.CopyTo(0, ref indexMap3, 0, elementCount);
        indexMap.CopyTo(0, ref indexMap4, 0, elementCount);
        var timer = Stopwatch.StartNew();
        var keysScratch = new int[elementCount];
        var valuesScratch = new int[elementCount];
        var bucketCounts = new int[1024];
        for (int t = 0; t < 16; ++t)
        {
            //Restore the unsorted data so each timing pass sorts the same input.
            keys2.CopyTo(0, ref keys, 0, elementCount);
            unsafe
            {
                var comparer = new Comparer();
                fixed (int* keysPointer = keys.Memory)
                fixed (int* valuesPointer = indexMap.Memory)
                {
                    var keysBuffer = new Buffer<int>(keysPointer, keys.Length);
                    var valuesBuffer = new Buffer<int>(valuesPointer, indexMap.Length);
                    timer.Restart();
                    QuickSort.Sort(ref keysBuffer[0], ref valuesBuffer[0], 0, elementCount - 1, ref comparer);
                }
            }
            //QuickSort.Sort2(ref keys[0], ref indexMap[0], 0, elementCount - 1, ref comparer);
            timer.Stop();
            VerifySort(keys);
            Console.WriteLine($"QuickSort time (ms): {timer.Elapsed.TotalSeconds * 1e3}");
            //keys.CopyTo(0, ref keys2, 0, elementCount);
            //timer.Restart();
            //Array.Sort(keys2.Memory, indexMap2.Memory, 0, elementCount);
            //timer.Stop();
            //VerifySort(keys2);
            //Console.WriteLine($"Array.Sort time (ms): {timer.Elapsed.TotalSeconds * 1e3}");
            //NOTE(review): keys was just sorted by QuickSort above, so keys3 receives *sorted* input here
            //(likewise keys4 below); if the radix sorts are meant to see the unsorted data, this should
            //presumably copy from keys2 — confirm.
            keys.CopyTo(0, ref keys3, 0, elementCount);
            timer.Restart();
            Array.Clear(bucketCounts, 0, bucketCounts.Length);
            LSBRadixSort.SortU16(ref keys3[0], ref indexMap3[0], ref keysScratch[0], ref valuesScratch[0], ref bucketCounts[0], elementCount);
            timer.Stop();
            VerifySort(keys3);
            Console.WriteLine($"{t} LSBRadixSort time (ms): {timer.Elapsed.TotalSeconds * 1e3}");
            keys.CopyTo(0, ref keys4, 0, elementCount);
            var originalIndices = new int[256];
            timer.Restart();
            //MSBRadixSort.SortU32(ref keys4[0], ref indexMap4[0], ref bucketCounts[0], ref originalIndices[0], elementCount, 24);
            MSBRadixSort.SortU32(ref keys4[0], ref indexMap4[0], elementCount, SpanHelper.GetContainingPowerOf2(elementExclusiveUpperBound));
            timer.Stop();
            VerifySort(keys4);
            Console.WriteLine($"{t} MSBRadixSort time (ms): {timer.Elapsed.TotalSeconds * 1e3}");
        }
    }
}
//Microbenchmark of QuickDictionary<CollidablePair, int, ...> lookup cost under a narrow-phase-like
//insert/blast-cache/lookup pattern. Reports average nanoseconds per lookup; the accumulator is printed
//to keep the lookups from being optimized away.
public static void Test()
{
    //Retained hash/equality sanity checks for CollidablePairComparer (order independence and inequality):
    //var random = new Random(5);
    //var comparer = new CollidablePairComparer();
    //for (int i = 0; i < 10000; ++i)
    //{
    //    var a = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //    var b = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //    var pair1 = new CollidablePair(a, b);
    //    var pair2 = new CollidablePair(b, a);
    //    Debug.Assert(comparer.Hash(ref pair1) == comparer.Hash(ref pair2));
    //    Debug.Assert(comparer.Equals(ref pair1, ref pair2));
    //}
    //for (int i = 0; i < 10000; ++i)
    //{
    //    var a = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //    var b = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //    var pair1 = new CollidablePair(a, b);
    //    CollidablePair pair2;
    //    do
    //    {
    //        var a2 = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //        var b2 = new CollidableReference((CollidableMobility)random.Next(3), random.Next(1 << 30));
    //        pair2 = new CollidablePair(a2, b2);
    //    } while (
    //        (pair2.A.Packed == pair1.A.Packed && pair2.B.Packed == pair1.B.Packed) ||
    //        (pair2.B.Packed == pair1.A.Packed && pair2.A.Packed == pair1.B.Packed));
    //    Debug.Assert(!comparer.Equals(ref pair1, ref pair2));
    //}
    const int iterationCount = 1000;
    const int perLayerCollidableCount = 900;
    const int layerCount = 10;
    int[] creationRemap = new int[perLayerCollidableCount * (layerCount - 1)];
    int[] lookupRemap = new int[creationRemap.Length];
    for (int i = 0; i < creationRemap.Length; ++i)
    {
        creationRemap[i] = i;
        lookupRemap[i] = i;
    }
    //Capacity is given as a power of 2 exponent containing the full pair count.
    QuickDictionary<CollidablePair, int, Array<CollidablePair>, Array<int>, Array<int>, CollidablePairComparer>.Create(
        new PassthroughArrayPool<CollidablePair>(), new PassthroughArrayPool<int>(), new PassthroughArrayPool<int>(),
        SpanHelper.GetContainingPowerOf2(creationRemap.Length), 1, out var dictionary);
    var random = new Random(5);
    //Shuffle both remaps independently so insertion and lookup orders are decorrelated.
    for (int i = 0; i < creationRemap.Length - 1; ++i)
    {
        {
            var temp = creationRemap[i];
            var swapTarget = random.Next(i + 1, creationRemap.Length);
            creationRemap[i] = creationRemap[swapTarget];
            creationRemap[swapTarget] = temp;
        }
        {
            var temp = lookupRemap[i];
            var swapTarget = random.Next(i + 1, lookupRemap.Length);
            lookupRemap[i] = lookupRemap[swapTarget];
            lookupRemap[swapTarget] = temp;
        }
    }
    int accumulator = 0;
    double totalTime = 0;
    const int warmupIterations = 128;
    for (int iterationIndex = 0; iterationIndex < iterationCount + warmupIterations; ++iterationIndex)
    {
        dictionary.Clear();
        //Populate the dictionary in shuffled order.
        for (int i = 0; i < creationRemap.Length; ++i)
        {
            var index = creationRemap[i];
            var pair = new CollidablePair
            {
                A = new CollidableReference(CollidableMobility.Kinematic, index),
                B = new CollidableReference(CollidableMobility.Dynamic, index + perLayerCollidableCount)
            };
            dictionary.AddUnsafely(ref pair, ref index);
        }
        //Evict the dictionary from cache so lookups pay realistic memory costs.
        CacheBlaster.Blast();
        //Prewarm the remap into cache to more closely mirror the behavior in the narrow phase.
        for (int i = 0; i < lookupRemap.Length; ++i)
        {
            accumulator += lookupRemap[i];
        }
        var start = Stopwatch.GetTimestamp();
        for (int i = 0; i < lookupRemap.Length; ++i)
        {
            var collidableIndex = lookupRemap[i];
            var pair = new CollidablePair
            {
                A = new CollidableReference(CollidableMobility.Kinematic, collidableIndex),
                B = new CollidableReference(CollidableMobility.Dynamic, collidableIndex + perLayerCollidableCount)
            };
            var dictionaryIndex = dictionary.IndexOf(ref pair);
            accumulator += dictionaryIndex;
        }
        var end = Stopwatch.GetTimestamp();
        //Only count iterations after warmup toward the reported average.
        if (iterationIndex >= warmupIterations)
        {
            totalTime += (end - start) / (double)Stopwatch.Frequency;
        }
    }
    //NOTE(review): the label says '(ms)' but the value is scaled by 1e9 (nanoseconds) — confirm which is intended.
    Console.WriteLine($"Time per lookup (ns): {1e9 * totalTime / (iterationCount * creationRemap.Length)}, acc{accumulator}");
}