public void Flush(IThreadDispatcher threadDispatcher = null)
{
    var deterministic = threadDispatcher != null && Simulation.Deterministic;
    OnPreflush(threadDispatcher, deterministic);
    //var start = Stopwatch.GetTimestamp();
    flushJobs = new QuickList<NarrowPhaseFlushJob>(128, Pool);
    PairCache.PrepareFlushJobs(ref flushJobs);
    var removalBatchJobCount = ConstraintRemover.CreateFlushJobs(deterministic);
    //Note that we explicitly add the constraint remover jobs here.
    //The constraint remover can be used in two ways- sleeper style, and narrow phase style.
    //In sleeping, we're not actually removing constraints from the simulation completely, so it requires fewer jobs.
    //The constraint remover just lets you choose which jobs to call. The narrow phase needs all of them.
    flushJobs.EnsureCapacity(flushJobs.Count + removalBatchJobCount + 4, Pool);
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintsFromBodyLists });
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.ReturnConstraintHandles });
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintFromBatchReferencedHandles });
    if (Solver.ActiveSet.Batches.Count > Solver.FallbackBatchThreshold)
    {
        flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintsFromFallbackBatch });
    }
    for (int i = 0; i < removalBatchJobCount; ++i)
    {
        flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintFromTypeBatch, Index = i });
    }
    if (threadDispatcher == null)
    {
        for (int i = 0; i < flushJobs.Count; ++i)
        {
            ExecuteFlushJob(ref flushJobs[i], Pool);
        }
    }
    else
    {
        flushJobIndex = -1;
        this.threadDispatcher = threadDispatcher;
        threadDispatcher.DispatchWorkers(flushWorkerLoop);
        this.threadDispatcher = null;
    }
    //var end = Stopwatch.GetTimestamp();
    //Console.WriteLine($"Flush stage 3 time (us): {1e6 * (end - start) / Stopwatch.Frequency}");
    flushJobs.Dispose(Pool);
    PairCache.Postflush();
    ConstraintRemover.Postflush();
    OnPostflush(threadDispatcher);
}
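The dispatch pattern used above (run the flush jobs inline when no dispatcher is provided, otherwise let worker threads pull job indices from a shared counter, as flushWorkerLoop does with flushJobIndex) can be illustrated with a small self-contained sketch. The names and the Task-based dispatcher below are illustrative stand-ins, not BEPU's actual types.

// A minimal sketch of "build a flat job list, then execute inline or via claiming workers".
// Everything here is hypothetical scaffolding; only the counter-claiming idea mirrors the code above.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

static class JobDispatchSketch
{
    static int jobIndex;

    static void Worker(IReadOnlyList<Action> jobs)
    {
        while (true)
        {
            //Each worker claims the next unclaimed job; starting at -1 makes the first claim yield index 0.
            var index = Interlocked.Increment(ref jobIndex);
            if (index >= jobs.Count)
                break;
            jobs[index]();
        }
    }

    public static void Dispatch(IReadOnlyList<Action> jobs, int workerCount)
    {
        if (workerCount <= 1)
        {
            //Single threaded fallback, mirroring the threadDispatcher == null branch above.
            for (int i = 0; i < jobs.Count; ++i)
                jobs[i]();
            return;
        }
        jobIndex = -1;
        var workers = new Task[workerCount];
        for (int i = 0; i < workerCount; ++i)
            workers[i] = Task.Run(() => Worker(jobs));
        Task.WaitAll(workers);
    }
}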
ref TypeBatch CreateNewTypeBatch(int typeId, TypeProcessor typeProcessor, int initialCapacity, BufferPool pool)
{
    var newIndex = TypeBatches.Count;
    TypeBatches.EnsureCapacity(TypeBatches.Count + 1, pool.SpecializeFor<TypeBatch>());
    TypeIndexToTypeBatchIndex[typeId] = newIndex;
    ref var typeBatch = ref TypeBatches.AllocateUnsafely();
internal unsafe TypeBatch* CreateNewTypeBatch(int typeId, TypeProcessor typeProcessor, int initialCapacity, BufferPool pool)
{
    Debug.Assert(typeProcessor != null, "Can't create a type batch for a nonexistent type processor. Did you forget to call Solver.Register<T> for the constraint type?");
    var newIndex = TypeBatches.Count;
    TypeBatches.EnsureCapacity(TypeBatches.Count + 1, pool);
    TypeIndexToTypeBatchIndex[typeId] = newIndex;
    ref var typeBatch = ref TypeBatches.AllocateUnsafely();
public void Flush(IThreadDispatcher threadDispatcher = null, bool deterministic = false)
{
    OnPreflush(threadDispatcher, deterministic);
    //var start = Stopwatch.GetTimestamp();
    var jobPool = Pool.SpecializeFor<NarrowPhaseFlushJob>();
    QuickList<NarrowPhaseFlushJob, Buffer<NarrowPhaseFlushJob>>.Create(jobPool, 128, out flushJobs);
    PairCache.PrepareFlushJobs(ref flushJobs);
    //We indirectly pass the determinism state; it's used by the constraint remover bookkeeping.
    this.deterministic = deterministic;
    var removalBatchJobCount = ConstraintRemover.CreateFlushJobs();
    //Note that we explicitly add the constraint remover jobs here.
    //The constraint remover can be used in two ways- deactivation style, and narrow phase style.
    //In deactivation, we're not actually removing constraints from the simulation completely, so it requires fewer jobs.
    //The constraint remover just lets you choose which jobs to call. The narrow phase needs all of them.
    flushJobs.EnsureCapacity(flushJobs.Count + removalBatchJobCount + 3, jobPool);
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintsFromBodyLists });
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.ReturnConstraintHandles });
    flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintFromBatchReferencedHandles });
    for (int i = 0; i < removalBatchJobCount; ++i)
    {
        flushJobs.AddUnsafely(new NarrowPhaseFlushJob { Type = NarrowPhaseFlushJobType.RemoveConstraintFromTypeBatch, Index = i });
    }
    if (threadDispatcher == null)
    {
        for (int i = 0; i < flushJobs.Count; ++i)
        {
            ExecuteFlushJob(ref flushJobs[i], Pool);
        }
    }
    else
    {
        flushJobIndex = -1;
        this.threadDispatcher = threadDispatcher;
        threadDispatcher.DispatchWorkers(flushWorkerLoop);
        this.threadDispatcher = null;
    }
    //var end = Stopwatch.GetTimestamp();
    //Console.WriteLine($"Flush stage 3 time (us): {1e6 * (end - start) / Stopwatch.Frequency}");
    flushJobs.Dispose(Pool.SpecializeFor<NarrowPhaseFlushJob>());
    PairCache.Postflush();
    ConstraintRemover.Postflush();
    OnPostflush(threadDispatcher);
}
public TextBuilder Append(string text, int start, int count)
{
    //Reserving text.Length is conservative; only count characters are actually appended.
    characters.EnsureCapacity(characters.Count + text.Length, new PassthroughArrayPool<char>());
    int end = start + count;
    for (int i = start; i < end; ++i)
    {
        characters.AddUnsafely(text[i]);
    }
    return this;
}
/// <summary>
/// Ensures that the underlying id queue can hold at least a certain number of ids.
/// </summary>
/// <param name="count">Number of elements to preallocate space for in the available ids queue.</param>
/// <param name="pool">Pool to pull resized spans from.</param>
public void EnsureCapacity<TPool>(int count, TPool pool) where TPool : IMemoryPool<int, TSpan>
{
    if (!AvailableIds.Span.Allocated)
    {
        //If this was disposed, we must explicitly rehydrate it.
        QuickList<int, TSpan>.Create(pool, count, out AvailableIds);
    }
    else
    {
        AvailableIds.EnsureCapacity(count, pool);
    }
}
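The comment above describes a rehydration pattern: when the backing span has been disposed, EnsureCapacity recreates the queue from the pool instead of resizing it. A rough standalone analogue, using ArrayPool&lt;int&gt; in place of the BEPU span/pool types (all names here are hypothetical), looks like this:

// A minimal sketch, assuming a pooled int[] stands in for the unmanaged span.
using System;
using System.Buffers;

struct IdQueueSketch
{
    int[] ids;       //null after Dispose, analogous to an unallocated span.
    public int Count;

    public void EnsureCapacity(int count, ArrayPool<int> pool)
    {
        if (ids == null)
        {
            //Disposed (or never created): explicitly rehydrate from the pool.
            ids = pool.Rent(count);
        }
        else if (ids.Length < count)
        {
            //Already allocated: grow by renting a larger buffer and copying the survivors.
            var newIds = pool.Rent(count);
            Array.Copy(ids, newIds, Count);
            pool.Return(ids);
            ids = newIds;
        }
    }

    public void Dispose(ArrayPool<int> pool)
    {
        if (ids != null)
        {
            pool.Return(ids);
            ids = null;
        }
    }
}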
public static void TestListResizing(IUnmanagedMemoryPool pool)
{
    Random random = new Random(5);
    var list = new QuickList<int>(4, pool);
    List<int> controlList = new List<int>();
    for (int iterationIndex = 0; iterationIndex < 100000; ++iterationIndex)
    {
        if (random.NextDouble() < 0.7)
        {
            list.Add(iterationIndex, pool);
            controlList.Add(iterationIndex);
        }
        if (random.NextDouble() < 0.2 && list.Count > 0)
        {
            //Only attempt a removal when there is actually something to remove.
            var indexToRemove = random.Next(list.Count);
            list.RemoveAt(indexToRemove);
            controlList.RemoveAt(indexToRemove);
        }
        if (iterationIndex % 1000 == 0)
        {
            list.EnsureCapacity(list.Count * 3, pool);
        }
        else if (iterationIndex % 7777 == 0)
        {
            list.Compact(pool);
        }
    }
    Debug.Assert(list.Count == controlList.Count);
    for (int i = 0; i < list.Count; ++i)
    {
        var a = list[i];
        var b = controlList[i];
        Debug.Assert(a == b);
        Debug.Assert(list.Count == controlList.Count);
    }
    list.Dispose(pool);
}
public static void TestListResizing()
{
    Random random = new Random(5);
    UnsafeBufferPool<int> pool = new UnsafeBufferPool<int>();
    QuickList<int> list = new QuickList<int>(pool, 2);
    List<int> controlList = new List<int>();
    for (int iterationIndex = 0; iterationIndex < 100000; ++iterationIndex)
    {
        if (random.NextDouble() < 0.7)
        {
            list.Add(iterationIndex);
            controlList.Add(iterationIndex);
        }
        if (random.NextDouble() < 0.2 && list.Count > 0)
        {
            //Only attempt a removal when there is actually something to remove.
            var indexToRemove = random.Next(list.Count);
            list.RemoveAt(indexToRemove);
            controlList.RemoveAt(indexToRemove);
        }
        if (iterationIndex % 1000 == 0)
        {
            list.EnsureCapacity(list.Count * 3);
        }
        else if (iterationIndex % 7777 == 0)
        {
            list.Compact();
        }
    }
    Assert.IsTrue(list.Count == controlList.Count);
    for (int i = 0; i < list.Count; ++i)
    {
        var a = list[i];
        var b = controlList[i];
        Assert.IsTrue(a == b);
        Assert.IsTrue(list.Count == controlList.Count);
    }
}
public void CreateJobs(int threadCount, ref QuickList<PreflushJob> jobs, BufferPool pool, int mappingCount)
{
    if (mappingCount > 0)
    {
        if (threadCount > 1)
        {
            const int jobsPerThread = 2; //TODO: Empirical tune; probably just 1.
            freshnessJobCount = Math.Min(threadCount * jobsPerThread, mappingCount);
            var pairsPerJob = mappingCount / freshnessJobCount;
            var remainder = mappingCount - pairsPerJob * freshnessJobCount;
            int previousEnd = 0;
            jobs.EnsureCapacity(jobs.Count + freshnessJobCount, pool);
            int jobIndex = 0;
            while (previousEnd < mappingCount)
            {
                ref var job = ref jobs.AllocateUnsafely();
                job.Type = PreflushJobType.CheckFreshness;
                job.Start = previousEnd;
                //The end of every interval except the last one should be aligned on an 8 byte boundary.
                var pairsInJob = jobIndex < remainder ? pairsPerJob + 1 : pairsPerJob;
                previousEnd = ((previousEnd + pairsInJob + 7) >> 3) << 3;
                if (previousEnd > mappingCount)
                {
                    previousEnd = mappingCount;
                }
                job.End = previousEnd;
                ++jobIndex;
            }
        }
        else
        {
            jobs.Add(new PreflushJob { Type = PreflushJobType.CheckFreshness, Start = 0, End = mappingCount }, pool);
        }
    }
}
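The alignment trick in the loop above, ((x + 7) >> 3) << 3, rounds each job's end up to the next multiple of 8, so only the final job can end off an 8-aligned boundary. A tiny standalone check with assumed example values (mappingCount = 100, six requested jobs; the class and values are purely illustrative) prints the resulting intervals:

// A self-contained sketch of the 8-aligned partitioning; the jobs cover [0, mappingCount)
// without gaps or overlap, and fewer jobs than requested may be emitted once ends are rounded up.
using System;

static class FreshnessPartitionSketch
{
    public static void Main()
    {
        int mappingCount = 100, jobCount = 6;                   //assumed example inputs
        int pairsPerJob = mappingCount / jobCount;              //16
        int remainder = mappingCount - pairsPerJob * jobCount;  //4
        int previousEnd = 0, jobIndex = 0;
        while (previousEnd < mappingCount)
        {
            int start = previousEnd;
            int pairsInJob = jobIndex < remainder ? pairsPerJob + 1 : pairsPerJob;
            previousEnd = ((previousEnd + pairsInJob + 7) >> 3) << 3; //round the end up to a multiple of 8
            if (previousEnd > mappingCount)
                previousEnd = mappingCount;
            Console.WriteLine($"job {jobIndex}: [{start}, {previousEnd})");
            ++jobIndex;
        }
    }
}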