Example #1
            public void Swap(int[] bodies, int islandCount)
            {
                //Pick a body. Find all bodies of its type.
                //Put them all in the same spot, starting at the body with the lowest index.
                if (bodyIndex >= bodies.Length)
                {
                    bodyIndex = 0;
                }
                var bodyType    = bodies[bodyIndex];
                int lowestIndex = -1;

                QuickList<int, Array<int>>.Create(pool, 128, out var islandBodyIndices);

                for (int i = bodies.Length - 1; i >= 0; --i)
                {
                    if (bodies[i] == bodyType)
                    {
                        lowestIndex = i;
                        islandBodyIndices.Add(i, pool);
                    }
                }
                var baseIndex = Math.Min(bodies.Length - islandBodyIndices.Count, lowestIndex);

                Array.Sort(islandBodyIndices.Span.Memory, 0, islandBodyIndices.Count);
                for (int i = 0; i < islandBodyIndices.Count; ++i)
                {
                    IslandCacheConvergence.Swap(ref bodies[baseIndex + i], ref bodies[islandBodyIndices[i]]);
                }
                bodyIndex += islandBodyIndices.Count;
                islandBodyIndices.Dispose(pool);
            }
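IslandCacheConvergence.Swap is not included in this snippet; judging by the call site, it is presumably a plain by-ref element exchange. A minimal sketch under that assumption (the body here is a guess, not the library's code):

            //Hypothetical stand-in for IslandCacheConvergence.Swap as called above: exchanges two slots by reference.
            static void Swap(ref int a, ref int b)
            {
                var temp = a;
                a = b;
                b = temp;
            }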
Example #2
 public unsafe IslandScaffoldConstraintBatch(Solver solver, BufferPool pool)
 {
     pool.SpecializeFor<int>().Take(solver.TypeProcessors.Length, out TypeIdToIndex);
     Unsafe.InitBlockUnaligned(TypeIdToIndex.Memory, 0xFF, (uint)(TypeIdToIndex.Length * sizeof(int)));
     QuickList<IslandScaffoldTypeBatch, Buffer<IslandScaffoldTypeBatch>>.Create(pool.SpecializeFor<IslandScaffoldTypeBatch>(), solver.TypeProcessors.Length, out TypeBatches);
     ReferencedBodyIndices = new IndexSet(pool, solver.bodies.ActiveSet.Count);
 }
Example #3
        public override void Update(Input input, float dt)
        {
            for (int iterationIndex = 0; iterationIndex < 100; ++iterationIndex)
            {
                QuickList<int, Buffer<int>>.Create(BufferPool.SpecializeFor<int>(), Simulation.Bodies.ActiveSet.Count, out var bodyIndicestoDeactivate);

                for (int i = 0; i < Simulation.Bodies.ActiveSet.Count; ++i)
                {
                    bodyIndicestoDeactivate.AllocateUnsafely() = i;
                }
                Simulation.Sleeper.Sleep(ref bodyIndicestoDeactivate);

                bodyIndicestoDeactivate.Dispose(BufferPool.SpecializeFor<int>());

                QuickList<int, Buffer<int>>.Create(BufferPool.SpecializeFor<int>(), Simulation.Bodies.Sets.Length, out var setsToActivate);

                for (int i = 1; i < Simulation.Bodies.Sets.Length; ++i)
                {
                    if (Simulation.Bodies.Sets[i].Allocated)
                    {
                        setsToActivate.AllocateUnsafely() = i;
                    }
                }

                Simulation.Awakener.AwakenSets(ref setsToActivate);
                setsToActivate.Dispose(BufferPool.SpecializeFor<int>());
            }

            base.Update(input, dt);
        }
Example #4
 public OverlapWorker(int workerIndex, BufferPool pool, NarrowPhase<TCallbacks> narrowPhase)
 {
     Batcher = new CollisionBatcher<CollisionCallbacks>(pool, narrowPhase.Shapes, narrowPhase.CollisionTaskRegistry, narrowPhase.timestepDuration,
                                                         new CollisionCallbacks(workerIndex, pool, narrowPhase));
     PendingConstraints = new PendingConstraintAddCache(pool);
     QuickList<int, Buffer<int>>.Create(pool.SpecializeFor<int>(), 16, out PendingSetAwakenings);
 }
Example #5
        public void Flush(IThreadDispatcher threadDispatcher = null, bool deterministic = false)
        {
            OnPreflush(threadDispatcher, deterministic);
            //var start = Stopwatch.GetTimestamp();
            QuickList<NarrowPhaseFlushJob, Buffer<NarrowPhaseFlushJob>>.Create(Pool.SpecializeFor<NarrowPhaseFlushJob>(), 128, out flushJobs);

            PairCache.PrepareFlushJobs(ref flushJobs);
            //We indirectly pass the determinism state; it's used by the constraint remover bookkeeping.
            this.deterministic = deterministic;
            ConstraintRemover.CreateFlushJobs(ref flushJobs);

            if (threadDispatcher == null)
            {
                for (int i = 0; i < flushJobs.Count; ++i)
                {
                    ExecuteFlushJob(ref flushJobs[i]);
                }
            }
            else
            {
                flushJobIndex = -1;
                threadDispatcher.DispatchWorkers(flushWorkerLoop);
            }
            //var end = Stopwatch.GetTimestamp();
            //Console.WriteLine($"Flush stage 3 time (us): {1e6 * (end - start) / Stopwatch.Frequency}");
            flushJobs.Dispose(Pool.SpecializeFor<NarrowPhaseFlushJob>());

            PairCache.Postflush();
            ConstraintRemover.Postflush();

            OnPostflush(threadDispatcher);
        }
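The flushWorkerLoop delegate dispatched above is not part of this snippet. Resetting flushJobIndex to -1 before DispatchWorkers suggests that each worker claims jobs by atomically incrementing a shared index until the job list is exhausted; a rough sketch of that shape, assuming System.Threading.Interlocked and the members shown above (this is a reconstruction, not the actual implementation):

            void FlushWorkerLoop(int workerIndex)
            {
                while (true)
                {
                    //Claim the next job; a -1 starting value means the first claim yields index 0.
                    var jobIndex = Interlocked.Increment(ref flushJobIndex);
                    if (jobIndex >= flushJobs.Count)
                        break;
                    ExecuteFlushJob(ref flushJobs[jobIndex]);
                }
            }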
Example #6
 public OverlapWorker(int workerIndex, BufferPool pool, NarrowPhase<TCallbacks> narrowPhase)
 {
     Batcher = new StreamingBatcher(pool, narrowPhase.CollisionTaskRegistry);
     Filters = new BatcherFilters(workerIndex, narrowPhase);
     ConstraintGenerators = new ConstraintGenerators(workerIndex, pool, narrowPhase);
     PendingConstraints   = new PendingConstraintAddCache(pool);
     QuickList<int, Buffer<int>>.Create(pool.SpecializeFor<int>(), 16, out PendingSetAwakenings);
 }
Example #7
        public LineExtractor(ParallelLooper looper, int initialLineCapacity = 8192)
        {
            QuickList<LineInstance, Array<LineInstance>>.Create(new PassthroughArrayPool<LineInstance>(), initialLineCapacity, out lines);

            constraints   = new ConstraintLineExtractor();
            boundingBoxes = new BoundingBoxLineExtractor();
            this.looper   = looper;
        }
Example #8
        public unsafe IslandProtoConstraintBatch(Solver solver, BufferPool pool)
        {
            pool.SpecializeFor<int>().Take(solver.TypeProcessors.Length, out TypeIdToIndex);
            Unsafe.InitBlockUnaligned(TypeIdToIndex.Memory, 0xFF, (uint)(TypeIdToIndex.Length * sizeof(int)));
            QuickList<IslandProtoTypeBatch, Buffer<IslandProtoTypeBatch>>.Create(pool.SpecializeFor<IslandProtoTypeBatch>(), solver.TypeProcessors.Length, out TypeBatches);

            ReferencedBodyHandles = new HandleSet(pool, solver.bodies.HandlePool.HighestPossiblyClaimedId + 1);
        }
Example #9
        public void Flush(IThreadDispatcher threadDispatcher = null, bool deterministic = false)
        {
            OnPreflush(threadDispatcher, deterministic);
            //var start = Stopwatch.GetTimestamp();
            var jobPool = Pool.SpecializeFor<NarrowPhaseFlushJob>();

            QuickList<NarrowPhaseFlushJob, Buffer<NarrowPhaseFlushJob>>.Create(jobPool, 128, out flushJobs);

            PairCache.PrepareFlushJobs(ref flushJobs);
            //We indirectly pass the determinism state; it's used by the constraint remover bookkeeping.
            this.deterministic = deterministic;
            var removalBatchJobCount = ConstraintRemover.CreateFlushJobs();

            //Note that we explicitly add the constraint remover jobs here.
            //The constraint remover can be used in two ways- deactivation style, and narrow phase style.
            //In deactivation, we're not actually removing constraints from the simulation completely, so it requires fewer jobs.
            //The constraint remover just lets you choose which jobs to call. The narrow phase needs all of them.
            flushJobs.EnsureCapacity(flushJobs.Count + removalBatchJobCount + 3, jobPool);
            flushJobs.AddUnsafely(new NarrowPhaseFlushJob {
                Type = NarrowPhaseFlushJobType.RemoveConstraintsFromBodyLists
            });
            flushJobs.AddUnsafely(new NarrowPhaseFlushJob {
                Type = NarrowPhaseFlushJobType.ReturnConstraintHandles
            });
            flushJobs.AddUnsafely(new NarrowPhaseFlushJob {
                Type = NarrowPhaseFlushJobType.RemoveConstraintFromBatchReferencedHandles
            });
            for (int i = 0; i < removalBatchJobCount; ++i)
            {
                flushJobs.AddUnsafely(new NarrowPhaseFlushJob {
                    Type = NarrowPhaseFlushJobType.RemoveConstraintFromTypeBatch, Index = i
                });
            }

            if (threadDispatcher == null)
            {
                for (int i = 0; i < flushJobs.Count; ++i)
                {
                    ExecuteFlushJob(ref flushJobs[i], Pool);
                }
            }
            else
            {
                flushJobIndex         = -1;
                this.threadDispatcher = threadDispatcher;
                threadDispatcher.DispatchWorkers(flushWorkerLoop);
                this.threadDispatcher = null;
            }
            //var end = Stopwatch.GetTimestamp();
            //Console.WriteLine($"Flush stage 3 time (us): {1e6 * (end - start) / Stopwatch.Frequency}");
            flushJobs.Dispose(Pool.SpecializeFor<NarrowPhaseFlushJob>());

            PairCache.Postflush();
            ConstraintRemover.Postflush();

            OnPostflush(threadDispatcher);
        }
Example #10
 public Graph(GraphDescription description, int initialSeriesCapacity = 8)
 {
     Description = description;
     if (initialSeriesCapacity <= 0)
     {
         throw new ArgumentException("Capacity must be positive.");
     }
     QuickList<Series, Array<Series>>.Create(new PassthroughArrayPool<Series>(), initialSeriesCapacity, out graphSeries);
 }
Example #11
        public ShapesExtractor(ParallelLooper looper, int initialCapacityPerShapeType = 1024)
        {
            QuickList<SphereInstance, Array<SphereInstance>>.Create(new PassthroughArrayPool<SphereInstance>(), initialCapacityPerShapeType, out spheres);

            QuickList<CapsuleInstance, Array<CapsuleInstance>>.Create(new PassthroughArrayPool<CapsuleInstance>(), initialCapacityPerShapeType, out capsules);

            QuickList<BoxInstance, Array<BoxInstance>>.Create(new PassthroughArrayPool<BoxInstance>(), initialCapacityPerShapeType, out boxes);

            this.looper = looper;
        }
Example #12
        public static void Test()
        {
            var rawPool = new BufferPool();

            for (int i = 3; i < 7; ++i)
            {
                rawPool.EnsureCapacityForPower(1 << 19, i);
            }
            var bufferPool = rawPool.SpecializeFor<int>();

            var       random    = new Random(5);
            const int listCount = 1000;
            var       lists     = new QuickList<int, Buffer<int>>[listCount];

            for (int i = 0; i < 1000; ++i)
            {
                QuickList<int, Buffer<int>>.Create(bufferPool, 0 + random.Next(9), out lists[i]);

                ref var   list       = ref lists[i];
                const int anchorSize = 128;
                for (int j = 0; j < 1000; ++j)
                {
                    var removeProbability = 0.5f + 0.5f * (list.Count - anchorSize) / anchorSize;
                    var p = random.NextDouble();
                    if (p < removeProbability)
                    {
                        Debug.Assert(list.Count > 0);
                        //Note that adds can invalidate the start.
                        if (p < removeProbability * 0.5)
                        {
                            //Remove an element that is actually present.
                            var toRemoveIndex = random.Next(list.Count);
                            var predicate     = new TestPredicate {
                                ToCompare = list[toRemoveIndex]
                            };
                            var removed = list.FastRemove(ref predicate);
                            Debug.Assert(removed, "If we selected an element from the list, it should be removable.");
                        }
                        else
                        {
                            var toRemove  = -(1 + random.Next(16));
                            var predicate = new TestPredicate {
                                ToCompare = toRemove
                            };
                            var removed = list.FastRemove(ref predicate);
                            Debug.Assert(!removed, "Shouldn't be able to remove things that were never added!");
                        }
                    }
                    else
                    {
                        var toAdd = random.Next(256);
                        list.Add(toAdd, bufferPool);
                    }
                }
            }
        }
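The TestPredicate type passed to FastRemove above is not defined in this snippet. A minimal sketch of what it presumably looks like; the IPredicate<int> interface name and the Matches(ref int) signature are assumptions rather than something the example confirms:

            //Hypothetical definition of the predicate used with list.FastRemove(ref predicate).
            struct TestPredicate : IPredicate<int>
            {
                public int ToCompare;
                public bool Matches(ref int item)
                {
                    //FastRemove presumably removes the first element for which this returns true.
                    return item == ToCompare;
                }
            }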
Example #13
            public WorkerCache(BufferPool pool, int batchCapacity, int minimumCapacityPerBatch)
            {
                this.pool = pool;
                Debug.Assert(minimumCapacityPerBatch > 0);
                this.minimumCapacityPerBatch = minimumCapacityPerBatch;
                QuickList<TypeBatchIndex, Buffer<TypeBatchIndex>>.Create(pool.SpecializeFor<TypeBatchIndex>(), batchCapacity, out Batches);

                QuickList<QuickList<int, Buffer<int>>, Buffer<QuickList<int, Buffer<int>>>>.Create(pool.SpecializeFor<QuickList<int, Buffer<int>>>(), batchCapacity, out BatchHandles);

                QuickList<RemovalTarget, Buffer<RemovalTarget>>.Create(pool.SpecializeFor<RemovalTarget>(), batchCapacity, out RemovalTargets);
            }
Example #14
        public BoundingBoxLineExtractor()
        {
            QuickList<ThreadJob, Array<ThreadJob>>.Create(new PassthroughArrayPool<ThreadJob>(), Environment.ProcessorCount * jobsPerThread, out jobs);

            //Because we don't know how many lines will be created beforehand, each thread needs a dedicated structure. We'll copy everything together at the end.
            //Not the most efficient thing, but it doesn't matter much.
            for (int i = 0; i < jobs.Span.Length; ++i)
            {
                QuickList<LineInstance, Array<LineInstance>>.Create(new PassthroughArrayPool<LineInstance>(), Environment.ProcessorCount * jobsPerThread, out jobs.Span[i].JobLines);
            }
            workDelegate = Work;
        }
Example #15
 /// <summary>
 /// Ensures that the underlying id queue can hold at least a certain number of ids.
 /// </summary>
 /// <param name="count">Number of elements to preallocate space for in the available ids queue.</param>
 /// <param name="pool">Pool to pull resized spans from.</param>
 public void EnsureCapacity<TPool>(int count, TPool pool) where TPool : IMemoryPool<int, TSpan>
 {
     if (!AvailableIds.Span.Allocated)
     {
         //If this was disposed, we must explicitly rehydrate it.
         QuickList<int, TSpan>.Create(pool, count, out AvailableIds);
     }
     else
     {
         AvailableIds.EnsureCapacity(count, pool);
     }
 }
Example #16
        /// <summary>
        /// Wakes up all bodies and constraints within a set. Doesn't do anything if the set is awake (index zero).
        /// </summary>
        /// <param name="setIndex">Index of the set to awaken.</param>
        public void AwakenSet(int setIndex)
        {
            if (setIndex > 0)
            {
                ValidateSleepingSetIndex(setIndex);
                //TODO: Some fairly pointless work here- spans or other approaches could help with the API.
                QuickList<int, Buffer<int>>.Create(pool.SpecializeFor<int>(), 1, out var list);

                list.AddUnsafely(setIndex);
                AwakenSets(ref list);
                list.Dispose(pool.SpecializeFor<int>());
            }
        }
Example #17
        public SleepingSetBuilder(BufferPool pool, int initialPairCapacity, int initialCapacityPerCache)
        {
            var listPool = pool.SpecializeFor<UntypedList>();

            listPool.Take(PairCache.CollisionConstraintTypeCount, out ConstraintCaches);
            listPool.Take(PairCache.CollisionTypeCount, out CollisionCaches);
            //Original values are used to test for existence; have to clear to avoid undefined values.
            ConstraintCaches.Clear(0, ConstraintCaches.Length);
            CollisionCaches.Clear(0, CollisionCaches.Length);
            QuickList<SleepingPair, Buffer<SleepingPair>>.Create(pool.SpecializeFor<SleepingPair>(), initialPairCapacity, out Pairs);

            InitialCapacityPerCache = initialCapacityPerCache;
        }
Example #18
        public MeshCache(Device device, BufferPool pool, int initialSizeInVertices = 1 << 22)
        {
            Pool = pool;
            pool.Take(initialSizeInVertices, out vertices);
            TriangleBuffer = new StructuredBuffer<Vector3>(device, initialSizeInVertices, "Mesh Cache Vertex Buffer");
            allocator      = new Allocator(initialSizeInVertices);

            QuickList<UploadRequest, Buffer<UploadRequest>>.Create(pool.SpecializeFor<UploadRequest>(), 128, out pendingUploads);

            QuickList<ulong, Buffer<ulong>>.Create(pool.SpecializeFor<ulong>(), 128, out requestedIds);

            QuickSet<ulong, Buffer<ulong>, Buffer<int>, PrimitiveComparer<ulong>>.Create(pool.SpecializeFor<ulong>(), pool.SpecializeFor<int>(), 8, 3, out previouslyAllocatedIds);
        }
Example #19
        /// <summary>
        /// Awakens a list of set indices.
        /// </summary>
        /// <param name="setIndices">List of set indices to wake up.</param>
        /// <param name="threadDispatcher">Thread dispatcher to use when waking the bodies. Pass null to run on a single thread.</param>
        public void AwakenSets(ref QuickList<int, Buffer<int>> setIndices, IThreadDispatcher threadDispatcher = null)
        {
            QuickList<int, Buffer<int>>.Create(pool.SpecializeFor<int>(), setIndices.Count, out var uniqueSetIndices);

            var uniqueSet = new IndexSet(pool, bodies.Sets.Length);

            AccumulateUniqueIndices(ref setIndices, ref uniqueSet, ref uniqueSetIndices);
            uniqueSet.Dispose(pool);

            //Note that we use the same codepath as multithreading, we just don't use a multithreaded dispatch to execute jobs.
            //TODO: It would probably be a good idea to add a little heuristic to avoid doing multithreaded dispatches if there are only like 5 total bodies.
            //Shouldn't matter too much- the threaded variant should only really be used when doing big batched changes, so having a fixed constant cost isn't that bad.
            int threadCount = threadDispatcher == null ? 1 : threadDispatcher.ThreadCount;

            //Note that direct wakes always reset activity states. I suspect this is sufficiently universal that no one will ever want the alternative,
            //even though the narrowphase does avoid resetting activity states for the sake of faster resleeping when possible.
            var (phaseOneJobCount, phaseTwoJobCount) = PrepareJobs(ref uniqueSetIndices, true, threadCount);

            if (threadCount > 1)
            {
                this.jobIndex = -1;
                this.jobCount = phaseOneJobCount;
                threadDispatcher.DispatchWorkers(phaseOneWorkerDelegate);
            }
            else
            {
                for (int i = 0; i < phaseOneJobCount; ++i)
                {
                    ExecutePhaseOneJob(i);
                }
            }

            if (threadCount > 1)
            {
                this.jobIndex = -1;
                this.jobCount = phaseTwoJobCount;
                threadDispatcher.DispatchWorkers(phaseTwoWorkerDelegate);
            }
            else
            {
                for (int i = 0; i < phaseTwoJobCount; ++i)
                {
                    ExecutePhaseTwoJob(i);
                }
            }

            DisposeForCompletedAwakenings(ref uniqueSetIndices);

            uniqueSetIndices.Dispose(pool.SpecializeFor<int>());
        }
Example #20
        public ShapesExtractor(Device device, ParallelLooper looper, BufferPool pool, int initialCapacityPerShapeType = 1024)
        {
            QuickList<SphereInstance, Array<SphereInstance>>.Create(new PassthroughArrayPool<SphereInstance>(), initialCapacityPerShapeType, out spheres);

            QuickList<CapsuleInstance, Array<CapsuleInstance>>.Create(new PassthroughArrayPool<CapsuleInstance>(), initialCapacityPerShapeType, out capsules);

            QuickList<BoxInstance, Array<BoxInstance>>.Create(new PassthroughArrayPool<BoxInstance>(), initialCapacityPerShapeType, out boxes);

            QuickList<TriangleInstance, Array<TriangleInstance>>.Create(new PassthroughArrayPool<TriangleInstance>(), initialCapacityPerShapeType, out triangles);

            QuickList<MeshInstance, Array<MeshInstance>>.Create(new PassthroughArrayPool<MeshInstance>(), initialCapacityPerShapeType, out meshes);

            this.MeshCache = new MeshCache(device, pool);
            this.looper    = looper;
        }
Example #21
        /// <summary>
        /// Resizes the underlying buffer to the smallest size required to hold the given count and the current available id count.
        /// </summary>
        /// <param name="count">Number of elements to guarantee space for in the available ids queue.</param>
        public void Resize<TPool>(int count, TPool pool) where TPool : IMemoryPool<int, TSpan>
        {
            if (!AvailableIds.Span.Allocated)
            {
                //If this was disposed, we must explicitly rehydrate it.
                QuickList<int, TSpan>.Create(pool, count, out AvailableIds);

                return;
            }
            var targetLength = BufferPool<int>.GetLowestContainingElementCount(Math.Max(count, AvailableIds.Count));

            if (AvailableIds.Span.Length != targetLength)
            {
                AvailableIds.Resize(targetLength, pool);
            }
        }
Example #22
        internal int Add(ref BodyDescription bodyDescription, int handle, int minimumConstraintCapacity, BufferPool pool)
        {
            var index = Count;

            if (index == IndexToHandle.Length)
            {
                InternalResize(IndexToHandle.Length * 2, pool);
            }
            ++Count;
            IndexToHandle[index] = handle;
            //Collidable's broad phase index is left unset. The Bodies collection is responsible for attaching that data.
            QuickList<BodyConstraintReference, Buffer<BodyConstraintReference>>.Create(pool.SpecializeFor<BodyConstraintReference>(), minimumConstraintCapacity, out Constraints[index]);

            ApplyDescriptionByIndex(index, ref bodyDescription);
            return index;
        }
Example #23
        static void FillTrashBuffers(Simulation simulation, Random random)
        {
            var       pool        = simulation.BufferPool.SpecializeFor<int>();
            var       bufferPool  = simulation.BufferPool.SpecializeFor<Buffer<int>>();
            const int bufferCount = 50;

            QuickList<Buffer<int>, Buffer<Buffer<int>>>.Create(bufferPool, bufferCount, out var bufferList);

            for (int trashBufferIndex = 0; trashBufferIndex < bufferCount; ++trashBufferIndex)
            {
                //Pull a buffer from the pool, fill it with trash data, and return it.
                ref var buffer = ref bufferList.AllocateUnsafely();
                pool.Take(1 << random.Next(18), out buffer);
                for (int k = 0; k < buffer.Length; ++k)
                {
                    buffer[k] = random.Next(int.MinValue, int.MaxValue);
                }
            }
        }
Example #24
        public static void TestListResizing<TSpan, TPool>(TPool pool)
            where TSpan : ISpan<int>
            where TPool : IMemoryPool<int, TSpan>
        {
            Random random = new Random(5);

            QuickList<int, TSpan>.Create(pool, 4, out var list);

            List<int> controlList = new List<int>();

            for (int iterationIndex = 0; iterationIndex < 100000; ++iterationIndex)
            {
                if (random.NextDouble() < 0.7)
                {
                    list.Add(iterationIndex, pool);
                    controlList.Add(iterationIndex);
                }
                if (random.NextDouble() < 0.2)
                {
                    var indexToRemove = random.Next(list.Count);
                    list.RemoveAt(indexToRemove);
                    controlList.RemoveAt(indexToRemove);
                }
                if (iterationIndex % 1000 == 0)
                {
                    list.EnsureCapacity(list.Count * 3, pool);
                }
                else if (iterationIndex % 7777 == 0)
                {
                    list.Compact(pool);
                }
            }

            Debug.Assert(list.Count == controlList.Count);
            for (int i = 0; i < list.Count; ++i)
            {
                var a = list[i];
                var b = controlList[i];
                Debug.Assert(a == b);
                Debug.Assert(list.Count == controlList.Count);
            }

            list.Dispose(pool);
        }
Example #25
        public WorkerPairCache(int workerIndex, BufferPool pool, ref QuickList<int, Buffer<int>> minimumSizesPerConstraintType, ref QuickList<int, Buffer<int>> minimumSizesPerCollisionType,
                               int pendingCapacity, int minimumPerTypeCapacity = 128)
        {
            this.workerIndex            = workerIndex;
            this.pool                   = pool;
            this.minimumPerTypeCapacity = minimumPerTypeCapacity;
            const float previousCountMultiplier = 1.25f;

            pool.SpecializeFor<UntypedList>().Take((int)(minimumSizesPerConstraintType.Count * previousCountMultiplier), out constraintCaches);
            pool.SpecializeFor<UntypedList>().Take((int)(minimumSizesPerCollisionType.Count * previousCountMultiplier), out collisionCaches);
            for (int i = 0; i < minimumSizesPerConstraintType.Count; ++i)
            {
                if (minimumSizesPerConstraintType[i] > 0)
                {
                    constraintCaches[i] = new UntypedList(Math.Max(minimumPerTypeCapacity, (int)(previousCountMultiplier * minimumSizesPerConstraintType[i])), pool);
                }
                else
                {
                    constraintCaches[i] = new UntypedList();
                }
            }
            //Clear out the remainder of slots to avoid invalid data.
            constraintCaches.Clear(minimumSizesPerConstraintType.Count, constraintCaches.Length - minimumSizesPerConstraintType.Count);
            for (int i = 0; i < minimumSizesPerCollisionType.Count; ++i)
            {
                if (minimumSizesPerCollisionType[i] > 0)
                {
                    collisionCaches[i] = new UntypedList(Math.Max(minimumPerTypeCapacity, (int)(previousCountMultiplier * minimumSizesPerCollisionType[i])), pool);
                }
                else
                {
                    collisionCaches[i] = new UntypedList();
                }
            }
            //Clear out the remainder of slots to avoid invalid data.
            collisionCaches.Clear(minimumSizesPerCollisionType.Count, collisionCaches.Length - minimumSizesPerCollisionType.Count);

            QuickList<PendingAdd, Buffer<PendingAdd>>.Create(pool.SpecializeFor<PendingAdd>(), pendingCapacity, out PendingAdds);

            QuickList<CollidablePair, Buffer<CollidablePair>>.Create(pool.SpecializeFor<CollidablePair>(), pendingCapacity, out PendingRemoves);
        }
Example #26
        public Input(Window window)
        {
            this.window             = window.window;
            this.window.KeyDown    += KeyDown;
            this.window.KeyUp      += KeyUp;
            this.window.MouseDown  += MouseDown;
            this.window.MouseUp    += MouseUp;
            this.window.MouseWheel += MouseWheel;
            this.window.KeyPress   += KeyPress;
            var keyPool         = new PassthroughArrayPool<Key>();
            var mouseButtonPool = new PassthroughArrayPool<MouseButton>();
            var intPool         = new PassthroughArrayPool<int>();

            MouseButtonSet.Create(mouseButtonPool, intPool, 3, 3, out anyDownedButtons);
            MouseButtonSet.Create(mouseButtonPool, intPool, 3, 3, out downedButtons);
            MouseButtonSet.Create(mouseButtonPool, intPool, 3, 3, out previousDownedButtons);
            KeySet.Create(keyPool, intPool, 3, 3, out anyDownedKeys);
            KeySet.Create(keyPool, intPool, 3, 3, out downedKeys);
            KeySet.Create(keyPool, intPool, 3, 3, out previousDownedKeys);
            QuickList<char, Array<char>>.Create(new PassthroughArrayPool<char>(), 32, out TypedCharacters);
        }
Example #27
            /// <summary>
            /// Prepares the jobs associated with a self test. Must be called before a dispatch over PairTest.
            /// </summary>
            /// <param name="tree">Tree to test against itself.</param>
            /// <param name="overlapHandlers">Callbacks used to handle individual overlaps detected by the self test.</param>
            /// <param name="threadCount">Number of threads to prepare jobs for.</param>
            public void PrepareJobs(ref Tree tree, TOverlapHandler[] overlapHandlers, int threadCount)
            {
                //If there are not multiple children, there's no need to recurse.
                //This provides a guarantee that there are at least 2 children in each internal node considered by GetOverlapsInNode.
                if (tree.leafCount < 2)
                {
                    //We clear it out to avoid keeping any old job counts. The count property is used for scheduling, so incorrect values could break the job scheduler.
                    jobs = new QuickList<Job, Buffer<Job>>();
                    return;
                }
                Debug.Assert(overlapHandlers.Length >= threadCount);
                const float jobMultiplier  = 1.5f;
                var         targetJobCount = Math.Max(1, jobMultiplier * threadCount);

                leafThreshold = (int)(tree.leafCount / targetJobCount);
                QuickList<Job, Buffer<Job>>.Create(Pool.SpecializeFor<Job>(), (int)(targetJobCount * 2), out jobs);

                NextNodePair         = -1;
                this.OverlapHandlers = overlapHandlers;
                this.Tree            = tree;
                //Collect jobs.
                CollectJobsInNode(0, tree.leafCount, ref OverlapHandlers[0]);
            }
Example #28
        public void Prepare(IThreadDispatcher threadDispatcher = null)
        {
            int maximumConstraintTypeCount = 0, maximumCollisionTypeCount = 0;

            for (int i = 0; i < workerCaches.Count; ++i)
            {
                workerCaches[i].GetMaximumCacheTypeCounts(out var collision, out var constraint);
                if (collision > maximumCollisionTypeCount)
                {
                    maximumCollisionTypeCount = collision;
                }
                if (constraint > maximumConstraintTypeCount)
                {
                    maximumConstraintTypeCount = constraint;
                }
            }
            QuickList<PreallocationSizes, Buffer<PreallocationSizes>>.Create(pool.SpecializeFor<PreallocationSizes>(), maximumConstraintTypeCount, out var minimumSizesPerConstraintType);

            QuickList<PreallocationSizes, Buffer<PreallocationSizes>>.Create(pool.SpecializeFor<PreallocationSizes>(), maximumCollisionTypeCount, out var minimumSizesPerCollisionType);

            //Since the minimum size accumulation builds the minimum size incrementally, bad data within the array can corrupt the result- we must clear it.
            minimumSizesPerConstraintType.Span.Clear(0, minimumSizesPerConstraintType.Span.Length);
            minimumSizesPerCollisionType.Span.Clear(0, minimumSizesPerCollisionType.Span.Length);
            for (int i = 0; i < workerCaches.Count; ++i)
            {
                workerCaches[i].AccumulateMinimumSizes(ref minimumSizesPerConstraintType, ref minimumSizesPerCollisionType);
            }

            var threadCount = threadDispatcher != null ? threadDispatcher.ThreadCount : 1;

            //Ensure that the new worker pair caches can hold all workers.
            if (!NextWorkerCaches.Span.Allocated || NextWorkerCaches.Span.Length < threadCount)
            {
                //The next worker caches should never need to be disposed here. The flush should have taken care of it.
#if DEBUG
                for (int i = 0; i < NextWorkerCaches.Count; ++i)
                {
                    Debug.Assert(NextWorkerCaches[i].Equals(default(WorkerPairCache)));
                }
#endif
                QuickList<WorkerPairCache, Array<WorkerPairCache>>.Create(new PassthroughArrayPool<WorkerPairCache>(), threadCount, out NextWorkerCaches);
            }
            //Note that we have not initialized the workerCaches from the previous frame. In the event that this is the first frame and there are no previous worker caches,
            //there will be no pointers into the caches, and removal analysis loops over the count which defaults to zero- so it's safe.
            NextWorkerCaches.Count = threadCount;

            var pendingSize = Math.Max(minimumPendingSize, previousPendingSize);
            if (threadDispatcher != null)
            {
                for (int i = 0; i < threadCount; ++i)
                {
                    NextWorkerCaches[i] = new WorkerPairCache(i, threadDispatcher.GetThreadMemoryPool(i), ref minimumSizesPerConstraintType, ref minimumSizesPerCollisionType,
                                                              pendingSize, minimumPerTypeCapacity);
                }
            }
            else
            {
                NextWorkerCaches[0] = new WorkerPairCache(0, pool, ref minimumSizesPerConstraintType, ref minimumSizesPerCollisionType, pendingSize, minimumPerTypeCapacity);
            }
            minimumSizesPerConstraintType.Dispose(pool.SpecializeFor<PreallocationSizes>());
            minimumSizesPerCollisionType.Dispose(pool.SpecializeFor<PreallocationSizes>());

            //Create the pair freshness array for the existing overlaps.
            pool.Take(Mapping.Count, out PairFreshness);
            //This clears 1 byte per pair. 32768 pairs with 10GBps assumed single core bandwidth means about 3 microseconds.
            //There is a small chance that multithreading this would be useful in larger simulations- but it would be very, very close.
            PairFreshness.Clear(0, Mapping.Count);
        }
Example #29
        public BoundingBoxLineExtractor()
        {
            QuickList<ThreadJob, Array<ThreadJob>>.Create(new PassthroughArrayPool<ThreadJob>(), Environment.ProcessorCount * jobsPerThread, out jobs);

            workDelegate = Work;
        }
Example #30
        public static void TestChurnStability()
        {
            var   allocator = new Allocator(2048);
            var   random    = new Random(5);
            ulong idCounter = 0;
            var   pool      = new PassthroughArrayPool<ulong>();

            QuickList<ulong, Array<ulong>>.Create(pool, 8, out var allocatedIds);

            QuickList<ulong, Array<ulong>>.Create(pool, 8, out var unallocatedIds);

            for (int i = 0; i < 512; ++i)
            {
                long start;
                var  id = idCounter++;
                //allocator.ValidatePointers();
                if (allocator.Allocate(id, 1 + random.Next(5), out start))
                {
                    allocatedIds.Add(id, pool);
                }
                else
                {
                    unallocatedIds.Add(id, pool);
                }
                //allocator.ValidatePointers();
            }
            for (int timestepIndex = 0; timestepIndex < 100000; ++timestepIndex)
            {
                //First add and remove a bunch randomly.
                for (int i = random.Next(Math.Min(allocatedIds.Count, 15)); i >= 0; --i)
                {
                    var indexToRemove = random.Next(allocatedIds.Count);
                    //allocator.ValidatePointers();
                    var deallocated = allocator.Deallocate(allocatedIds[indexToRemove]);
                    Debug.Assert(deallocated);
                    //allocator.ValidatePointers();
                    unallocatedIds.Add(allocatedIds[indexToRemove], pool);
                    allocatedIds.FastRemoveAt(indexToRemove);
                }
                for (int i = random.Next(Math.Min(unallocatedIds.Count, 15)); i >= 0; --i)
                {
                    var indexToAllocate = random.Next(unallocatedIds.Count);
                    //allocator.ValidatePointers();
                    if (allocator.Allocate(unallocatedIds[indexToAllocate], random.Next(3), out long start))
                    {
                        //allocator.ValidatePointers();
                        allocatedIds.Add(unallocatedIds[indexToAllocate], pool);
                        unallocatedIds.FastRemoveAt(indexToAllocate);
                    }
                    //allocator.ValidatePointers();
                }
                //Check to ensure that everything's still coherent.
                for (int i = 0; i < allocatedIds.Count; ++i)
                {
                    Debug.Assert(allocator.Contains(allocatedIds[i]));
                }
                for (int i = 0; i < unallocatedIds.Count; ++i)
                {
                    Debug.Assert(!allocator.Contains(unallocatedIds[i]));
                }
            }
            //Wind it down.
            for (int i = 0; i < allocatedIds.Count; ++i)
            {
                var deallocated = allocator.Deallocate(allocatedIds[i]);
                Debug.Assert(deallocated);
            }
            //Confirm cleanup.
            for (int i = 0; i < allocatedIds.Count; ++i)
            {
                Debug.Assert(!allocator.Contains(allocatedIds[i]));
            }
            for (int i = 0; i < unallocatedIds.Count; ++i)
            {
                Debug.Assert(!allocator.Contains(unallocatedIds[i]));
            }
        }
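The recurring pattern across these examples is create-from-pool, use, then dispose back to the same pool. A minimal sketch distilled from the calls that appear above (BufferPool, SpecializeFor<int>, and QuickList<int, Buffer<int>>.Create/Add/Dispose); the capacities are arbitrary:

            var rawPool = new BufferPool();
            var intPool = rawPool.SpecializeFor<int>();
            QuickList<int, Buffer<int>>.Create(intPool, 32, out var list);
            for (int i = 0; i < 100; ++i)
            {
                //Add may grow the backing buffer, pulling the new span from the same pool.
                list.Add(i, intPool);
            }
            //Return the backing buffer to the pool once the list is no longer needed.
            list.Dispose(intPool);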