// Backward (tail-to-head) pass of pipeline teardown: removes each handler context and
// invokes its removal callback, always on that context's own executor.
// <param name="currentThread">The thread this invocation is running on; compared against each context's executor.</param>
// <param name="ctx">The context to start removing from (traversal follows ctx.Prev).</param>
// <param name="inEventLoop">True when the caller already knows it is running on ctx's executor, skipping the IsInEventLoop check for the first context.</param>
void DestroyDown(Thread currentThread, AbstractChannelHandlerContext ctx, bool inEventLoop)
{
    // We have reached at tail; now traverse backwards.
    AbstractChannelHandlerContext headContext = this.head;
    while (true)
    {
        // Stop at the head sentinel; it is never removed.
        if (ctx == headContext)
        {
            break;
        }
        IEventExecutor executor = ctx.Executor;
        if (inEventLoop || executor.IsInEventLoop(currentThread))
        {
            // NOTE(review): lock(this) — presumably serializes structural pipeline
            // mutation with other mutators elsewhere in this class; confirm all of
            // them lock the same monitor.
            lock (this)
            {
                Remove0(ctx);
                this.CallHandlerRemoved0(ctx);
            }
        }
        else
        {
            // Wrong thread for this context: re-dispatch the rest of the backward walk
            // onto ctx's executor and stop here. The continuation passes inEventLoop=true
            // because it will then be running on that executor.
            executor.Execute((self, c) => ((DefaultChannelPipeline)self).DestroyDown(Thread.CurrentThread, (AbstractChannelHandlerContext)c, true), this, ctx);
            break;
        }
        ctx = ctx.Prev;
        // After stepping to the previous context we can no longer assume we are on its executor.
        inEventLoop = false;
    }
}
/// <summary>
/// Returns this handle to its owning stack. When called on the stack's owner thread the
/// handle is pushed directly; otherwise it is appended to a per-thread delayed-recycle
/// queue that the owner drains later.
/// </summary>
/// <param name="value">The pooled object backed by this handle; must be the same instance this handle holds.</param>
public void Release<T>(T value)
    where T : class
{
    Contract.Requires(value == this.Value, "value differs from one backed by this handle.");
    Stack ownerStack = this.Stack;
    Thread currentThread = Thread.CurrentThread;

    // Fast path: releasing on the owner thread — push straight onto the stack.
    if (ownerStack.Thread == currentThread)
    {
        ownerStack.Push(this);
        return;
    }

    // Slow path: look up (or lazily create) this thread's delayed queue for the owner stack.
    ConditionalWeakTable<Stack, WeakOrderQueue> delayedQueues = DelayedPool.Value;
    WeakOrderQueue queue;
    if (!delayedQueues.TryGetValue(ownerStack, out queue))
    {
        queue = new WeakOrderQueue(ownerStack, currentThread);
        delayedQueues.Add(ownerStack, queue);
    }
    queue.Add(this);
}
/// <summary>
/// Waits for <paramref name="thread"/> to terminate, up to the given timeout.
/// </summary>
/// <param name="thread">The thread to wait on.</param>
/// <param name="timeout">Maximum wait; expected to be non-negative and at most int.MaxValue milliseconds.</param>
/// <returns>True if the thread terminated within the timeout; otherwise false.</returns>
public static bool Join(this Thread thread, TimeSpan timeout)
{
    long milliseconds = (long)timeout.TotalMilliseconds;
    Contract.Requires(milliseconds >= 0 && milliseconds <= int.MaxValue);
    int boundedMilliseconds = (int)milliseconds;
    return thread.Join(boundedMilliseconds);
}
/// <summary>
/// Creates a stack bound to <paramref name="thread"/>, pre-sizing the element array
/// to the smaller of InitialCapacity and <paramref name="maxCapacity"/>.
/// </summary>
/// <param name="maxCapacity">Hard upper bound on how many handles this stack may hold.</param>
/// <param name="parent">The pool that owns this stack.</param>
/// <param name="thread">The thread this stack belongs to.</param>
internal Stack(int maxCapacity, ThreadLocalPool parent, Thread thread)
{
    this.Parent = parent;
    this.Thread = thread;
    this.maxCapacity = maxCapacity;
    // Start small; never allocate more slots than the cap allows.
    int initialSize = Math.Min(InitialCapacity, maxCapacity);
    this.elements = new Handle[initialSize];
}
/// <summary>
/// Waits for <paramref name="thread"/> to terminate, up to the given timeout.
/// </summary>
/// <param name="thread">The thread to wait on.</param>
/// <param name="timeout">Maximum wait; must be non-negative and at most int.MaxValue milliseconds.</param>
/// <returns>True if the thread terminated within the timeout; otherwise false.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="timeout"/> is negative or exceeds int.MaxValue milliseconds.
/// </exception>
public static bool Join(this Thread thread, TimeSpan timeout)
{
    long tm = (long)timeout.TotalMilliseconds;
    // Single unsigned comparison rejects both negative values and values > int.MaxValue
    // (a negative long reinterpreted as ulong is huge), hence the commented-out tm < 0.
    if (/*tm < 0 ||*/ (ulong)tm > int.MaxValue)
    {
        // Fix: was ThrowHelper.ThrowIndexOutOfRangeException(). IndexOutOfRangeException is
        // reserved for indexer misuse; an invalid *argument* must surface as
        // ArgumentOutOfRangeException so callers get the conventional exception type.
        throw new ArgumentOutOfRangeException(nameof(timeout), timeout, "timeout must be between 0 and Int32.MaxValue milliseconds.");
    }
    return thread.Join((int)tm);
}
/// <summary>
/// Creates a delayed-recycle queue for <paramref name="stack"/>, tracking the releasing
/// thread only weakly, and links the queue onto the front of the stack's queue list.
/// </summary>
/// <param name="stack">The owning stack; must not be null.</param>
/// <param name="thread">The thread releasing handles into this queue; held via WeakReference so it can die.</param>
internal WeakOrderQueue(Stack stack, Thread thread)
{
    Contract.Requires(stack != null);
    this.ownerThread = new WeakReference<Thread>(thread);
    // A single empty link serves as both head and tail of the new queue.
    Link firstLink = new Link();
    this.tail = firstLink;
    this.head = firstLink;
    // Prepend this queue to the stack's list under the stack's monitor so concurrent
    // releasers from different threads do not lose links.
    lock (stack)
    {
        this.next = stack.HeadQueue;
        stack.HeadQueue = this;
    }
}
// TODO: Test if adding padding helps under contention
//private long pad0, pad1, pad2, pad3, pad4, pad5, pad6, pad7;
// Builds the per-thread cache for heap allocations: tiny/small sub-page caches plus
// normal-size caches, and registers a death-watch callback so cached buffers are
// returned to the arena when the owning thread dies.
// <param name="heapArena">Arena to cache for; when null every cache field is nulled out.</param>
// <param name="tinyCacheSize">Capacity of each tiny sub-page cache.</param>
// <param name="smallCacheSize">Capacity of each small sub-page cache.</param>
// <param name="normalCacheSize">Capacity of each normal cache.</param>
// <param name="maxCachedBufferCapacity">Upper bound on buffer size eligible for normal caching; must be >= 0.</param>
// <param name="freeSweepAllocationThreshold">Allocations between free sweeps; must be > 0.</param>
internal PoolThreadCache(PoolArena<T> heapArena, int tinyCacheSize, int smallCacheSize, int normalCacheSize, int maxCachedBufferCapacity, int freeSweepAllocationThreshold)
{
    Contract.Requires(maxCachedBufferCapacity >= 0);
    Contract.Requires(freeSweepAllocationThreshold > 0);
    this.freeSweepAllocationThreshold = freeSweepAllocationThreshold;
    this.HeapArena = heapArena;
    if (heapArena != null)
    {
        // Create the caches for the heap allocations
        this.tinySubPageHeapCaches = CreateSubPageCaches(
            tinyCacheSize, PoolArena<T>.NumTinySubpagePools, SizeClass.Tiny);
        this.smallSubPageHeapCaches = CreateSubPageCaches(
            smallCacheSize, heapArena.NumSmallSubpagePools, SizeClass.Small);
        // Shift count used to map a normalized capacity to a normal-cache index (log2 of page size).
        this.numShiftsNormalHeap = Log2(heapArena.PageSize);
        this.normalHeapCaches = CreateNormalCaches(
            normalCacheSize, maxCachedBufferCapacity, heapArena);
        heapArena.IncrementNumThreadCaches();
    }
    else
    {
        // No heapArea is configured so just null out all caches
        this.tinySubPageHeapCaches = null;
        this.smallSubPageHeapCaches = null;
        this.normalHeapCaches = null;
        this.numShiftsNormalHeap = -1;
    }
    // We only need to watch the thread when any cache is used.
    if (this.tinySubPageHeapCaches != null || this.smallSubPageHeapCaches != null || this.normalHeapCaches != null)
    {
        // Free0 is captured as a delegate once so Watch/Unwatch see the same instance.
        this.freeTask = this.Free0;
        this.deathWatchThread = Thread.CurrentThread;
        // The thread-local cache will keep a list of pooled buffers which must be returned to
        // the pool when the thread is not alive anymore.
        ThreadDeathWatcher.Watch(this.deathWatchThread, this.freeTask);
    }
    else
    {
        this.freeTask = null;
        this.deathWatchThread = null;
    }
}
// Forward (head-to-tail) pass of pipeline teardown: walks ctx.Next until the tail
// sentinel is reached, then delegates to DestroyDown for the backward removal pass.
// <param name="ctx">The context to start walking from.</param>
// <param name="inEventLoop">True when the caller already knows it is running on ctx's executor, skipping the IsInEventLoop check for the first context.</param>
void DestroyUp(AbstractChannelHandlerContext ctx, bool inEventLoop)
{
    Thread currentThread = Thread.CurrentThread;
    AbstractChannelHandlerContext tailContext = this.tail;
    while (true)
    {
        if (ctx == tailContext)
        {
            // Reached the tail sentinel: start the backward destroy pass from the node before it.
            this.DestroyDown(currentThread, tailContext.Prev, inEventLoop);
            break;
        }
        IEventExecutor executor = ctx.Executor;
        if (!inEventLoop && !executor.IsInEventLoop(currentThread))
        {
            // Wrong thread for this context: re-dispatch the rest of the forward walk onto
            // ctx's executor and stop here. The continuation passes inEventLoop=true because
            // it will then be running on that executor.
            executor.Execute((self, c) => ((DefaultChannelPipeline)self).DestroyUp((AbstractChannelHandlerContext)c, true), this, ctx);
            break;
        }
        ctx = ctx.Next;
        // After stepping to the next context we can no longer assume we are on its executor.
        inEventLoop = false;
    }
}
// TODO: Test if adding padding helps under contention
//private long pad0, pad1, pad2, pad3, pad4, pad5, pad6, pad7;
// Builds the per-thread cache for both heap and direct allocations: tiny/small sub-page
// caches and normal-size caches per arena, plus a death-watch callback that returns
// cached buffers to the arenas when the owning thread dies.
// <param name="heapArena">Heap arena to cache for; when null the heap cache fields are nulled out.</param>
// <param name="directArena">Direct arena to cache for; when null the direct cache fields are nulled out.</param>
// <param name="tinyCacheSize">Capacity of each tiny sub-page cache.</param>
// <param name="smallCacheSize">Capacity of each small sub-page cache.</param>
// <param name="normalCacheSize">Capacity of each normal cache.</param>
// <param name="maxCachedBufferCapacity">Upper bound on buffer size eligible for normal caching; must be >= 0.</param>
// <param name="freeSweepAllocationThreshold">Allocations between free sweeps; must be >= 1, but only validated when at least one cache exists.</param>
internal PoolThreadCache(PoolArena<T> heapArena, PoolArena<T> directArena, int tinyCacheSize, int smallCacheSize, int normalCacheSize, int maxCachedBufferCapacity, int freeSweepAllocationThreshold)
{
    if (maxCachedBufferCapacity < 0)
    {
        ThrowHelper.ThrowArgumentException_PositiveOrZero(maxCachedBufferCapacity, ExceptionArgument.maxCachedBufferCapacity);
    }
    _freeSweepAllocationThreshold = freeSweepAllocationThreshold;
    HeapArena = heapArena;
    DirectArena = directArena;
    if (directArena is object)
    {
        // Create the caches for the direct allocations.
        tinySubPageDirectCaches = CreateSubPageCaches(
            tinyCacheSize, PoolArena<T>.NumTinySubpagePools, SizeClass.Tiny);
        smallSubPageDirectCaches = CreateSubPageCaches(
            smallCacheSize, directArena.NumSmallSubpagePools, SizeClass.Small);
        // Shift count used to map a normalized capacity to a normal-cache index (log2 of page size).
        _numShiftsNormalDirect = Log2(directArena.PageSize);
        normalDirectCaches = CreateNormalCaches(
            normalCacheSize, maxCachedBufferCapacity, directArena);
        directArena.IncrementNumThreadCaches();
    }
    else
    {
        // No directArea is configured so just null out all caches
        tinySubPageDirectCaches = null;
        smallSubPageDirectCaches = null;
        normalDirectCaches = null;
        _numShiftsNormalDirect = -1;
    }
    if (heapArena is object)
    {
        // Create the caches for the heap allocations
        tinySubPageHeapCaches = CreateSubPageCaches(
            tinyCacheSize, PoolArena<T>.NumTinySubpagePools, SizeClass.Tiny);
        smallSubPageHeapCaches = CreateSubPageCaches(
            smallCacheSize, heapArena.NumSmallSubpagePools, SizeClass.Small);
        _numShiftsNormalHeap = Log2(heapArena.PageSize);
        normalHeapCaches = CreateNormalCaches(
            normalCacheSize, maxCachedBufferCapacity, heapArena);
        heapArena.IncrementNumThreadCaches();
    }
    else
    {
        // No heapArea is configured so just null out all caches
        tinySubPageHeapCaches = null;
        smallSubPageHeapCaches = null;
        normalHeapCaches = null;
        _numShiftsNormalHeap = -1;
    }
    // We only need to watch the thread when any cache is used.
    if (tinySubPageDirectCaches is object || smallSubPageDirectCaches is object || normalDirectCaches is object
        || tinySubPageHeapCaches is object || smallSubPageHeapCaches is object || normalHeapCaches is object)
    {
        // NOTE(review): the threshold is only validated when a cache is actually created,
        // so a cache-less instance silently accepts any value.
        if (freeSweepAllocationThreshold < 1)
        {
            ThrowHelper.ThrowArgumentException_Positive(freeSweepAllocationThreshold, ExceptionArgument.freeSweepAllocationThreshold);
        }
        // Free0 is captured as a delegate once so Watch/Unwatch see the same instance.
        _freeTask = Free0;
        _deathWatchThread = Thread.CurrentThread;
        // The thread-local cache will keep a list of pooled buffers which must be returned to
        // the pool when the thread is not alive anymore.
        ThreadDeathWatcher.Watch(_deathWatchThread, _freeTask);
    }
    else
    {
        _freeTask = null;
        _deathWatchThread = null;
    }
}
/// <summary>
/// Always reports that the given thread is inside this executor's event loop.
/// </summary>
/// <param name="thread">The thread to test; ignored.</param>
/// <returns>Always true.</returns>
public override bool IsInEventLoop(Thread thread)
{
    // This executor treats every thread as being "in" its loop.
    return true;
}