// Removes and returns the object at the head of this (owner-local) queue.
// The caller must guarantee the queue is non-empty.  The popped object's
// queue-link field is reset to the bare mark color so it no longer appears
// to be chained into any list.
internal Object Pop(UIntPtr markedColor) {
    VTable.Assert(!this.IsEmpty(), "Queue is empty!");
    Object result = Magic.fromAddress(this.head);
    // Every object reachable from the queue must already carry the mark.
    VTable.Assert(ThreadHeaderQueue.GcMark(result) == markedColor);
    // Advance the head past the object, then sever its link.
    this.head = QueueLink(result);
    SetQueueField(result, markedColor);
    return result;
}
// Marks the object at 'value' and pushes it onto 'thread's header queue,
// unless it already carries the current mark color.  Unlike MarkIfNecessary,
// this inline variant performs no null or GC-page filtering — the caller is
// expected to have screened 'value' already (TODO confirm against callers).
// Compiled away entirely unless the concurrent mark-sweep collector is built.
internal static void MarkIfNecessaryInline(UIntPtr value, Thread thread) {
#if !SINGULARITY || CONCURRENT_MS_COLLECTOR
    // Snapshot the current mark color; 'markedColor' is a static that the
    // collector presumably flips between cycles — verify against collector code.
    UIntPtr marked = markedColor;
    if (ThreadHeaderQueue.GcMark(Magic.fromAddress(value)) != marked) {
        // Only objects on pages owned by this thread/domain may be pushed here.
        VTable.Assert(PageTable.IsMyPage(PageTable.Page(value)));
        UIntPtr unmarked = unmarkedColor;
        // Push atomically transitions the object unmarked -> marked and links
        // it into the thread's queue.
        ThreadHeaderQueue.Push(thread, value, marked, unmarked);
    }
#endif // CONCURRENT_MS_COLLECTOR
}
// Marks the object at 'value' (if it is a non-null reference into the GC
// heap and not already marked) by pushing it onto the current thread's
// header queue.  Returns true iff the object was newly pushed; returns
// false for null, non-GC-page addresses, already-marked objects, or when
// the concurrent mark-sweep collector is compiled out.
internal static bool MarkIfNecessary(UIntPtr value) {
#if !SINGULARITY || CONCURRENT_MS_COLLECTOR
    // Null references never need marking.
    if (value == 0) {
        return(false);
    }
    UIntPtr marked = markedColor;
    // Only consider addresses that actually lie in the GC heap; interior
    // runtime structures and statics are skipped.
    if (PageTable.IsGcPage(PageTable.Page(value)) &&
        ThreadHeaderQueue.GcMark(Magic.fromAddress(value)) != marked) {
        // The object must live on a page this thread/domain owns.
        VTable.Assert(PageTable.IsMyPage(PageTable.Page(value)));
        Thread thread = Thread.CurrentThread;
        UIntPtr unmarked = unmarkedColor;
        ThreadHeaderQueue.Push(thread, value, marked, unmarked);
        return(true);
    }
#endif // CONCURRENT_MS_COLLECTOR
    return(false);
}
// Attempts to steal fromQueue's pending segment and splice it onto the
// front of this queue.  Returns true if a segment was transferred, false
// if fromQueue had nothing available to steal.
private bool StealFrom(ref ThreadHeaderQueue fromQueue, UIntPtr markedColor) {
    UIntPtr stolenHead, stolenTail;
    if (!fromQueue.StealList(out stolenHead, out stolenTail)) {
        return false;
    }
    // Splice the stolen segment ahead of our current list: the stolen
    // tail's link takes our old head, and the stolen head becomes ours.
    Object last = Magic.fromAddress(stolenTail);
    VTable.Assert(ThreadHeaderQueue.GcMark(last) == markedColor);
    SetQueueField(last, this.head + markedColor);
    this.head = stolenHead;
    return true;
}
// Steals the remaining queue segment from a dead thread's queue and
// prepends it to this queue.  Unlike StealFrom, the destination splice is
// (optionally) lock-free because another collector thread may be pushing
// onto *this* queue concurrently; 'ongoingUpdates' brackets the transfer
// so observers can tell an update is in flight — confirm the reader-side
// protocol against the rest of the class.
private void StealFromDead(ref ThreadHeaderQueue fromQueue, UIntPtr markedColor) {
    // It is assumed that there are no concurrent accesses to
    // fromQueue. The thread owning the queue is supposed to
    // be dead, and there should only be one other thread
    // trying to steal the dead thread's queue.
    // head != stolenHead means the dead queue still has unstolen entries.
    if (Thread.VolatileRead(ref fromQueue.head) !=
        Thread.VolatileRead(ref fromQueue.stolenHead)) {
        UIntPtr fromHead, fromTail;
        // Announce that a transfer is in progress before touching this.head.
        this.ongoingUpdates++;
        ThreadHeaderQueue.transferAttempt = true;
        if (fromQueue.StealList(out fromHead, out fromTail)) {
            // Prepend the stolen list segment to our list
            Object tailObject = Magic.fromAddress(fromTail);
#if ATOMIC_PUSH
            // NOTE: We don't try to be thread-safe on this.head
            VTable.Assert(GcMark(tailObject) == markedColor);
            SetQueueField(tailObject, this.head + markedColor);
            Thread.VolatileWrite(ref this.head, fromHead);
#else // ATOMIC_PUSH
            // REVIEW: We don't really need LOCK CMPXCHG, but
            // we do need CMPXCHG (needs to be atomic with
            // respect to the current processor, only).
            // Classic CAS retry loop: link the stolen tail to the head we
            // observed, then attempt to swing this.head to the stolen head;
            // retry with the fresh head value if another push raced us.
            UIntPtr oldHead;
            UIntPtr foundHead = Thread.VolatileRead(ref this.head);
            do {
                oldHead = foundHead;
                SetQueueField(tailObject, oldHead + markedColor);
                foundHead = Interlocked.CompareExchange(ref this.head,
                                                        fromHead, oldHead);
            } while (foundHead != oldHead);
#endif // ATOMIC_PUSH
        }
        // Transfer complete (or nothing to steal after all).
        this.ongoingUpdates--;
    }
}
// call when the heap is inited
// One-time late initialization for the CoCo subsystem: allocates the
// interlock monitor object, flags the current thread as CoCo-ready,
// computes the byte offsets of the GC link field and the vtable field
// within an object (derived from the preallocated 'instance'), and then
// runs the instance's late-init stub.  Sets 'inited' last so readers
// presumably observe a fully initialized state — confirm ordering
// guarantees against callers.
internal static void InitLate() {
    if (fDebug) {
        VTable.DebugPrint("CoCo: in InitLate\n");
    }
    interlock = new Object();
    // Eagerly inflate the monitor for 'interlock' so later locking cannot
    // trigger allocation at an awkward time (TODO confirm intent).
    MultiUseWord.GetMonitor(interlock);
    MixinThread(Thread.CurrentThread).readyForCoCo = true;
    // REVIEW: this is just offensive
    // Offset of the pre-header queue link field, measured from the start
    // of a sample object.
    GCFieldOffset =
        (UIntPtr)Magic.toPointer(ref ThreadHeaderQueue.MixinObject(instance)
                                 .preHeader.link)
        - Magic.addressOf(instance);
    // Offset of the vtable field, measured the same way.
    vtableFieldOffset =
        (UIntPtr)instance.VTableFieldAddr - Magic.addressOf(instance);
    instance.InitLateStub();
    inited = true;
}