internal override UIntPtr Visit(Object obj) {
    // Clear the object's backup reference count, then report its size so
    // the scan can advance to the next object.
    setBackupRefcount(obj, UIntPtr.Zero);
    UIntPtr objectSize =
        ObjectLayout.ObjectSize(Magic.addressOf(obj), obj.vtable);
    return objectSize;
}
// Allocates a new object of the given type from a fresh block and
// initializes its header. Under either reference-counting build, the
// REF_STATE word is seeded before the barrier sees the object.
internal static Object Allocate(VTable vtable) {
    UIntPtr numBytes = ObjectLayout.ObjectSize(vtable);
    UIntPtr objectAddr = AllocateBlock(numBytes, vtable.baseAlignment);
    Object result = Magic.fromAddress(objectAddr);
#if REFERENCE_COUNTING_GC
    // New objects start with a count of 2, plus the acyclic flag when the
    // type cannot participate in reference cycles.
    uint refState = vtable.isAcyclicRefType ?
        (ReferenceCountingCollector.
         acyclicFlagMask | 2) :
        2;
    // Clearing countingONFlagMask leaves counting disabled for now.
    result.REF_STATE =
        refState & ~ReferenceCountingCollector.countingONFlagMask;
#elif DEFERRED_REFERENCE_COUNTING_GC
    // Deferred RC seeds the mark flag instead of a numeric count.
    uint refState = vtable.isAcyclicRefType ?
        (DeferredReferenceCountingCollector.
         acyclicFlagMask |
         DeferredReferenceCountingCollector.
         markFlagMask) :
        DeferredReferenceCountingCollector.
        markFlagMask;
    // Clearing countingONFlagMask leaves counting disabled for now.
    result.REF_STATE =
        refState & ~DeferredReferenceCountingCollector.countingONFlagMask;
#endif
    Barrier.BootstrapInitObject(result, vtable);
    return(result);
}
internal override Object AllocateObject(VTable vtable, Thread currentThread) {
    // Delegate to the sized overload using the type's default size and
    // base alignment.
    UIntPtr defaultSize = ObjectLayout.ObjectSize(vtable);
    return AllocateObject(vtable, defaultSize, vtable.baseAlignment,
                          currentThread);
}
internal override unsafe UIntPtr Visit(Object obj) {
    // Mark the object, then report how many bytes it occupies so the
    // caller can step to the next object.
    obj.GcMark(UIntPtr.Zero);
    VTable vt = obj.vtable;
    return ObjectLayout.ObjectSize(Magic.addressOf(obj), vt);
}
internal override unsafe UIntPtr Visit(Object obj) {
    VTable vt = obj.vtable;
    // Sanity check: the object's RuntimeType must not be marked here.
    VTable.Assert(!MultiUseWord.IsMarked(vt.vtableType),
                  "@!MultiUseWord.IsMarked(rType)");
    return ObjectLayout.ObjectSize(Magic.addressOf(obj), vt);
}
internal override unsafe UIntPtr Visit(Object obj) {
    VTable vt = obj.vtable;
    RuntimeType runtimeType = vt.vtableType;
    // Count each distinct RuntimeType exactly once: a type whose mark
    // already equals isVisitedFlag has been tallied before.
    bool alreadyCounted =
        (MultiUseWord.IsMarked(runtimeType) == this.isVisitedFlag);
    if (!alreadyCounted) {
        this.Count++;
        MultiUseWord.SetMark(runtimeType, this.isVisitedFlag);
    }
    return ObjectLayout.ObjectSize(Magic.addressOf(obj), vt);
}
internal override unsafe UIntPtr Visit(Object obj) {
    // Charge this object's size and count to its type's account. The
    // account index was previously stashed in the RuntimeType's
    // MultiUseWord by an earlier pass.
    VTable vtable = obj.vtable;
    RuntimeType rType = vtable.vtableType;
    uint tableIndex = (uint)MultiUseWord.GetValForObject(rType);
    UIntPtr objAddr = Magic.addressOf(obj);
    // Fix: compute the object size once instead of calling
    // ObjectLayout.ObjectSize twice for the same object.
    UIntPtr objectSize = ObjectLayout.ObjectSize(objAddr, vtable);
    this.accounts[tableIndex].TotalSize += objectSize;
    this.accounts[tableIndex].Count++;
    return objectSize;
}
internal override UIntPtr VisitLarge(Object obj) {
    UIntPtr objectSize =
        ObjectLayout.ObjectSize(Magic.addressOf(obj), obj.GcUnmarkedVTable);
    // GcMark reports whether we flipped the color back to unmarked; if we
    // did not, this visitor owns the object and must free it.
    bool unmarkedByUs = obj.GcMark(UIntPtr.Zero);
    if (!unmarkedByUs) {
        SegregatedFreeList.FreeLarge(obj);
    }
    // REVIEW: Should we return a real size here?
    return objectSize;
}
internal override Object AllocateObject(VTable vtable, Thread currentThread) {
    // Allocate via the base class, then notify any enabled profilers.
    Object result = base.AllocateObject(vtable, currentThread);
    if (GC.IsProfiling) {
        ProfileAllocation(result);
    }
    if (VTable.enableGCProfiling) {
        RegisterNewObject((ulong)ObjectLayout.ObjectSize(vtable));
    }
    return result;
}
// BUGBUG: We are allocating an ArrayList while the collector
// is running.  If the ArrayList gets big enough to be
// allocated in the older generation, then the RemSet has the
// potential to overflow since the boxed integers will reside
// in the young generation.  We should eventually eliminate
// the use of ArrayList in this class as well as avoid boxing
// the page indices.
//
// Records the pages spanned by the object that a pinned reference
// points into, so those pages will not be reclaimed/relocated.
internal unsafe override void Visit(UIntPtr *loc) {
    UIntPtr addr = *loc;
    UIntPtr page = PageTable.Page(addr);
    PageType pageType = PageTable.Type(page);
    // Only references into zombie pages matter here; anything else must
    // be one of the known non-collectable page kinds.
    if (!PageTable.IsZombiePage(pageType)) {
        VTable.Assert(PageTable.IsGcPage(pageType) ||
                      PageTable.IsNonGcPage(pageType) ||
                      PageTable.IsStackPage(pageType) ||
                      PageTable.IsSharedPage(pageType) ||
                      VTable.BuildC2Mods,
                      "Semispace:RegisterPinnedReference:1");
        return;
    }
    PageType gen = PageTable.ZombieToLive(pageType);
    // Map the (possibly interior) address back to the object's start.
    UIntPtr pinnedObjectAddr = InteriorPtrTable.Find(addr);
    // Lazily create the shared pinned-page bookkeeping (see BUGBUG above).
    if (pinnedPageList == null) {
        pinnedPageList = new ArrayList();
        comparer = new UIntPtrComparer();
    }
    Object pinnedObject = Magic.fromAddress(pinnedObjectAddr);
    UIntPtr objectSize =
        ObjectLayout.ObjectSize(pinnedObjectAddr, pinnedObject.vtable);
    // The object's footprint includes its PreHeader, so compute the page
    // range from just before the object through its last byte.
    UIntPtr beforeObjectAddr = pinnedObjectAddr - PreHeader.Size;
    UIntPtr pastObjectAddr = beforeObjectAddr + objectSize;
    UIntPtr firstPage = PageTable.Page(beforeObjectAddr);
    UIntPtr lastPage = PageTable.Page(pastObjectAddr - 1);
    for (UIntPtr i = firstPage; i <= lastPage; i++) {
        if (!pinnedPageList.Contains(i)) {
            Trace.Log(Trace.Area.Pointer,
                      "RegPin: ptr={0} page={1} gen={2}",
                      __arglist(pinnedObjectAddr, i, gen));
            // Each newly pinned page counts as promoted for its
            // generation's statistics.
            GenerationalCollector.gcPromotedTable[(int)gen - 1] +=
                PageTable.PageSize;
            pinnedPageList.Add(i);
        }
    }
}
private static UIntPtr PostPinnedObjectAddr(UIntPtr endPage) {
    UIntPtr endAddr = PageTable.PageAddr(endPage);
    UIntPtr postLastObjectAddr = InteriorPtrTable.Last(endPage - 1);
    bool objectStraddlesPage =
        postLastObjectAddr < endAddr &&
        !BumpAllocator.IsUnusedSpace(postLastObjectAddr);
    if (objectStraddlesPage) {
        // The last object continues into the next page; step just past it.
        Object lastObject = Magic.fromAddress(postLastObjectAddr);
        postLastObjectAddr +=
            ObjectLayout.ObjectSize(postLastObjectAddr, lastObject.vtable);
    }
    return postLastObjectAddr - PreHeader.Size;
}
// Copied from InteriorPtrTable.cs
// Computes the size of the object at addr, tolerating a vtable word that
// is actually a forwarding pointer or carries low tag bits.
internal static unsafe UIntPtr ObjectSize(UIntPtr addr) {
    UIntPtr vtableAddr = Allocator.GetObjectVTable(addr);
    if (PageTable.IsGcPage(PageTable.Page(vtableAddr))) {
        // The vtable field is really a forwarding pointer; follow it to
        // the relocated copy's vtable word.
        vtableAddr = Allocator.GetObjectVTable(vtableAddr);
    } else {
        // Strip any low tag bits from the vtable word.
        vtableAddr &= ~((UIntPtr)3);
    }
    VTable vtable = Magic.toVTable(Magic.fromAddress(vtableAddr));
    return ObjectLayout.ObjectSize(addr, vtable);
}
internal override unsafe UIntPtr Visit(Object obj) {
    UIntPtr size =
        ObjectLayout.ObjectSize(Magic.addressOf(obj), obj.vtable);
    uint refState = obj.REF_STATE;
    UIntPtr rcCount = (UIntPtr)(refState & RSMasks.refCount);
    // An object the RC collector counts as live (counting enabled and a
    // positive count) whose backup count is zero and which is still
    // unmarked gets traversed by the BFS marker.
    bool rcLive = ((refState & RSMasks.countingFlag) != 0) && rcCount > 0;
    if (rcLive &&
        getBackupRefcount(obj) == 0 &&
        obj.GcMark() == UIntPtr.Zero) {
        bfsMarker.Traverse(obj);
    }
    return size;
}
internal override unsafe UIntPtr Visit(Object obj) {
    VTable vt = obj.vtable;
    RuntimeType runtimeType = vt.vtableType;
    bool seenBefore =
        (MultiUseWord.IsMarked(runtimeType) == this.isVisitedFlag);
    if (!seenBefore) {
        // First sighting of this RuntimeType: claim the next account
        // slot, zero its totals, and mark the type as visited.
        VTable.Assert(this.tableIndex < this.accounts.Length,
                      @"this.tableIndex < this.accounts.Length");
        this.accounts[this.tableIndex].RuntimeTypeObject = runtimeType;
        this.accounts[this.tableIndex].TotalSize = UIntPtr.Zero;
        this.accounts[this.tableIndex].Count = 0;
        MultiUseWord.SetMark(runtimeType, this.isVisitedFlag);
        this.tableIndex++;
    }
    return ObjectLayout.ObjectSize(Magic.addressOf(obj), vt);
}
internal override unsafe UIntPtr Visit(Object obj) {
    UIntPtr size =
        ObjectLayout.ObjectSize(Magic.addressOf(obj), obj.vtable);
    uint refState = obj.REF_STATE;
    UIntPtr rcCount = (UIntPtr)(refState & RSMasks.refCount);
    bool rcLive = ((refState & RSMasks.countingFlag) != 0) && rcCount > 0;
    if (rcLive && getBackupRefcount(obj) == 0) {
        // RC-live object whose backup count is zero: feed its DFS
        // discovery/finishing times to the cycle closure and walk its
        // reference fields.
        cycleClosure.Initialize(getDfsDiscoveryTime(obj),
                                getDfsFinishingTime(obj));
        cycleClosure.VisitReferenceFields(obj);
    }
    return size;
}
internal static WriteBarrierCMS MakeEarlyInstance() {
    // We need a write barrier even if we haven't set up enough of the
    // memory system to support allocating from bootstrap memory yet, so
    // carve the instance out of a statically reserved buffer.
    VTable vtable = ((RuntimeType)typeof(WriteBarrierCMS)).classVtable;
    UIntPtr numBytes = ObjectLayout.ObjectSize(vtable);
    if (numBytes > (UIntPtr)sizeof(FakeObjectBytes)) {
        // Too big to fit in memoryForFakeObject.
        return null;
    }
    UIntPtr fakeObjectAddr;
    fixed (PostHeader *middlePtr = &memoryForFakeObject.postBytes) {
        fakeObjectAddr = (UIntPtr)middlePtr;
    }
    // Stitch the vtable word in by hand and reinterpret the buffer as an
    // object.
    Object result = Magic.fromAddress(fakeObjectAddr);
    *result.VTableFieldAddr = Magic.addressOf(vtable);
    return (WriteBarrierCMS)result;
}
internal override unsafe UIntPtr Visit(Object obj) {
    UIntPtr size =
        ObjectLayout.ObjectSize(Magic.addressOf(obj), obj.vtable);
    uint refState = obj.REF_STATE;
    UIntPtr rcCount = (UIntPtr)(refState & RSMasks.refCount);
    // Live according to the RC collector (counting enabled, count > 0)...
    bool rcLive = ((refState & RSMasks.countingFlag) != 0) && rcCount > 0;
    if (rcLive && getBackupRefcount(obj) == 0) {
        // ...but actually unreachable: accumulate its size.
        this.Size += size;
    }
    return size;
}
// Reference updates and object relocation
//
// Walks every zombie page owned by this collector, computes the new
// (compacted) address for each reachable object, and threads that new
// address through the object's pointer-chain of forwarding entries.
// Pinned and large objects are left in place. Returns the final
// destination cursor; oldAllocPtr is set to the initial cursor value.
// NOTE(review): the 0x80000000 value appears to be a sentinel for
// deltaBytes ("no current relocation run") — confirm against the
// RegisterRelocation* implementation.
private unsafe UIntPtr ForwardReferences(PageType generation,
                                         out UIntPtr oldAllocPtr) {
    VTable.Assert(IsValidGeneration((int)generation));
    UIntPtr destPage = UIntPtr.Zero;
    UIntPtr destCursor;
    UIntPtr destLimit;
    PageType destGeneration;
    // Survivors are promoted one generation, capped at MAX_GENERATION.
    if (generation < MAX_GENERATION) {
        destGeneration = generation + 1;
    } else {
        destGeneration = MAX_GENERATION;
    }
    destCursor = UIntPtr.Zero;
    destLimit = UIntPtr.Zero;
    oldAllocPtr = destCursor;
    // runLength tracks the byte length of the current contiguous
    // relocation run reported via RegisterRelocationStart/End.
    UIntPtr runLength = UIntPtr.Zero;
    for (UIntPtr i = UIntPtr.Zero; i < PageTable.pageTableCount; i++) {
        if (!IsMyZombiePage(i)) {
            continue;
        }
        UIntPtr deltaBytes = (UIntPtr)0x80000000;
        UIntPtr sourceCursor = PageTable.PageAddr(i);
        // Extend the scan over the whole contiguous run of zombie pages.
        do {
            i++;
        } while (i < PageTable.pageTableCount && IsMyZombiePage(i));
        UIntPtr sourceLimit = PageTable.PageAddr(i);
        while (true) {
            if (sourceCursor >= sourceLimit) {
                break;
            }
            // Skip alignment padding words.
            if (Allocator.IsAlignmentMarkerAddr(sourceCursor)) {
                sourceCursor += UIntPtr.Size;
                deltaBytes += UIntPtr.Size;
                continue;
            }
            // An unused-space marker means the rest of this page is
            // empty; jump to the next page boundary.
            if (BumpAllocator.IsUnusedMarkerAddr(sourceCursor)) {
                sourceCursor += UIntPtr.Size;
                sourceCursor = PageTable.PagePad(sourceCursor);
                deltaBytes = (UIntPtr)0x80000000;
                continue;
            }
            UIntPtr objectAddr = sourceCursor + PreHeader.Size;
            UIntPtr vtableOrMarker = Allocator.GetObjectVTable(objectAddr);
            if (vtableOrMarker == UIntPtr.Zero) {
                // We found the end of an allocation page
                sourceCursor = PageTable.PagePad(sourceCursor);
                deltaBytes = (UIntPtr)0x80000000;
                continue;
            }
            UIntPtr vtableAddr;
            if ((vtableOrMarker & 1) != 0) {
                // Low bit set: the vtable word is the head of a chain of
                // tagged pointers; follow it to the real vtable address.
                UIntPtr temp = *(UIntPtr *)(vtableOrMarker - 1);
                while ((temp & 1) != 0) {
                    temp = *(UIntPtr *)(temp - 1);
                }
                VTable.Assert(PageTable.IsNonGcPage(PageTable.Type(PageTable.Page(temp))));
                vtableAddr = temp;
                if ((temp & 2) != 0) {
                    // Found pinned object
                    SkipDestinationAreas(ref destPage, destCursor,
                                         ref destLimit, sourceCursor);
                    deltaBytes -= (sourceCursor - destCursor);
                    destCursor = sourceCursor;
                    vtableAddr -= 2; // Remove "pinned" bit
                }
                Allocator.SetObjectVTable(objectAddr, vtableAddr);
            } else {
                vtableAddr = vtableOrMarker;
            }
            VTable vtable = Magic.toVTable(Magic.fromAddress(vtableAddr));
            UIntPtr objectSize = ObjectLayout.ObjectSize(objectAddr, vtable);
            VTable.Assert(objectSize > 0);
            if ((vtableOrMarker & 1) != 0) {
                // Marked (reachable) object: decide where it moves.
                if (GenerationalCollector.IsLargeObjectSize
                    (objectSize)) {
                    // Don't move large objects
                    SkipDestinationAreas(ref destPage, destCursor,
                                         ref destLimit, sourceCursor);
                    UIntPtr localDelta = sourceCursor - destCursor;
                    deltaBytes -= localDelta;
                    if (deltaBytes == UIntPtr.Zero &&
                        runLength != UIntPtr.Zero) {
                        runLength += localDelta;
                    }
                    destCursor = sourceCursor;
                    // Advance the cursor to the page boundary past the
                    // large object.
                    UIntPtr objLimit = sourceCursor + objectSize;
                    UIntPtr pageEndAddr = PageTable.PagePad(objLimit);
                    objectSize = (pageEndAddr - sourceCursor);
                } else if (destCursor + objectSize > destLimit) {
                    // Current destination area is full: find another one.
                    UIntPtr oldDestCursor = destCursor;
                    FindDestinationArea(ref destPage, ref destCursor,
                                        ref destLimit, objectSize,
                                        destGeneration);
                    VTable.Assert(destCursor <= sourceCursor);
                    VTable.Assert(destCursor + objectSize <= destLimit);
                    deltaBytes -= (destCursor - oldDestCursor);
                } else if (vtable.baseAlignment > UIntPtr.Size) {
                    // Pad the destination forward until the object's
                    // post-header payload is properly aligned.
                    uint alignmentMask = vtable.baseAlignment - 1;
                    int offset = PreHeader.Size + UIntPtr.Size;
                    while (((destCursor + offset) & alignmentMask) != 0) {
                        destCursor += UIntPtr.Size;
                        deltaBytes -= UIntPtr.Size;
                        if (deltaBytes == UIntPtr.Zero &&
                            runLength != UIntPtr.Zero) {
                            runLength += UIntPtr.Size;
                        }
                    }
                }
                // Start a new relocation run whenever the source/dest
                // displacement changed (or no run was open).
                if (runLength == UIntPtr.Zero ||
                    deltaBytes != UIntPtr.Zero) {
                    if (runLength != UIntPtr.Zero) {
                        RegisterRelocationEnd(runLength);
                    }
                    RegisterRelocationStart(sourceCursor, destCursor);
                    deltaBytes = UIntPtr.Zero;
                    runLength = UIntPtr.Zero;
                }
                // Unthread the pointer chain: overwrite every entry that
                // referenced this object with its new address.
                UIntPtr newObjectAddr = destCursor + PreHeader.Size;
                do {
                    UIntPtr *ptrAddr = (UIntPtr *)(vtableOrMarker - 1);
                    vtableOrMarker = *ptrAddr;
                    *ptrAddr = newObjectAddr;
                } while ((vtableOrMarker & 1) != 0);
                destCursor += objectSize;
                runLength += objectSize;
            } else {
                // Unmarked (dead) object: it just widens the gap.
                deltaBytes += objectSize;
                if (runLength != UIntPtr.Zero) {
                    RegisterRelocationEnd(runLength);
                }
                runLength = UIntPtr.Zero;
            }
            sourceCursor += objectSize;
        }
    }
    if (runLength != UIntPtr.Zero) {
        RegisterRelocationEnd(runLength);
    }
    return(destCursor);
}
// Sorts the write buffer, removes duplicate and interior entries, and
// clears thread overflow slots that duplicate buffered entries. Entries
// with the low bit set denote "entire object" records; addresses that
// fall inside such an object are redundant and are dropped.
internal override void Uniquify() {
    // Sort the write buffer.
    // TODO: Would like sort that is in-place, O(n lg n) worst-case, and
    // O(n) when nearly-sorted.
    // (Bubble sort with shrinking bound; early-exits when already sorted.)
    bool changed = true;
    for (int max = writeBufferIndex; changed; --max) {
        changed = false;
        for (int i = 1; i < max; ++i) {
            if (writeBuffer[i - 1] > writeBuffer[i]) {
                changed = true;
                UIntPtr t = writeBuffer[i - 1];
                writeBuffer[i - 1] = writeBuffer[i];
                writeBuffer[i] = t;
            }
        }
    }
    // Remove duplicates
    int dest = 0;
    UIntPtr last = UIntPtr.Zero;
    for (int i = 0; i < writeBufferIndex; i++) {
        UIntPtr current = *(writeBuffer + i);
        if (current != last) {
            VTable.Assert(current > last);
            *(writeBuffer + dest) = current;
            dest++;
            last = current;
            if ((current & 1) != 0) {
                // The entire object is marked, skip interior addresses
                UIntPtr objPtr = current - 1;
                VTable vtable = Magic.fromAddress(objPtr).vtable;
                UIntPtr size = ObjectLayout.ObjectSize(objPtr, vtable);
                VTable.Assert(size > 0);
                UIntPtr objLimit = objPtr + size;
                i++;
                while (i < writeBufferIndex &&
                       *(writeBuffer + i) < objLimit) {
                    i++;
                }
                i--;
            }
        }
    }
    writeBufferIndex = dest;
    // Remove duplicates hiding in the overflow slot in thread objects!
    for (int i = 0; i < Thread.threadTable.Length; i++) {
        Thread t = Thread.threadTable[i];
        if (t != null) {
            UIntPtr overflowPtr = MixinThread(t).ssb.overflowValue;
            if (overflowPtr != UIntPtr.Zero) {
                // Binary search for the greatest entry <= overflowPtr.
                int left = 0;
                int right = writeBufferIndex;
                while (left < right - 1) {
                    int mid = (left + right) / 2;
                    if (*(writeBuffer + mid) <= overflowPtr) {
                        left = mid;
                    } else {
                        right = mid;
                    }
                }
                UIntPtr foundPtr = *(writeBuffer + left);
                if (overflowPtr == foundPtr) {
                    // Found an exact duplicate
                    MixinThread(t).ssb.overflowValue = UIntPtr.Zero;
                    continue;
                } else if ((foundPtr & 1) != 0) {
                    // Buffered whole-object entry: drop the overflow
                    // value if it points inside that object.
                    UIntPtr objAddr = foundPtr - 1;
                    VTable vtable = Magic.fromAddress(objAddr).vtable;
                    UIntPtr size = ObjectLayout.ObjectSize(objAddr, vtable);
                    if (overflowPtr < objAddr + size) {
                        // found a pointer into a checked object
                        MixinThread(t).ssb.overflowValue = UIntPtr.Zero;
                    }
                } else if ((overflowPtr & 1) != 0) {
                    // The overflow slot holds a whole-object entry:
                    // compact away buffered entries that fall inside it.
                    UIntPtr objAddr = overflowPtr - 1;
                    VTable vtable = Magic.fromAddress(objAddr).vtable;
                    UIntPtr size = ObjectLayout.ObjectSize(objAddr, vtable);
                    UIntPtr objLimit = objAddr + size;
                    int skipCount = 0;
                    int probe = left + 1;
                    while (probe < writeBufferIndex &&
                           *(writeBuffer + probe) < objLimit) {
                        probe++;
                        skipCount++;
                    }
                    if (skipCount > 0) {
                        // NOTE(review): this shifts entries starting at
                        // 'left' (overwriting foundPtr, which is not an
                        // interior pointer of this object), and
                        // writeBufferIndex is never reduced by skipCount,
                        // leaving stale trailing entries — verify against
                        // the consumers of writeBuffer.
                        while (probe < writeBufferIndex) {
                            *(writeBuffer + left) = *(writeBuffer + probe);
                            left++;
                            probe++;
                        }
                    }
                }
            }
        }
    }
}
// Dispatches on the low 4 bits of the vtable's pointerTrackingMask to
// locate every reference field in the object described by objDesc,
// passing each field address to this.Filter. Returns the object's size.
// NOTE(review): struct-array cases mutate objDesc.vtable/objectBase for
// the recursive calls — callers presumably must not reuse objDesc
// afterwards; confirm at call sites.
UIntPtr VisitReferenceFieldsTemplate(ref ObjectDescriptor objDesc) {
    UIntPtr pointerTracking = objDesc.vtable.pointerTrackingMask;
    uint objectTag = (pointerTracking & 0xf);
    UIntPtr size;
    switch (objectTag) {
      case ObjectLayout.SPARSE_TAG: {
          // Remaining nibbles of the mask are word indices of the
          // object's (few) pointer fields.
          UIntPtr *sparseObject = (UIntPtr *)objDesc.objectBase;
          size = ObjectLayout.ObjectSize(objDesc.vtable);
          pointerTracking >>= 4;
          while (pointerTracking != 0) {
              uint index = pointerTracking & 0xf;
              pointerTracking >>= 4;
              // The cast to int prevents C# from taking the
              // index * sizeof(UIntPtr) to long:
              UIntPtr *loc = sparseObject + (int)index;
              this.Filter(loc, ref objDesc);
          }
          break;
      }
      case ObjectLayout.DENSE_TAG: {
          // skip vtable
          // Remaining bits form a bitmap: one bit per word after the
          // PostHeader, set where the word is a pointer field.
          int postHeaderSize = PostHeader.Size;
          UIntPtr *denseObject = (UIntPtr *)
              (objDesc.objectBase + postHeaderSize);
          size = ObjectLayout.ObjectSize(objDesc.vtable);
          pointerTracking >>= 4;
          while (pointerTracking != 0) {
              if ((pointerTracking & ((UIntPtr)0x1)) != 0) {
                  this.Filter(denseObject, ref objDesc);
              }
              pointerTracking >>= 1;
              denseObject++;
          }
          break;
      }
      case ObjectLayout.PTR_VECTOR_TAG: {
          // Vector of references: every element is a pointer.
          int postHeaderSize = PostHeader.Size;
          uint length = *(uint *)(objDesc.objectBase + postHeaderSize);
          size = ObjectLayout.ArraySize(objDesc.vtable, length);
          int preHeaderSize = PreHeader.Size;
          UIntPtr *elementAddress = (UIntPtr *)
              (objDesc.objectBase + objDesc.vtable.baseLength -
               preHeaderSize);
          for (uint i = 0; i < length; i++, elementAddress++) {
              this.Filter(elementAddress, ref objDesc);
          }
          break;
      }
      case ObjectLayout.OTHER_VECTOR_TAG: {
          int postHeaderSize = PostHeader.Size;
          uint length = *(uint *)(objDesc.objectBase + postHeaderSize);
          size = ObjectLayout.ArraySize(objDesc.vtable, length);
          if (objDesc.vtable.arrayOf == StructuralType.Struct) {
              // pretend the struct is boxed and account for the
              // presence of the vtable field
              VTable elementVTable = objDesc.vtable.arrayElementClass;
              UIntPtr elementMask = elementVTable.pointerTrackingMask;
              // A structure with no references will have a SPARSE
              // descriptor with no offset values.
              if (elementMask != (UIntPtr)ObjectLayout.SPARSE_TAG) {
                  int preHeaderSize = PreHeader.Size;
                  UIntPtr elementAddress =
                      (objDesc.objectBase + objDesc.vtable.baseLength -
                       preHeaderSize - postHeaderSize);
                  int elementSize = objDesc.vtable.arrayElementSize;
                  // Recurse per element, re-aiming objDesc at each one.
                  objDesc.vtable = elementVTable;
                  for (uint i = 0; i < length; i++) {
                      objDesc.objectBase = elementAddress;
                      this.VisitReferenceFieldsTemplateNoInline(ref objDesc);
                      elementAddress += elementSize;
                  }
              }
          }
          break;
      }
      case ObjectLayout.PTR_ARRAY_TAG: {
          // Multi-word array header: length sits one uint further in.
          int postHeaderSize = PostHeader.Size;
          uint length = *(uint *)(objDesc.objectBase + postHeaderSize +
                                  sizeof(uint));
          size = ObjectLayout.ArraySize(objDesc.vtable, length);
          int preHeaderSize = PreHeader.Size;
          UIntPtr *elementAddress = (UIntPtr *)
              (objDesc.objectBase + objDesc.vtable.baseLength -
               preHeaderSize);
          for (uint i = 0; i < length; i++, elementAddress++) {
              this.Filter(elementAddress, ref objDesc);
          }
          break;
      }
      case ObjectLayout.OTHER_ARRAY_TAG: {
          int postHeaderSize = PostHeader.Size;
          uint length = *(uint *)(objDesc.objectBase + postHeaderSize +
                                  sizeof(uint));
          size = ObjectLayout.ArraySize(objDesc.vtable, length);
          if (objDesc.vtable.arrayOf == StructuralType.Struct) {
              // pretend the struct is boxed and account for the
              // presence of the PostHeader
              VTable elementVTable = objDesc.vtable.arrayElementClass;
              UIntPtr elementMask = elementVTable.pointerTrackingMask;
              // A structure with no references will have a SPARSE
              // descriptor with no offset values.
              if (elementMask != (UIntPtr)ObjectLayout.SPARSE_TAG) {
                  int preHeaderSize = PreHeader.Size;
                  int elementSize = objDesc.vtable.arrayElementSize;
                  UIntPtr elementAddress =
                      objDesc.objectBase + objDesc.vtable.baseLength -
                      preHeaderSize - postHeaderSize;
                  // Recurse per element, re-aiming objDesc at each one.
                  objDesc.vtable = elementVTable;
                  for (uint i = 0; i < length; i++) {
                      objDesc.objectBase = elementAddress;
                      this.VisitReferenceFieldsTemplateNoInline(ref objDesc);
                      elementAddress += elementSize;
                  }
              }
          }
          break;
      }
      case ObjectLayout.STRING_TAG: {
          // Strings hold no references; just compute the size.
          int postHeaderSize = PostHeader.Size;
          uint arrayLength =
              *(uint *)(objDesc.objectBase + postHeaderSize);
          size = ObjectLayout.StringSize(objDesc.vtable, arrayLength);
          break;
      }
      default: {
          // escape case
          // The mask is itself a pointer to an out-of-line table:
          // first int is the count, followed by word offsets.
          VTable.Assert((objectTag & 0x1) == 0,
                        "ReferenceVisitor: (objectTag & 0x1) == 0");
          UIntPtr *largeObject = (UIntPtr *)objDesc.objectBase;
          size = ObjectLayout.ObjectSize(objDesc.vtable);
          int *pointerDescription = (int *)pointerTracking;
          int count = *pointerDescription;
          for (int i = 1; i <= count; i++) {
              UIntPtr *loc = largeObject + *(pointerDescription + i);
              this.Filter(loc, ref objDesc);
          }
          break;
      }
    }
    return(size);
}