// Returns the address (minus the pre-header size) just past the last
// object that begins on the page before endPage. If that object is
// real (not unused space) and ends before the page boundary, its size
// is added so the result points past the object itself.
private static UIntPtr PostPinnedObjectAddr(UIntPtr endPage) {
    UIntPtr pageLimit = PageTable.PageAddr(endPage);
    UIntPtr candidate = InteriorPtrTable.Last(endPage - 1);
    if (candidate < pageLimit &&
        !BumpAllocator.IsUnusedSpace(candidate)) {
        // If the next object straddles into the next page,
        // return the location just past the object.
        Object straddler = Magic.fromAddress(candidate);
        UIntPtr straddlerSize =
            ObjectLayout.ObjectSize(candidate, straddler.vtable);
        candidate += straddlerSize;
    }
    return candidate - PreHeader.Size;
}
// Sets up the Zero Count Table (ZCT): allocates a 2^16-entry UIntPtr[]
// out of raw non-GC memory, patches its length field by hand, and
// threads all entries into a free list rooted at freeHead.
public static unsafe void Initialize() {
    maxEntries = 1 << 16;
    VTable UIntPtrArrayVtable =
        ((RuntimeType)typeof(UIntPtr[])).classVtable;
    tableSize = ObjectLayout.ArraySize(UIntPtrArrayVtable, maxEntries);
    // Allocate a pool for ZCT
    BumpAllocator entryPool = new BumpAllocator(PageType.NonGC);
    UIntPtr memStart = MemoryManager.AllocateMemory(tableSize);
    entryPool.SetZeroedRange(memStart, tableSize);
    PageManager.SetStaticDataPages(memStart, tableSize);
    // Initialize ZCT
    zeroCountTable = (UIntPtr[])
        DeferredReferenceCountingCollector.
        AllocateArray(ref entryPool, UIntPtrArrayVtable, tableSize);
    VTable.Assert(zeroCountTable != null,
                  @"zeroCountTable != null");
    // The array was fabricated from raw memory, so its length field
    // (the uint just past the PostHeader) must be stored manually.
    *(uint *)(Magic.addressOf(zeroCountTable) + PostHeader.Size) =
        maxEntries;
    VTable.Assert(zeroCountTable.Length == maxEntries,
                  @"zeroCountTable.Length == maxEntries");
    // Build ZCT freeEntries list
    // Each free slot holds ((nextIndex) << 2) | 0x01; the low bit tags
    // the slot as free. The last slot's next index is 0, terminating
    // the list. Slot 0 is not part of the free list.
    freeHead = 1;
    for (uint i = 1; i < maxEntries - 1; i++) {
        zeroCountTable[i] = (UIntPtr)(((i + 1) << 2) | 0x01);
    }
    zeroCountTable[maxEntries - 1] = (UIntPtr)0x01;
    zctGarbagePicker = (ZCTGarbagePicker)BootstrapMemory.
        Allocate(typeof(ZCTGarbagePicker));
}
// Copied from InteriorPtrTable.cs
// Computes the size of the object at addr. If the object's vtable
// word points into a GC page it is treated as a forwarding pointer
// and followed once; otherwise any low tag bits are stripped.
internal static unsafe UIntPtr ObjectSize(UIntPtr addr) {
    UIntPtr vtablePtr = Allocator.GetObjectVTable(addr);
    if (PageTable.IsGcPage(PageTable.Page(vtablePtr))) {
        // The vtable field is really a forwarding pointer; the
        // forwarded copy holds the real vtable word.
        vtablePtr = Allocator.GetObjectVTable(vtablePtr);
    } else {
        // Clear the lowest two bits, if set.
        vtablePtr = vtablePtr & ~((UIntPtr)3);
    }
    VTable vt = Magic.toVTable(Magic.fromAddress(vtablePtr));
    return ObjectLayout.ObjectSize(addr, vt);
}
// Heap visitor. For objects that the RC collector counts as live
// (counting flag set, nonzero refcount) but whose backup-trace
// refcount is zero and which are not yet GC-marked, starts a BFS
// marking traversal from the object. Returns the object's size so the
// caller can advance to the next object.
internal override unsafe UIntPtr Visit(Object obj) {
    VTable vtable = obj.vtable;
    UIntPtr size = ObjectLayout.ObjectSize(Magic.addressOf(obj), vtable);
    uint refState = obj.REF_STATE;
    UIntPtr refCount = (UIntPtr)(refState & RSMasks.refCount);
    if ((refState & RSMasks.countingFlag) != 0 && refCount > 0) {
        UIntPtr count = getBackupRefcount(obj);
        // Unreachable per the backup trace and not yet marked:
        // traverse it now (GcMark() == Zero means unmarked here).
        if (count == 0 && obj.GcMark() == UIntPtr.Zero) {
            bfsMarker.Traverse(obj);
        }
    }
    return(size);
}
// Tags the from-space object for copying to the given to-space object.
// Pre-fills every non-ignored word of the to-space copy with Alpha,
// then CASes the from-space CoCoWord from Simple to Tagged(to). On
// success the new TagNode is pushed onto the tag list and true is
// returned; on CAS failure (object pinned/claimed concurrently) false.
// spaceOverhead receives the size of the TagNode bookkeeping object.
internal override bool TagObjectForCopy(Object from, Object to,
                                        out UIntPtr spaceOverhead) {
    TagNode tn = new TagNode();
    spaceOverhead = ObjectLayout.Sizeof(tn);
    // NOTE(review): tn.to is never assigned here although Copy()
    // reads n.to from these nodes — verify it is filled in elsewhere
    // (e.g. recoverable from the Tagged CoCoWord) before relying on it.
    tn.from = from;
    tn.next = tagHead;
    // prepare to-space object
    // Offsets run from -PreHeader.Size up to the word-aligned object
    // size, relative to the object's address.
    UIntPtr begin = UIntPtr.Zero - (UIntPtr)PreHeader.Size;
    UIntPtr end = ((ObjectLayout.Sizeof(from) + sizeof(UIntPtr) - 1)
                   & ~((UIntPtr)sizeof(UIntPtr) - 1))
                  - PreHeader.Size;
    for (UIntPtr offset = begin;
         offset != end;
         offset += sizeof(UIntPtr)) {
        if (!IgnoreOffset(offset)) {
            // Alpha marks a to-space word as "not yet copied".
            *(UIntPtr *)(Magic.addressOf(to) + offset) = Alpha;
        }
    }
    // now we can tag from-space
    if (CASCoCoWord(from, Tagged(Magic.addressOf(to)), Simple())) {
        // Only publish the TagNode once the tag actually took effect.
        tagHead = tn;
        return(true);
    } else {
        return(false);
    }
}
// Heap visitor for per-type accounting. The first time a runtime type
// is seen during this pass (its MultiUseWord mark differs from
// isVisitedFlag), a fresh zeroed account slot is claimed for it and
// the type is marked visited. Returns the object's size.
internal override unsafe UIntPtr Visit(Object obj) {
    VTable vt = obj.vtable;
    RuntimeType runtimeType = vt.vtableType;
    if (MultiUseWord.IsMarked(runtimeType) != this.isVisitedFlag) {
        VTable.Assert(this.tableIndex < this.accounts.Length,
                      @"this.tableIndex < this.accounts.Length");
        // Remember the type first, then initialize its fresh slot.
        MultiUseWord.SetMark(runtimeType, this.isVisitedFlag);
        this.accounts[this.tableIndex].RuntimeTypeObject = runtimeType;
        this.accounts[this.tableIndex].TotalSize = UIntPtr.Zero;
        this.accounts[this.tableIndex].Count = 0;
        this.tableIndex++;
    }
    return ObjectLayout.ObjectSize(Magic.addressOf(obj), vt);
}
// Fabricates a WriteBarrierCMS instance inside a static byte buffer.
// We need a write barrier even if we haven't set up enough of the
// memory system to support allocating from bootstrap memory yet.
// Returns null when the type does not fit in memoryForFakeObject.
internal static WriteBarrierCMS MakeEarlyInstance() {
    VTable barrierVtable =
        ((RuntimeType)typeof(WriteBarrierCMS)).classVtable;
    UIntPtr requiredBytes = ObjectLayout.ObjectSize(barrierVtable);
    if (requiredBytes > (UIntPtr)sizeof(FakeObjectBytes)) {
        // Too big to allocate in memoryForFakeObject.
        return null;
    }
    UIntPtr fakeAddr;
    fixed (PostHeader *postPtr = &memoryForFakeObject.postBytes) {
        fakeAddr = (UIntPtr)postPtr;
    }
    // Stamp the vtable word so the buffer masquerades as an object.
    Object fake = Magic.fromAddress(fakeAddr);
    *fake.VTableFieldAddr = Magic.addressOf(barrierVtable);
    return (WriteBarrierCMS)fake;
}
// Heap visitor that accumulates leaked bytes: objects the RC collector
// counts as live (counting flag set, nonzero refcount) whose
// backup-trace refcount is zero are unreachable, so their size is
// added to this.Size. Returns the object's size.
internal override unsafe UIntPtr Visit(Object obj) {
    UIntPtr objSize =
        ObjectLayout.ObjectSize(Magic.addressOf(obj), obj.vtable);
    uint state = obj.REF_STATE;
    bool isCounted = (state & RSMasks.countingFlag) != 0;
    UIntPtr rc = (UIntPtr)(state & RSMasks.refCount);
    if (isCounted && rc > 0) {
        // This object is considered live by the RC collector.
        UIntPtr backup = getBackupRefcount(obj);
        if (backup == 0) {
            // But it is actually unreachable.
            this.Size += objSize;
        }
    }
    return objSize;
}
// Heap visitor: for objects the RC collector counts as live but whose
// backup-trace refcount is zero, seeds cycleClosure with the object's
// DFS discovery/finishing times and visits its reference fields.
// Returns the object's size.
internal override unsafe UIntPtr Visit(Object obj) {
    UIntPtr objSize =
        ObjectLayout.ObjectSize(Magic.addressOf(obj), obj.vtable);
    uint state = obj.REF_STATE;
    bool isCounted = (state & RSMasks.countingFlag) != 0;
    UIntPtr rc = (UIntPtr)(state & RSMasks.refCount);
    if (isCounted && rc > 0) {
        UIntPtr backup = getBackupRefcount(obj);
        if (backup == 0) {
            UIntPtr discovered = getDfsDiscoveryTime(obj);
            UIntPtr finished = getDfsFinishingTime(obj);
            cycleClosure.Initialize(discovered, finished);
            cycleClosure.VisitReferenceFields(obj);
        }
    }
    return objSize;
}
// Accumulates one method's reference-counting profile record into the
// global accounting table, lazily allocating the table on first use.
// Returns false if the page table is not yet initialized (too early to
// allocate), true otherwise.
internal static unsafe bool AccumulateRCUpdates(String methodName,
                                                int methodIndex,
                                                uint maxIndex,
                                                AcctRecord rec) {
    VTable.Assert(RCCollector.ProfilingMode,
                  @"RCCollector.ProfilingMode");
    // Return if the page table hasn't been set up yet.
    if (PageTable.pageTableCount == UIntPtr.Zero) {
        return(false);
    }
    if (methods == null) {
        // Allocate up front storage for the accounting records.
        //
        // This is requisitioned directly from the memory
        // manager. Care should be taken to ensure that
        // AccumulateRCUpdates does not indirectly call
        // methods that may have compiler-inserted RC updates.
        VTable vtable = ((RuntimeType)typeof(AcctRecord[])).classVtable;
        UIntPtr size = ObjectLayout.ArraySize(vtable, maxIndex + 1);
        BumpAllocator profileData = new BumpAllocator(PageType.NonGC);
        UIntPtr profileDataStart = MemoryManager.AllocateMemory(size);
        profileData.SetRange(profileDataStart, size);
        PageManager.SetStaticDataPages(profileDataStart, size);
        methods = (AcctRecord[])Allocate(ref profileData, vtable, size);
        VTable.Assert(methods != null, @"methods != null");
        // The array came from raw memory; write its length field (the
        // uint just past the PostHeader) by hand.
        *(uint *)(Magic.addressOf(methods) + PostHeader.Size) =
            maxIndex + 1;
    }
    VTable.Assert(methods.Length == maxIndex + 1,
                  @"methods.Length == maxIndex+1");
    // NOTE(review): the guard reads methods[methodIndex].methodName but
    // the assignment writes methodNames[methodIndex].methodName (a
    // field declared elsewhere). The assert below also checks
    // methodNames — confirm this methods/methodNames split is
    // intentional and not a typo.
    if (methods[methodIndex].methodName == null) {
        methodNames[methodIndex].methodName = methodName;
    }
    // Not "methodNames[methodIndex].methodName == methodName"
    // because the Equality operator carries compiler-inserted
    // RC updates!
    VTable.Assert(Magic.addressOf(methodNames[methodIndex].
                                  methodName) ==
                  Magic.addressOf(methodName),
                  @"Magic.addressOf(methodNames[methodIndex]. methodName) == Magic.addressOf(methodName)");
    methods[methodIndex] += rec;
    return(true);
}
// Processes the Potential Leaked Cycle (PLC) list: does a trial
// deletion over the subgraph S reachable from the list, restores
// counts for objects still externally referenced, reclaims the
// zero-count remainder, and finally recycles the PLC links and work
// lists. Optionally records timing and profiling statistics.
private static unsafe void processPLCList() {
    int startTicks = 0;
    bool enableGCTiming = VTable.enableGCTiming;
    if (enableGCTiming) {
        // Suppress the global timing flag while we time this pass.
        VTable.enableGCTiming = false;
        startTicks = Environment.TickCount;
    }
    if (VTable.enableGCWatermarks) {
        MemoryAccounting.RecordHeapWatermarks();
    }
#if DEBUG
    // The first link is a dummy head node and must stay empty.
    VTable.Assert(firstPLCLink->objAddr == UIntPtr.Zero,
                  @"firstPLCLink->objAddr == UIntPtr.Zero");
#endif // DEBUG
    // Let S be the subgraph of heap objects reachable from
    // the PLC list. Decrement counts due to references in S.
    for (PLCLink *link = firstPLCLink->next; link != null;
         link = link->next) {
        UIntPtr objAddr = link->objAddr;
        VTable.Assert(objAddr != UIntPtr.Zero,
                      @"objAddr != UIntPtr.Zero");
        Object obj = Magic.fromAddress(objAddr);
        VTable.Assert((obj.REF_STATE & countingONFlagMask) != 0,
                      @"(obj.REF_STATE & countingONFlagMask) != 0");
        uint refState = obj.REF_STATE;
        if ((refState & markFlagMask) == 0) {
            // Mark before traversing so each root is processed once.
            obj.REF_STATE = refState | markFlagMask;
            internalDecrementer.Traverse(objAddr);
        }
    }
    // Objects that now have non-zero counts are those that
    // have references external to S incident on them.
    // Recompute counts due to reachability from such objects.
    for (PLCLink *link = firstPLCLink->next; link != null;
         link = link->next) {
        UIntPtr objAddr = link->objAddr;
        internalScanner.Traverse(objAddr);
    }
    // String together objects with reference count
    // of zero for reclamation.
    internalReclaimer.Initialize();
    for (PLCLink *link = firstPLCLink->next; link != null;
         link = link->next) {
        UIntPtr objAddr = link->objAddr;
        internalReclaimer.Traverse(objAddr);
    }
    ulong reclaimedBytes = 0;
    Object reclaimedObj = internalReclaimer.ReclaimedObjects;
    while (reclaimedObj != null) {
        if (VTable.enableGCProfiling) {
            UIntPtr size = ObjectLayout.Sizeof(reclaimedObj);
            reclaimedBytes += (ulong)size;
        }
        // Capture the next link before freeing the current object.
        Object nextReclaimedObj = getNextLink(reclaimedObj);
        SegregatedFreeList.Free(reclaimedObj);
        reclaimedObj = nextReclaimedObj;
    }
    // Recycle the PLC list.
    if (firstPLCLink->next != null) {
        // Walk to the tail, then splice the whole list (minus the
        // dummy head) onto the plcListChunk free pool.
        PLCLink *lastPLCLink = firstPLCLink;
        do {
            lastPLCLink = lastPLCLink->next;
        } while (lastPLCLink->next != null);
        lastPLCLink->next = plcListChunk;
        plcListChunk = firstPLCLink->next;
        firstPLCLink->next = null;
    }
    // Release the memory used up by work lists.
    UIntPtrQueue.ReleaseStandbyPages(null);
    SegregatedFreeList.RecycleGlobalPages();
    SegregatedFreeList.CommitFreedData();
    GC.newBytesSinceGC = UIntPtr.Zero;
    if (enableGCTiming) {
        int elapsedTicks = Environment.TickCount - startTicks;
        System.GC.gcTotalTime += elapsedTicks;
        if (System.GC.maxPauseTime < elapsedTicks) {
            System.GC.maxPauseTime = elapsedTicks;
        }
        System.GC.pauseCount++;
        VTable.enableGCTiming = true;
    }
    if (VTable.enableGCProfiling) {
        if (maxCyclicGarbage < reclaimedBytes) {
            maxCyclicGarbage = reclaimedBytes;
        }
        totalCyclicGarbage += reclaimedBytes;
        cycleCollections++;
    }
}
// Copies every tagged object to its to-space shadow, then attempts to
// atomically forward each from-space object by CASing its CoCoWord
// from "copying" to "forwarded". Returns the number of objects
// successfully forwarded (CAS losers were aborted/pinned elsewhere).
internal override ulong Copy() {
    ulong cnt = 0;
    // Detach the current tag list so new tags can accumulate
    // independently while we work on this batch.
    TagNode myTagHead = tagHead;
    ulong myNTagged = nTagged;
    tagHead = null;
    nTagged = 0;
    if (fAbortDebug) {
        VTable.DebugPrint("Aborter: copying ");
        VTable.DebugPrint(myNTagged);
        VTable.DebugPrint(" objects.\n");
    }
    // first do all of the copying
    for (TagNode n = myTagHead; n != null; n = n.next) {
        // Raw copy including the pre-header.
        Util.MemCopy(Magic.addressOf(n.to) - PreHeader.Size,
                     Magic.addressOf(n.from) - PreHeader.Size,
                     ObjectLayout.Sizeof(n.from));
        // fix the forwarding word in the to-space object (without
        // this it'll point at from-space)
        MixinObject(n.to).preHeader.CoCoWord =
            WithNoForwardNotCopying(n.to);
    }
    if (fAbortDebug) {
        VTable.DebugPrint("Aborter: copied ");
        VTable.DebugPrint(myNTagged);
        VTable.DebugPrint(" objects.\n");
    }
    // now attempt to forward all objects.
    for (TagNode n = myTagHead; n != null; n = n.next) {
        UIntPtr oldCoCoWord =
            CAS(ref MixinObject(n.from).preHeader.CoCoWord,
                WithForward(Magic.addressOf(n.to)),
                WithNoForwardCopying(n.from));
        VTable.Assert(!IsForwarded(MixinObject(n.to).preHeader.CoCoWord,
                                   n.to));
        if (oldCoCoWord == WithNoForwardCopying(n.from)) {
            // copy successful
            cnt++;
        }
    }
    if (fAbortDebug) {
        VTable.DebugPrint("Aborter: forwarded ");
        VTable.DebugPrint(cnt);
        VTable.DebugPrint(" objects.\n");
    }
    return(cnt);
}
// Scan hook for CoCo: decides whether obj should be tagged for
// copying, allocates a same-size to-space shadow, and tags the
// from-space object. Skips objects not on marked small-object pages,
// special runtime objects (which are pinned instead), objects already
// being copied, and everything while a CoCo phase is in progress.
// The first successful tag requests a CoCo cycle via wantCoCo.
internal unsafe void ScanHook(Object obj) {
    UIntPtr page = PageTable.Page(Magic.addressOf(obj));
    if (PageTable.Type(page) != SegregatedFreeList.SMALL_OBJ_PAGE) {
        //VTable.DebugPrint(" not tagging because this isn't a small object page");
        return;
    }
    SegregatedFreeList.PageHeader *ph =
        (SegregatedFreeList.PageHeader *)PageTable.PageAddr(page);
    if (!new CoCoPageUserValue(ph->userValue).Marked) {
        //VTable.DebugPrint(" not tagging because the page isn't marked\n");
        return;
    }
    if (obj is EMU ||
        obj is Monitor ||
        obj is Thread ||
        obj is ThreadHeaderQueue) {
        // These runtime objects must not move; pin them instead.
        CoCoBarrier.NotifyPin(Magic.addressOf(obj));
        if (fVerbose) {
            VTable.DebugPrint(" $$ not tagging object because it's a monitor or EMU\n");
        }
        return;
    }
    if (doingCoCo) {
        //VTable.DebugPrint(" not tagging object because doingCoCo\n");
        return;
    }
    if (!CoCoBarrier.instance.ObjectIsNotCopied(obj)) {
        if (fVerbose) {
            VTable.DebugPrint(" not tagging object because object is already in the process of being copied.\n");
        }
        return;
    }
    if (fVerbose && obj.GetType() != typeof(Object)) {
        VTable.DebugPrint(" $$ tagging a non-System.Object; type is ");
        VTable.DebugPrint(obj.GetType().Name);
        VTable.DebugPrint("\n");
    }
    // REVIEW: I wish that there was an easier way of
    // doing this.
    // Allocate a shadow of the same shape as the original.
    Object copy;
    if (obj is Array) {
        Array a = (Array)obj;
        if (a.IsVector) {
            copy = GC.AllocateVector(a.vtable, a.Length);
        } else {
            copy = GC.AllocateArray(a.vtable, a.Rank, a.Length);
        }
    } else if (obj is String) {
        String s = (String)obj;
        // REVIEW: this is not nice.
        // NOTE(review): assumes ArrayLength includes one extra slot
        // beyond the string length (hence the -1) — confirm against
        // String/AllocateString.
        copy = GC.AllocateString(s.ArrayLength - 1);
    } else {
        copy = GC.AllocateObject(obj.vtable);
    }
    VTable.Assert(ObjectLayout.Sizeof(copy) == ObjectLayout.Sizeof(obj),
                  "Copy is not same size as original");
    spaceOverhead += ObjectLayout.Sizeof(copy);
    bool first = !CoCoBarrier.instance.AnyTaggedForCopying;
    UIntPtr thisSpaceOverhead;
    if (CoCoBarrier.instance.TagObjectForCopy(obj, copy,
                                              out thisSpaceOverhead)) {
        cnt++;
        if (first) {
            // This was the first tagged object: request a CoCo phase
            // unless one is already wanted or running.
            lock (interlock) {
                if (!wantCoCo && !doingCoCo) {
                    wantCoCo = true;
                }
            }
        }
    }
    spaceOverhead += thisSpaceOverhead;
}
// Visits every reference field of the object described by objDesc,
// dispatching on the low 4 bits of the vtable's pointerTrackingMask
// (the object-layout tag). Each reference location is passed to
// this.Filter. Returns the object's total size. For struct arrays,
// objDesc is mutated in place and the template recurses per element.
UIntPtr VisitReferenceFieldsTemplate(ref ObjectDescriptor objDesc) {
    UIntPtr pointerTracking = objDesc.vtable.pointerTrackingMask;
    uint objectTag = (pointerTracking & 0xf);
    UIntPtr size;
    switch (objectTag) {
      case ObjectLayout.SPARSE_TAG: {
          // Remaining nibbles are word indices of pointer fields;
          // the list ends when the mask runs out.
          UIntPtr *sparseObject = (UIntPtr *)objDesc.objectBase;
          size = ObjectLayout.ObjectSize(objDesc.vtable);
          pointerTracking >>= 4;
          while (pointerTracking != 0) {
              uint index = pointerTracking & 0xf;
              pointerTracking >>= 4;
              // The cast to int prevents C# from taking the
              // index * sizeof(UIntPtr) to long:
              UIntPtr *loc = sparseObject + (int)index;
              this.Filter(loc, ref objDesc);
          }
          break;
      }
      case ObjectLayout.DENSE_TAG: {
          // skip vtable
          // Remaining bits are a bitmap, one bit per word after the
          // PostHeader; set bits mark reference fields.
          int postHeaderSize = PostHeader.Size;
          UIntPtr *denseObject = (UIntPtr *)
              (objDesc.objectBase + postHeaderSize);
          size = ObjectLayout.ObjectSize(objDesc.vtable);
          pointerTracking >>= 4;
          while (pointerTracking != 0) {
              if ((pointerTracking & ((UIntPtr)0x1)) != 0) {
                  this.Filter(denseObject, ref objDesc);
              }
              pointerTracking >>= 1;
              denseObject++;
          }
          break;
      }
      case ObjectLayout.PTR_VECTOR_TAG: {
          // Vector whose elements are all references: visit each one.
          int postHeaderSize = PostHeader.Size;
          uint length = *(uint *)(objDesc.objectBase + postHeaderSize);
          size = ObjectLayout.ArraySize(objDesc.vtable, length);
          int preHeaderSize = PreHeader.Size;
          UIntPtr *elementAddress = (UIntPtr *)
              (objDesc.objectBase + objDesc.vtable.baseLength -
               preHeaderSize);
          for (uint i = 0; i < length; i++, elementAddress++) {
              this.Filter(elementAddress, ref objDesc);
          }
          break;
      }
      case ObjectLayout.OTHER_VECTOR_TAG: {
          int postHeaderSize = PostHeader.Size;
          uint length = *(uint *)(objDesc.objectBase + postHeaderSize);
          size = ObjectLayout.ArraySize(objDesc.vtable, length);
          if (objDesc.vtable.arrayOf == StructuralType.Struct) {
              // pretend the struct is boxed and account for the
              // presence of the vtable field
              VTable elementVTable = objDesc.vtable.arrayElementClass;
              UIntPtr elementMask = elementVTable.pointerTrackingMask;
              // A structure with no references will have a SPARSE
              // descriptor with no offset values.
              if (elementMask != (UIntPtr)ObjectLayout.SPARSE_TAG) {
                  int preHeaderSize = PreHeader.Size;
                  UIntPtr elementAddress = (objDesc.objectBase +
                                            objDesc.vtable.baseLength -
                                            preHeaderSize -
                                            postHeaderSize);
                  int elementSize = objDesc.vtable.arrayElementSize;
                  // Reuse objDesc for each element; vtable and
                  // objectBase are overwritten before recursing.
                  objDesc.vtable = elementVTable;
                  for (uint i = 0; i < length; i++) {
                      objDesc.objectBase = elementAddress;
                      this.VisitReferenceFieldsTemplateNoInline(ref objDesc);
                      elementAddress += elementSize;
                  }
              }
          }
          break;
      }
      case ObjectLayout.PTR_ARRAY_TAG: {
          // Array of references; its length field sits one uint
          // further in than a vector's.
          int postHeaderSize = PostHeader.Size;
          uint length = *(uint *)(objDesc.objectBase + postHeaderSize +
                                  sizeof(uint));
          size = ObjectLayout.ArraySize(objDesc.vtable, length);
          int preHeaderSize = PreHeader.Size;
          UIntPtr *elementAddress = (UIntPtr *)
              (objDesc.objectBase + objDesc.vtable.baseLength -
               preHeaderSize);
          for (uint i = 0; i < length; i++, elementAddress++) {
              this.Filter(elementAddress, ref objDesc);
          }
          break;
      }
      case ObjectLayout.OTHER_ARRAY_TAG: {
          int postHeaderSize = PostHeader.Size;
          uint length = *(uint *)(objDesc.objectBase + postHeaderSize +
                                  sizeof(uint));
          size = ObjectLayout.ArraySize(objDesc.vtable, length);
          if (objDesc.vtable.arrayOf == StructuralType.Struct) {
              // pretend the struct is boxed and account for the
              // presence of the PostHeader
              VTable elementVTable = objDesc.vtable.arrayElementClass;
              UIntPtr elementMask = elementVTable.pointerTrackingMask;
              // A structure with no references will have a SPARSE
              // descriptor with no offset values.
              if (elementMask != (UIntPtr)ObjectLayout.SPARSE_TAG) {
                  int preHeaderSize = PreHeader.Size;
                  int elementSize = objDesc.vtable.arrayElementSize;
                  UIntPtr elementAddress = objDesc.objectBase +
                      objDesc.vtable.baseLength - preHeaderSize -
                      postHeaderSize;
                  objDesc.vtable = elementVTable;
                  for (uint i = 0; i < length; i++) {
                      objDesc.objectBase = elementAddress;
                      this.VisitReferenceFieldsTemplateNoInline(ref objDesc);
                      elementAddress += elementSize;
                  }
              }
          }
          break;
      }
      case ObjectLayout.STRING_TAG: {
          // Strings: no reference fields are visited; just compute
          // the size from the character count.
          int postHeaderSize = PostHeader.Size;
          uint arrayLength =
              *(uint *)(objDesc.objectBase + postHeaderSize);
          size = ObjectLayout.StringSize(objDesc.vtable, arrayLength);
          break;
      }
      default: {
          // escape case
          // Even tag: pointerTracking is a pointer to an out-of-line
          // descriptor holding a count followed by that many word
          // offsets of pointer fields.
          VTable.Assert((objectTag & 0x1) == 0,
                        "ReferenceVisitor: (objectTag & 0x1) == 0");
          UIntPtr *largeObject = (UIntPtr *)objDesc.objectBase;
          size = ObjectLayout.ObjectSize(objDesc.vtable);
          int *pointerDescription = (int *)pointerTracking;
          int count = *pointerDescription;
          for (int i = 1; i <= count; i++) {
              UIntPtr *loc = largeObject + *(pointerDescription + i);
              this.Filter(loc, ref objDesc);
          }
          break;
      }
    }
    return(size);
}
// Reference updates and object relocation
// Sweeps this collector's zombie pages, computing a destination
// address for every live object, recording contiguous relocation runs
// (RegisterRelocationStart/End), and patching the chain of tagged
// pointers threaded through each object's vtable word to the new
// address. Pinned and large objects are left in place. Returns the
// final destination cursor; oldAllocPtr receives its initial value.
private unsafe UIntPtr ForwardReferences(PageType generation,
                                         out UIntPtr oldAllocPtr) {
    VTable.Assert(IsValidGeneration((int)generation));
    UIntPtr destPage = UIntPtr.Zero;
    UIntPtr destCursor;
    UIntPtr destLimit;
    PageType destGeneration;
    // Survivors are promoted one generation, capped at the oldest.
    if (generation < MAX_GENERATION) {
        destGeneration = generation + 1;
    } else {
        destGeneration = MAX_GENERATION;
    }
    destCursor = UIntPtr.Zero;
    destLimit = UIntPtr.Zero;
    oldAllocPtr = destCursor;
    UIntPtr runLength = UIntPtr.Zero;
    for (UIntPtr i = UIntPtr.Zero; i < PageTable.pageTableCount; i++) {
        if (!IsMyZombiePage(i)) {
            continue;
        }
        // deltaBytes is seeded with a sentinel so the first live
        // object always starts a new relocation run.
        UIntPtr deltaBytes = (UIntPtr)0x80000000;
        UIntPtr sourceCursor = PageTable.PageAddr(i);
        // Extend the scan over the whole contiguous zombie-page run.
        do {
            i++;
        } while (i < PageTable.pageTableCount && IsMyZombiePage(i));
        UIntPtr sourceLimit = PageTable.PageAddr(i);
        while (true) {
            if (sourceCursor >= sourceLimit) {
                break;
            }
            if (Allocator.IsAlignmentMarkerAddr(sourceCursor)) {
                sourceCursor += UIntPtr.Size;
                deltaBytes += UIntPtr.Size;
                continue;
            }
            if (BumpAllocator.IsUnusedMarkerAddr(sourceCursor)) {
                // Rest of the page is unused; skip to the next page.
                sourceCursor += UIntPtr.Size;
                sourceCursor = PageTable.PagePad(sourceCursor);
                deltaBytes = (UIntPtr)0x80000000;
                continue;
            }
            UIntPtr objectAddr = sourceCursor + PreHeader.Size;
            UIntPtr vtableOrMarker = Allocator.GetObjectVTable(objectAddr);
            if (vtableOrMarker == UIntPtr.Zero) {
                // We found the end of an allocation page
                sourceCursor = PageTable.PagePad(sourceCursor);
                deltaBytes = (UIntPtr)0x80000000;
                continue;
            }
            UIntPtr vtableAddr;
            if ((vtableOrMarker & 1) != 0) {
                // Low bit set: the header holds a chain of tagged
                // pointers; chase it down to the real vtable word,
                // which must live in non-GC memory.
                UIntPtr temp = *(UIntPtr *)(vtableOrMarker - 1);
                while ((temp & 1) != 0) {
                    temp = *(UIntPtr *)(temp - 1);
                }
                VTable.Assert(PageTable.IsNonGcPage(PageTable.Type(PageTable.Page(temp))));
                vtableAddr = temp;
                if ((temp & 2) != 0) {
                    // Found pinned object
                    SkipDestinationAreas(ref destPage, destCursor,
                                         ref destLimit, sourceCursor);
                    deltaBytes -= (sourceCursor - destCursor);
                    destCursor = sourceCursor;
                    vtableAddr -= 2; // Remove "pinned" bit
                }
                Allocator.SetObjectVTable(objectAddr, vtableAddr);
            } else {
                vtableAddr = vtableOrMarker;
            }
            VTable vtable = Magic.toVTable(Magic.fromAddress(vtableAddr));
            UIntPtr objectSize = ObjectLayout.ObjectSize(objectAddr,
                                                         vtable);
            VTable.Assert(objectSize > 0);
            if ((vtableOrMarker & 1) != 0) {
                if (GenerationalCollector.IsLargeObjectSize(objectSize)) {
                    // Don't move large objects
                    SkipDestinationAreas(ref destPage, destCursor,
                                         ref destLimit, sourceCursor);
                    UIntPtr localDelta = sourceCursor - destCursor;
                    deltaBytes -= localDelta;
                    if (deltaBytes == UIntPtr.Zero &&
                        runLength != UIntPtr.Zero) {
                        runLength += localDelta;
                    }
                    destCursor = sourceCursor;
                    // Consume the object's pages whole.
                    UIntPtr objLimit = sourceCursor + objectSize;
                    UIntPtr pageEndAddr = PageTable.PagePad(objLimit);
                    objectSize = (pageEndAddr - sourceCursor);
                } else if (destCursor + objectSize > destLimit) {
                    // Current destination area is full; find another.
                    UIntPtr oldDestCursor = destCursor;
                    FindDestinationArea(ref destPage, ref destCursor,
                                        ref destLimit, objectSize,
                                        destGeneration);
                    VTable.Assert(destCursor <= sourceCursor);
                    VTable.Assert(destCursor + objectSize <= destLimit);
                    deltaBytes -= (destCursor - oldDestCursor);
                } else if (vtable.baseAlignment > UIntPtr.Size) {
                    // Pad the destination until the object payload
                    // meets its alignment requirement.
                    uint alignmentMask = vtable.baseAlignment - 1;
                    int offset = PreHeader.Size + UIntPtr.Size;
                    while (((destCursor + offset) & alignmentMask) != 0) {
                        destCursor += UIntPtr.Size;
                        deltaBytes -= UIntPtr.Size;
                        if (deltaBytes == UIntPtr.Zero &&
                            runLength != UIntPtr.Zero) {
                            runLength += UIntPtr.Size;
                        }
                    }
                }
                if (runLength == UIntPtr.Zero ||
                    deltaBytes != UIntPtr.Zero) {
                    // The source/destination offset changed: close the
                    // current relocation run and start a new one.
                    if (runLength != UIntPtr.Zero) {
                        RegisterRelocationEnd(runLength);
                    }
                    RegisterRelocationStart(sourceCursor, destCursor);
                    deltaBytes = UIntPtr.Zero;
                    runLength = UIntPtr.Zero;
                }
                UIntPtr newObjectAddr = destCursor + PreHeader.Size;
                // Patch every tagged pointer in the chain to the
                // object's new address.
                do {
                    UIntPtr *ptrAddr = (UIntPtr *)(vtableOrMarker - 1);
                    vtableOrMarker = *ptrAddr;
                    *ptrAddr = newObjectAddr;
                } while ((vtableOrMarker & 1) != 0);
                destCursor += objectSize;
                runLength += objectSize;
            } else {
                // No chain: the object is not being relocated; its
                // bytes become a gap, ending any current run.
                deltaBytes += objectSize;
                if (runLength != UIntPtr.Zero) {
                    RegisterRelocationEnd(runLength);
                }
                runLength = UIntPtr.Zero;
            }
            sourceCursor += objectSize;
        }
    }
    if (runLength != UIntPtr.Zero) {
        RegisterRelocationEnd(runLength);
    }
    return(destCursor);
}
// Drives one from-space object through the CoCoWord state machine:
// Tagged -> Copying -> Forwarded, copying its words to the to-space
// shadow along the way. Returns false if the object ends up pinned
// (Simple state), true once it has been copied and forwarded.
static bool CopyObject(Object from) {
    if (fDietVerboseCopyDebug) {
        VTable.DebugPrint(" Copying ");
        VTable.DebugPrint((ulong)Magic.addressOf(from));
        VTable.DebugPrint(" with CoCoWord = ");
        VTable.DebugPrint((ulong)MixinObject(from).preHeader.CoCoWord);
        VTable.DebugPrint("\n");
    }
    for (;;) {
        // Re-read the state each iteration; CAS failures loop back.
        UIntPtr CoCoWord = MixinObject(from).preHeader.CoCoWord;
        if (IsSimple(CoCoWord)) {
            // got pinned .. ignore
            return(false);
        } else if (IsTagged(CoCoWord)) {
            if (ShouldPin(Magic.addressOf(from))) {
                if (CASCoCoWord(from, Simple(), CoCoWord)) {
                    // pinned
                    NotifyPin(Magic.addressOf(from));
                    return(false);
                }
                // CAS lost a race; retry with a fresh CoCoWord.
            } else {
                // Advance Tagged -> Copying; success or failure, the
                // outer loop re-reads the state.
                CASCoCoWord(from, Copying(CoCoWord), CoCoWord);
            }
        } else if (IsCopying(CoCoWord)) {
            Object to = Magic.fromAddress(ForwardPtr(CoCoWord));
            // Word offsets spanning the object including its
            // pre-header, relative to the object's address.
            UIntPtr begin = UIntPtr.Zero - (UIntPtr)PreHeader.Size;
            UIntPtr end = ((ObjectLayout.Sizeof(from) + sizeof(UIntPtr) - 1)
                           & ~((UIntPtr)sizeof(UIntPtr) - 1))
                          - PreHeader.Size;
            if (fDietVerboseCopyDebug) {
                VTable.DebugPrint(" copying to ");
                VTable.DebugPrint((ulong)Magic.addressOf(to));
                VTable.DebugPrint("; begin = ");
                VTable.DebugPrint((ulong)begin);
                VTable.DebugPrint("; end = ");
                VTable.DebugPrint((ulong)end);
                VTable.DebugPrint("; PreHeader.Size = ");
                VTable.DebugPrint((ulong)PreHeader.Size);
                VTable.DebugPrint("; Sizeof(from) = ");
                VTable.DebugPrint((ulong)ObjectLayout.Sizeof(from));
                VTable.DebugPrint("; Sizeof(to) = ");
                VTable.DebugPrint((ulong)ObjectLayout.Sizeof(to));
                VTable.DebugPrint("\n");
            }
            for (UIntPtr offset = begin;
                 offset != end;
                 offset += sizeof(UIntPtr)) {
                if (!IgnoreOffset(offset)) {
                    UIntPtr *fptr =
                        (UIntPtr *)(Magic.addressOf(from) + offset);
                    UIntPtr *tptr =
                        (UIntPtr *)(Magic.addressOf(to) + offset);
                    for (;;) {
                        UIntPtr word = *fptr;
                        if (word == Alpha) {
                            // NOTE: this case will only be hit the first time
                            // around the loop. if it's not hit the first time
                            // it'll never get hit.
                            break;
                        }
                        *tptr = word;
                        // Immutable words need no CAS; otherwise the
                        // CAS detects concurrent mutation of *fptr and
                        // forces a re-copy of this word.
                        if (InternalImmutableOffset(from, offset) ||
                            CAS(fptr, Alpha, word) == word) {
                            break;
                        }
                    }
                }
            }
            MixinObject(from).preHeader.CoCoWord = Forwarded(CoCoWord);
            return(true);
        } else {
            VTable.NotReached();
        }
    }
}
// Sorts the write buffer, removes duplicates and addresses interior
// to already-covered objects (entries with the low bit set mark a
// whole object), then clears thread overflow slots that duplicate
// buffer entries.
internal override void Uniquify() {
    // Sort the write buffer.
    // TODO: Would like sort that is in-place, O(n lg n) worst-case, and
    // O(n) when nearly-sorted.
    bool changed = true;
    // Bubble sort; each pass shrinks the unsorted prefix by one and
    // stops early once a pass makes no swaps.
    for (int max = writeBufferIndex; changed; --max) {
        changed = false;
        for (int i = 1; i < max; ++i) {
            if (writeBuffer[i - 1] > writeBuffer[i]) {
                changed = true;
                UIntPtr t = writeBuffer[i - 1];
                writeBuffer[i - 1] = writeBuffer[i];
                writeBuffer[i] = t;
            }
        }
    }
    // Remove duplicates
    int dest = 0;
    UIntPtr last = UIntPtr.Zero;
    for (int i = 0; i < writeBufferIndex; i++) {
        UIntPtr current = *(writeBuffer + i);
        if (current != last) {
            VTable.Assert(current > last);
            *(writeBuffer + dest) = current;
            dest++;
            last = current;
            if ((current & 1) != 0) {
                // The entire object is marked, skip interior addresses
                UIntPtr objPtr = current - 1;
                VTable vtable = Magic.fromAddress(objPtr).vtable;
                UIntPtr size = ObjectLayout.ObjectSize(objPtr, vtable);
                VTable.Assert(size > 0);
                UIntPtr objLimit = objPtr + size;
                i++;
                while (i < writeBufferIndex &&
                       *(writeBuffer + i) < objLimit) {
                    i++;
                }
                i--;
            }
        }
    }
    writeBufferIndex = dest;
    // Remove duplicates hiding in the overflow slot in thread objects!
    for (int i = 0; i < Thread.threadTable.Length; i++) {
        Thread t = Thread.threadTable[i];
        if (t != null) {
            UIntPtr overflowPtr = MixinThread(t).ssb.overflowValue;
            if (overflowPtr != UIntPtr.Zero) {
                // Binary search for the greatest buffer entry that is
                // <= overflowPtr.
                int left = 0;
                int right = writeBufferIndex;
                while (left < right - 1) {
                    int mid = (left + right) / 2;
                    if (*(writeBuffer + mid) <= overflowPtr) {
                        left = mid;
                    } else {
                        right = mid;
                    }
                }
                UIntPtr foundPtr = *(writeBuffer + left);
                if (overflowPtr == foundPtr) {
                    // Found an exact duplicate
                    MixinThread(t).ssb.overflowValue = UIntPtr.Zero;
                    continue;
                } else if ((foundPtr & 1) != 0) {
                    // Buffer entry covers a whole object; drop the
                    // overflow value if it points inside that object.
                    UIntPtr objAddr = foundPtr - 1;
                    VTable vtable = Magic.fromAddress(objAddr).vtable;
                    UIntPtr size = ObjectLayout.ObjectSize(objAddr,
                                                           vtable);
                    if (overflowPtr < objAddr + size) {
                        // found a pointer into a checked object
                        MixinThread(t).ssb.overflowValue = UIntPtr.Zero;
                    }
                } else if ((overflowPtr & 1) != 0) {
                    // Overflow value covers a whole object; compact
                    // away buffer entries pointing into that object.
                    // NOTE(review): writeBufferIndex is not reduced
                    // after this compaction, so the tail retains stale
                    // copies — verify this is handled by a caller or
                    // is intentional.
                    UIntPtr objAddr = overflowPtr - 1;
                    VTable vtable = Magic.fromAddress(objAddr).vtable;
                    UIntPtr size = ObjectLayout.ObjectSize(objAddr,
                                                           vtable);
                    UIntPtr objLimit = objAddr + size;
                    int skipCount = 0;
                    int probe = left + 1;
                    while (probe < writeBufferIndex &&
                           *(writeBuffer + probe) < objLimit) {
                        probe++;
                        skipCount++;
                    }
                    if (skipCount > 0) {
                        while (probe < writeBufferIndex) {
                            *(writeBuffer + left) = *(writeBuffer + probe);
                            left++;
                            probe++;
                        }
                    }
                }
            }
        }
    }
}