Example #1
        internal UIntPtr AllocateFast(UIntPtr bytes, uint alignment)
        {
#if SINGULARITY_KERNEL
#if ENSURE_ALLOCATION_ALLOWED
            BumpAllocator.EnsureAllocationAllowed();
#endif
#endif
            UIntPtr allocPtr =
                Allocator.AlignedAllocationPtr(this.allocPtr,
                                               this.reserveLimit,
                                               alignment);
            UIntPtr objectLimitPtr = allocPtr + bytes;
            if (objectLimitPtr > this.reserveLimit)
            {
                this.allocPtr = allocPtr;
                return(UIntPtr.Zero);
            }
            if (objectLimitPtr > this.zeroedLimit)
            {
                Util.MemClear(allocPtr, bytes);
                this.zeroedLimit = objectLimitPtr;
            }
            this.allocPtr = objectLimitPtr;
            return(allocPtr + PreHeader.Size);
        }
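The fast path above just aligns the allocation cursor, checks it against the reserve limit, zeroes newly exposed memory lazily, and bumps allocPtr. Below is a minimal, self-contained sketch of the same bump-pointer idea; BumpRegion and its members are hypothetical illustration names, not Singularity runtime API, and alignment is assumed to be a power of two.

    // Minimal bump-pointer fast path (illustration only).
    class BumpRegion
    {
        private ulong cursor;          // next free byte
        private readonly ulong limit;  // one past the end of the reserved region

        public BumpRegion(ulong start, ulong length)
        {
            this.cursor = start;
            this.limit  = start + length;
        }

        // Returns the start of an aligned block of 'bytes' bytes,
        // or 0 if the region cannot satisfy the request.
        public ulong Allocate(ulong bytes, ulong alignment)
        {
            ulong aligned   = (this.cursor + alignment - 1) & ~(alignment - 1);
            ulong newCursor = aligned + bytes;
            if (newCursor > this.limit)
            {
                return 0;              // caller falls back to a slow path
            }
            this.cursor = newCursor;   // bump the cursor past the new block
            return aligned;
        }
    }

The real AllocateFast additionally skews the returned address by PreHeader.Size and tracks a separate zeroedLimit so that only memory beyond the already-zeroed region is cleared.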
Example #2
 private unsafe void CompactHeapObjects(UIntPtr previousEnd)
 {
     while (!this.relocationQueue.IsEmpty)
     {
         UIntPtr sourceAddress      = this.relocationQueue.Read();
         UIntPtr destinationAddress = this.relocationQueue.Read();
         UIntPtr runLength          = this.relocationQueue.Read();
         if (previousEnd != destinationAddress)
         {
             VTable.Assert(previousEnd < destinationAddress);
             if (PageTable.Page(destinationAddress) !=
                 PageTable.Page(previousEnd + PreHeader.Size))
             {
                 if (!PageTable.PageAligned(previousEnd))
                 {
                     UIntPtr pageLimit = PageTable.PagePad(previousEnd);
                     BumpAllocator.WriteUnusedMarker(previousEnd);
                     previousEnd += UIntPtr.Size;
                     Util.MemClear(previousEnd,
                                   pageLimit - previousEnd);
                 }
                 if (!PageTable.PageAligned(destinationAddress))
                 {
                     // This only happens before pinned objects and
                     // large objects
                     UIntPtr start =
                         PageTable.PageAlign(destinationAddress);
                     VTable.Assert(previousEnd <= start);
                     while (start < destinationAddress)
                     {
                         Allocator.WriteAlignment(start);
                         start += UIntPtr.Size;
                     }
                 }
                 UIntPtr objAddr = destinationAddress + PreHeader.Size;
                 InteriorPtrTable.SetFirst(objAddr);
             }
             else
             {
                 VTable.Assert(previousEnd < destinationAddress);
                 UIntPtr start = previousEnd;
                 while (start < destinationAddress)
                 {
                     Allocator.WriteAlignment(start);
                     start += UIntPtr.Size;
                 }
             }
         }
         Util.MemCopy(destinationAddress, sourceAddress, runLength);
         previousEnd = destinationAddress + runLength;
     }
     // Zero out the end of the allocation page
     if (!PageTable.PageAligned(previousEnd))
     {
         UIntPtr pageLimit = PageTable.PagePad(previousEnd);
         Util.MemClear(previousEnd, pageLimit - previousEnd);
     }
     this.relocationQueue.Cleanup(true);
 }
Example #3
        internal static unsafe void VerifyFirst(UIntPtr previousObjectAddr,
                                                UIntPtr objectAddr)
        {
            UIntPtr page = PageTable.Page(objectAddr);

            if (previousObjectAddr != UIntPtr.Zero)
            {
                UIntPtr previousPage = PageTable.Page(previousObjectAddr);
                UIntPtr pageCursor   = previousPage + 1;
                while (pageCursor < page)
                {
                    uint    cursorOffset = PageTable.Extra(pageCursor);
                    UIntPtr objAddr      = (PageTable.PageAddr(pageCursor) +
                                            cursorOffset - OFFSET_SKEW);
                    if (!(cursorOffset <= OFFSET_NO_DATA ||
                          BumpAllocator.IsUnusedSpace(objAddr) ||
                          Allocator.IsAlignment(objAddr) ||
                          BumpAllocator.IsRestOfPageZero(objAddr)))
                    {
                        VTable.DebugPrint
                            ("cursorOffset={0:x} OFFSET_NO_DATA={1:x} objAddr={2:x} unused={3} isalign={4} iszero={5}\n",
                            __arglist((cursorOffset),
                                      (OFFSET_NO_DATA),
                                      ((long)objAddr),
                                      (BumpAllocator.IsUnusedSpace(objAddr)),
                                      (Allocator.IsAlignment(objAddr)),
                                      (BumpAllocator.IsRestOfPageZero(objAddr))));
                    }
                    VTable.Assert(cursorOffset <= OFFSET_NO_DATA ||
                                  BumpAllocator.IsUnusedSpace(objAddr) ||
                                  Allocator.IsAlignment(objAddr) ||
                                  BumpAllocator.IsRestOfPageZero(objAddr),
                                  "VerifyFirst 1");
                    pageCursor++;
                }
            }
            uint offset = PageTable.Extra(page);

            if (offset > OFFSET_NO_DATA)
            {
                UIntPtr firstAddr =
                    PageTable.PageAddr(page) + offset - OFFSET_SKEW;
                if (!(firstAddr == objectAddr ||
                      (firstAddr + UIntPtr.Size == objectAddr &&
                       Allocator.IsAlignment(firstAddr))))
                {
                    VTable.DebugPrint
                        ("firstAddr={0:x} objectAddr={1:x} isalign={2}\n",
                        __arglist(((long)firstAddr),
                                  ((long)objectAddr),
                                  (Allocator.IsAlignment(firstAddr))));
                }
                VTable.Assert(firstAddr == objectAddr ||
                              (firstAddr + UIntPtr.Size == objectAddr &&
                               Allocator.IsAlignment(firstAddr)),
                              "VerifyFirst 2");
            }
        }
Example #4
        private static Object Allocate(ref BumpAllocator profileData,
                                       VTable vtable,
                                       UIntPtr numBytes)
        {
            UIntPtr resultAddr =
                profileData.AllocateFast(numBytes, vtable.baseAlignment);
            Object result = Magic.fromAddress(resultAddr);

            result.REF_STATE = 1;
            result.vtable    = vtable;
            return(result);
        }
Example #5
            internal void ProcessPinnedPages(ReferenceVisitor ptrVisitor)
            {
                if (pinnedPageList == null || pinnedPageList.Count == 0)
                {
                    return;
                }
                pinnedPageList.Sort(comparer);
                int limit = pinnedPageList.Count;

                for (int i = 0; i < limit; i++)
                {
                    UIntPtr  page          = (UIntPtr)pinnedPageList[i];
                    PageType fromSpaceType = PageTable.Type(page);
                    VTable.Assert(PageTable.IsZombiePage(fromSpaceType),
                                  "Semispace:RegisterPinnedReference:2");
                    PageType toSpaceType =
                        PageTable.ZombieToLive(fromSpaceType);
                    PageTable.SetType(page, toSpaceType);
                }
                int pageIndex = 0;

                while (pageIndex < limit)
                {
                    UIntPtr startPage = (UIntPtr)pinnedPageList[pageIndex];
                    UIntPtr endPage   = startPage + 1;
                    pageIndex++;
                    while (pageIndex < limit &&
                           (UIntPtr)pinnedPageList[pageIndex] == endPage)
                    {
                        pageIndex++;
                        endPage++;
                    }
                    UIntPtr objectAddr = FirstPinnedObjectAddr(startPage);
                    UIntPtr pastAddr   = PostPinnedObjectAddr(endPage);
                    while (objectAddr < pastAddr)
                    {
                        if (Allocator.IsAlignment(objectAddr))
                        {
                            objectAddr += UIntPtr.Size;
                        }
                        else if (BumpAllocator.IsUnusedSpace(objectAddr))
                        {
                            objectAddr = (PageTable.PagePad(objectAddr) +
                                          PreHeader.Size);
                        }
                        else
                        {
                            Object obj = Magic.fromAddress(objectAddr);
                            objectAddr += ptrVisitor.VisitReferenceFields(obj);
                        }
                    }
                }
            }
Example #6
        // Finds the object base for an interior pointer.  In the case of a
        // pointer to the tail of an object and the head of another, it will
        // return the former object (the one whose tail we point at).  To
        // get the base pointer for a pointer into the pre-header, you should
        // add PreHeader.Size before calling this.
        internal static UIntPtr Find(UIntPtr addr)
        {
            UIntPtr page     = PageTable.Page(addr);
            UIntPtr currAddr = InteriorPtrTable.First(page);

            // Look out for the unused space token: this page may not
            // have been completely allocated: its "first" object might not
            // be valid.
            if (BumpAllocator.IsUnusedSpace(currAddr) || currAddr > addr)
            {
                // Back up to the previous object.  Should be fast
                // since First updated the InteriorPtrTable entries.
                currAddr = Before(PageTable.PageAddr(page));
            }
            VTable.Assert(!BumpAllocator.IsUnusedSpace(currAddr),
                          "InteriorPtrTable.Find 0");
            VTable.Assert(currAddr <= addr, "InteriorPtrTable.Find 1");
            while (true)
            {
                // Watch out for alignment padding; advance the pointer if
                // it points to a syncblock index rather than a vtable
                // pointer.  Note that we must do this before scrolling,
                // since the page table value was set before we knew the
                // required alignment.
                if (Allocator.IsAlignment(currAddr))
                {
                    currAddr += UIntPtr.Size;
                }
                else if (BumpAllocator.IsUnusedSpace(currAddr))
                {
                    UIntPtr postAddr =
                        PageTable.PagePad(currAddr) + PreHeader.Size;
                    VTable.Assert(postAddr <= addr, "InteriorPtrTable.Find 2");
                    currAddr = postAddr;
                }
                else
                {
                    VTable.Assert(currAddr <= addr, "InteriorPtrTable.Find 3");
                    UIntPtr size = ObjectSize(currAddr);
                    VTable.Assert(size >= UIntPtr.Zero,
                                  "InteriorPtrTable.Find 4");
                    UIntPtr postAddr = currAddr + size;
                    if (postAddr > addr)
                    {
                        return(currAddr);
                    }
                    else
                    {
                        currAddr = postAddr;
                    }
                }
            }
        }
Example #7
        private void TruncateNurseryAllocationAreas()
        {
            int limit = Thread.threadTable.Length;

            for (int i = 0; i < limit; i++)
            {
                Thread t = Thread.threadTable[i];
                if (t != null)
                {
                    BumpAllocator.Truncate(t);
                }
            }
        }
Example #8
 private UIntPtr AllocateObjectMemorySlow(UIntPtr numBytes,
                                          uint alignment,
                                          Thread currentThread)
 {
     //Trace.Log(Trace.Area.Allocate,
     //          "AllocateObjectMemorySlow numBytes={0}, alignment={1}, currentThread={2}", __arglist(numBytes, alignment, currentThread));
     GC.CheckForNeededGCWork(currentThread);
     VTable.Assert(CurrentPhase != StopTheWorldPhase.SingleThreaded ||
                   currentThread.threadIndex == collectorThreadIndex);
     if (GenerationalCollector.IsLargeObjectSize(numBytes))
     {
         return(AllocateBig(numBytes, alignment, currentThread));
     }
     return(BumpAllocator.Allocate(currentThread, numBytes, alignment));
 }
Example #9
        internal override UIntPtr AllocateObjectMemory(UIntPtr numBytes,
                                                       uint alignment,
                                                       Thread currentThread)
        {
            UIntPtr resultAddr =
                BumpAllocator.AllocateFast(currentThread, numBytes, alignment);

            if (resultAddr == UIntPtr.Zero)
            {
                resultAddr = AllocateObjectMemorySlow(numBytes, alignment,
                                                      currentThread);
            }

            VTable.Assert(resultAddr != UIntPtr.Zero);
            return(resultAddr);
        }
Example #10
            private static UIntPtr PostPinnedObjectAddr(UIntPtr endPage)
            {
                UIntPtr endAddr            = PageTable.PageAddr(endPage);
                UIntPtr postLastObjectAddr = InteriorPtrTable.Last(endPage - 1);

                if (postLastObjectAddr < endAddr &&
                    !BumpAllocator.IsUnusedSpace(postLastObjectAddr))
                {
                    // If the next object straddles into the next page,
                    // return the location just past the object
                    Object  lastObject     = Magic.fromAddress(postLastObjectAddr);
                    UIntPtr lastObjectSize =
                        ObjectLayout.ObjectSize(postLastObjectAddr,
                                                lastObject.vtable);
                    postLastObjectAddr += lastObjectSize;
                }
                return(postLastObjectAddr - PreHeader.Size);
            }
Example #11
        internal override bool Scan(NonNullReferenceVisitor visitor)
        {
            bool globalRepeat = false;

            while (true)
            {
                UIntPtr lowAddr, highAddr;
                if (this.destinationLow != this.allocator.AllocPtr)
                {
                    lowAddr             = this.destinationLow;
                    highAddr            = this.allocator.AllocPtr;
                    this.destinationLow = highAddr;
                }
                else if (!this.HaveWork)
                {
                    // No more work to do
                    break;
                }
                else
                {
                    lowAddr = this.GetWork(out highAddr);
                }
                globalRepeat    = true;
                this.sourceHigh = highAddr;
                lowAddr        += PreHeader.Size;
                lowAddr         =
                    BumpAllocator.SkipNonObjectData(lowAddr, highAddr);
                while (lowAddr < this.sourceHigh)
                {
                    Object obj = Magic.fromAddress(lowAddr);
                    lowAddr += visitor.VisitReferenceFields(obj);
                    lowAddr  =
                        BumpAllocator.SkipNonObjectData(lowAddr, highAddr);
                }
                if (lowAddr < highAddr)
                {
                    // The scanning must have been aborted early due to
                    // overflow into a new page.  Put the rest of the
                    // range on the work list.
                    this.AddWork(lowAddr - PreHeader.Size, highAddr);
                }
            }
            return(globalRepeat);
        }
Example #12
        public static unsafe void Initialize()
        {
            maxEntries = 1 << 16;
            VTable UIntPtrArrayVtable =
                ((RuntimeType)typeof(UIntPtr[])).classVtable;

            tableSize =
                ObjectLayout.ArraySize(UIntPtrArrayVtable, maxEntries);

            // Allocate a pool for ZCT
            BumpAllocator entryPool = new BumpAllocator(PageType.NonGC);
            UIntPtr       memStart  = MemoryManager.AllocateMemory(tableSize);

            entryPool.SetZeroedRange(memStart, tableSize);
            PageManager.SetStaticDataPages(memStart, tableSize);

            // Initialize ZCT
            zeroCountTable = (UIntPtr[])
                             DeferredReferenceCountingCollector.
                             AllocateArray(ref entryPool,
                                           UIntPtrArrayVtable,
                                           tableSize);
            VTable.Assert(zeroCountTable != null,
                          @"zeroCountTable != null");

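            // Patch the array's length field (it sits just past the PostHeader)
            // so that zeroCountTable.Length reports maxEntries.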
            *(uint *)(Magic.addressOf(zeroCountTable) + PostHeader.Size) =
                maxEntries;
            VTable.Assert(zeroCountTable.Length == maxEntries,
                          @"zeroCountTable.Length == maxEntries");

            // Build ZCT freeEntries list
            freeHead = 1;
            for (uint i = 1; i < maxEntries - 1; i++)
            {
                zeroCountTable[i] = (UIntPtr)(((i + 1) << 2) | 0x01);
            }
            zeroCountTable[maxEntries - 1] = (UIntPtr)0x01;

            zctGarbagePicker =
                (ZCTGarbagePicker)BootstrapMemory.
                Allocate(typeof(ZCTGarbagePicker));
        }
Example #13
        internal override bool Scan(NonNullReferenceVisitor visitor)
        {
            bool localRepeat  = false;
            bool globalRepeat = false;

            while (true)
            {
                while (this.HaveWork)
                {
                    localRepeat = true;
                    UIntPtr lowAddr, highAddr;
                    lowAddr  = this.GetWork(out highAddr);
                    lowAddr += PreHeader.Size;
                    lowAddr  =
                        BumpAllocator.SkipNonObjectData(lowAddr, highAddr);
                    while (lowAddr < highAddr)
                    {
                        Object obj = Magic.fromAddress(lowAddr);
                        lowAddr += visitor.VisitReferenceFields(obj);
                        lowAddr  =
                            BumpAllocator.SkipNonObjectData(lowAddr, highAddr);
                    }
                }
                if (this.destinationLow != this.allocator.AllocPtr)
                {
                    localRepeat = true;
                    UIntPtr lowAddr  = this.destinationLow;
                    UIntPtr highAddr = this.allocator.AllocPtr;
                    this.destinationLow = highAddr;
                    this.AddWork(lowAddr, highAddr);
                }
                if (!localRepeat)
                {
                    // Exit the loop if we have done nothing this time around
                    break;
                }
                globalRepeat = true;
                localRepeat  = false;
            }
            return(globalRepeat);
        }
Example #14
        /*
         * Returns a pointer to the first object on the given page.
         * N.B. If called on a page with no ~allocated~ first object it may
         * return a pointer to the unused space token.
         */
        internal static UIntPtr First(UIntPtr page)
        {
            uint    offset   = PageTable.Extra(page);
            UIntPtr pageAddr = PageTable.PageAddr(page);
            UIntPtr currAddr;

            if (offset != OFFSET_NO_DATA)
            {
                currAddr = pageAddr + (offset - OFFSET_SKEW);
            }
            else
            {
                currAddr = Before(pageAddr);
                VTable.Assert(currAddr <= pageAddr);
                UIntPtr nextPageStart = PageTable.PagePad(currAddr + 1);
                while (currAddr < pageAddr)
                {
                    if (Allocator.IsAlignment(currAddr))
                    {
                        currAddr += UIntPtr.Size;
                    }
                    else if (BumpAllocator.IsUnusedSpace(currAddr))
                    {
                        currAddr = PageTable.PagePad(currAddr) + PreHeader.Size;
                    }
                    else
                    {
                        if (currAddr >= nextPageStart)
                        {
                            InteriorPtrTable.SetFirst(currAddr);
                            nextPageStart = PageTable.PagePad(currAddr + 1);
                        }
                        currAddr += ObjectSize(currAddr);
                    }
                }
            }
            currAddr = Allocator.SkipAlignment(currAddr);
            return(currAddr);
        }
Example #15
        void VisitObjects(ObjectLayout.ObjectVisitor objectVisitor,
                          UIntPtr lowAddr, UIntPtr highAddr)
        {
            UIntPtr oldAddr    = UIntPtr.Zero;
            UIntPtr objectAddr = lowAddr + PreHeader.Size;

            objectAddr = BumpAllocator.SkipNonObjectData(objectAddr, highAddr);
            while (objectAddr < highAddr)
            {
                if (PageTable.Page(objectAddr) != PageTable.Page(oldAddr))
                {
                    InteriorPtrTable.VerifyFirst(oldAddr, objectAddr);
                }
                oldAddr = objectAddr;
                Object  obj        = Magic.fromAddress(objectAddr);
                UIntPtr objectSize = objectVisitor.Visit(obj);
                objectAddr += objectSize;
                objectAddr  =
                    BumpAllocator.SkipNonObjectData(objectAddr, highAddr);
            }
            VTable.Assert(objectAddr - PreHeader.Size <= highAddr);
        }
Example #16
        // Interface with the compiler!

        internal static unsafe UIntPtr AllocateBig(UIntPtr numBytes,
                                                   uint alignment,
                                                   Thread currentThread)
        {
            // Pretenure Trigger
            pretenuredSinceLastFullGC += numBytes;
            if (pretenuredSinceLastFullGC > PretenureHardGCTrigger)
            {
                GC.InvokeMajorCollection(currentThread);
            }

            // Potentially Join a collection
            GC.CheckForNeededGCWork(currentThread);
            int     maxAlignmentOverhead = unchecked ((int)alignment) - UIntPtr.Size;
            UIntPtr pageCount            =
                PageTable.PageCount(numBytes + maxAlignmentOverhead);
            bool    fCleanPages = true;
            UIntPtr page        = PageManager.EnsurePages(currentThread, pageCount,
                                                          largeObjectGeneration,
                                                          ref fCleanPages);
            int unusedBytes =
                unchecked ((int)(PageTable.RegionSize(pageCount) - numBytes));
            int unusedCacheLines =
                unchecked ((int)(unusedBytes - maxAlignmentOverhead)) >> 5;
            int pageOffset = 0;

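            // Vary the starting offset of successive big objects in 32-byte
            // (cache-line) steps; bigOffset rotates through the unused space
            // so large objects do not all begin at the same offset.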
            if (unusedCacheLines != 0)
            {
                pageOffset = (bigOffset % unusedCacheLines) << 5;
                bigOffset++;
            }
            UIntPtr pageStart = PageTable.PageAddr(page);

            for (int i = 0; i < pageOffset; i += UIntPtr.Size)
            {
                Allocator.WriteAlignment(pageStart + i);
            }
            UIntPtr unalignedStartAddr = pageStart + pageOffset;
            UIntPtr startAddr          =
                Allocator.AlignedAllocationPtr(unalignedStartAddr,
                                               pageStart + unusedBytes,
                                               alignment);

            pageOffset +=
                unchecked ((int)(uint)(startAddr - unalignedStartAddr));
            if (pageOffset < unusedBytes)
            {
                BumpAllocator.WriteUnusedMarker(pageStart + pageOffset + numBytes);
            }
            UIntPtr resultAddr = startAddr + PreHeader.Size;

            InteriorPtrTable.SetFirst(resultAddr);
            VTable.Assert(PageTable.Page(resultAddr) <
                          PageTable.Page(startAddr + numBytes - 1),
                          "Big object should cross pages");
            if (GC.remsetType == RemSetType.Cards)
            {
#if DONT_RECORD_OBJALLOC_IN_OFFSETTABLE
#else
                OffsetTable.SetLast(resultAddr);
#endif
            }
            return(resultAddr);
        }
Example #17
 internal override void NewThreadNotification(Thread newThread,
                                              bool initial)
 {
     base.NewThreadNotification(newThread, initial);
     BumpAllocator.NewThreadNotification(newThread, nurseryGeneration);
 }
Example #18
            private static void CleanPageTail(UIntPtr postPinnedAddr)
            {
                if (!PageTable.PageAligned(postPinnedAddr))
                {
                    // If postPinnedAddr points to the first object on its page,
                    // then we are removing all objects (specifically the part
                    // of the object that the InteriorPtrTable tracks, the
                    // vtables) from the page, so we should clear the page's
                    // entry in the InteriorPtrTable.

                    UIntPtr page        = PageTable.Page(postPinnedAddr);
                    UIntPtr firstObjPtr = InteriorPtrTable.First(page);
                    if (firstObjPtr > postPinnedAddr)
                    {
                        VTable.Assert
                            (firstObjPtr - PreHeader.Size >= postPinnedAddr,
                            "postPinnedAddr should not point to the "
                            + "interior of an object (1)");
                        InteriorPtrTable.ClearFirst(page);
                    }
                    else if (!BumpAllocator.IsUnusedSpace(firstObjPtr))
                    {
                        UIntPtr firstObjSize =
                            InteriorPtrTable.ObjectSize(firstObjPtr);
                        VTable.Assert
                            (firstObjPtr + firstObjSize - PreHeader.Size
                            <= postPinnedAddr,
                            "postPinnedAddr should not point to the "
                            + "interior of an object (2)");
                    }

                    UIntPtr byteCount = PageTable.PagePad(postPinnedAddr)
                                        - postPinnedAddr;
                    Util.MemClear(postPinnedAddr, byteCount);
                    BumpAllocator.WriteUnusedMarker(postPinnedAddr);

                    if (GC.remsetType == RemSetType.Cards && byteCount > 0)
                    {
                        UIntPtr firstCard = CardTable.CardNo(postPinnedAddr);
                        UIntPtr lastCard  =
                            CardTable.CardNo(postPinnedAddr + byteCount - 1);

                        if (!OffsetTable.NoObjectPtrToTheCard(firstCard))
                        {
                            UIntPtr offset = OffsetTable.GetOffset(firstCard);
                            UIntPtr objPtr =
                                CardTable.CardAddr(firstCard) + offset;
                            UIntPtr size = OffsetTable.ObjectSize(objPtr);

                            VTable.Assert
                                ((objPtr + size - PreHeader.Size
                                  <= postPinnedAddr) ||
                                (objPtr >= postPinnedAddr),
                                "Object should be totally "
                                + "above or below postPinnedAddr");
                            if (objPtr >= postPinnedAddr)
                            {
                                OffsetTable.ClearCards(firstCard, firstCard);
                            }
                        }

                        OffsetTable.ClearCards(firstCard + 1, lastCard);
                    }
                }
            }
Example #19
        // Reference updates and object relocation

        private unsafe UIntPtr ForwardReferences(PageType generation,
                                                 out UIntPtr oldAllocPtr)
        {
            VTable.Assert(IsValidGeneration((int)generation));

            UIntPtr  destPage = UIntPtr.Zero;
            UIntPtr  destCursor;
            UIntPtr  destLimit;
            PageType destGeneration;

            if (generation < MAX_GENERATION)
            {
                destGeneration = generation + 1;
            }
            else
            {
                destGeneration = MAX_GENERATION;
            }
            destCursor  = UIntPtr.Zero;
            destLimit   = UIntPtr.Zero;
            oldAllocPtr = destCursor;
            UIntPtr runLength = UIntPtr.Zero;

            for (UIntPtr i = UIntPtr.Zero; i < PageTable.pageTableCount; i++)
            {
                if (!IsMyZombiePage(i))
                {
                    continue;
                }
                UIntPtr deltaBytes   = (UIntPtr)0x80000000;
                UIntPtr sourceCursor = PageTable.PageAddr(i);
                do
                {
                    i++;
                } while (i < PageTable.pageTableCount && IsMyZombiePage(i));
                UIntPtr sourceLimit = PageTable.PageAddr(i);
                while (true)
                {
                    if (sourceCursor >= sourceLimit)
                    {
                        break;
                    }
                    if (Allocator.IsAlignmentMarkerAddr(sourceCursor))
                    {
                        sourceCursor += UIntPtr.Size;
                        deltaBytes   += UIntPtr.Size;
                        continue;
                    }
                    if (BumpAllocator.IsUnusedMarkerAddr(sourceCursor))
                    {
                        sourceCursor += UIntPtr.Size;
                        sourceCursor  = PageTable.PagePad(sourceCursor);
                        deltaBytes    = (UIntPtr)0x80000000;
                        continue;
                    }
                    UIntPtr objectAddr     = sourceCursor + PreHeader.Size;
                    UIntPtr vtableOrMarker =
                        Allocator.GetObjectVTable(objectAddr);
                    if (vtableOrMarker == UIntPtr.Zero)
                    {
                        // We found the end of an allocation page
                        sourceCursor = PageTable.PagePad(sourceCursor);
                        deltaBytes   = (UIntPtr)0x80000000;
                        continue;
                    }
                    UIntPtr vtableAddr;
                    if ((vtableOrMarker & 1) != 0)
                    {
                        UIntPtr temp = *(UIntPtr *)(vtableOrMarker - 1);
                        while ((temp & 1) != 0)
                        {
                            temp = *(UIntPtr *)(temp - 1);
                        }
                        VTable.Assert(PageTable.IsNonGcPage(PageTable.Type(PageTable.Page(temp))));
                        vtableAddr = temp;
                        if ((temp & 2) != 0)
                        {
                            // Found pinned object
                            SkipDestinationAreas(ref destPage, destCursor,
                                                 ref destLimit,
                                                 sourceCursor);
                            deltaBytes -= (sourceCursor - destCursor);
                            destCursor  = sourceCursor;
                            vtableAddr -= 2; // Remove "pinned" bit
                        }
                        Allocator.SetObjectVTable(objectAddr, vtableAddr);
                    }
                    else
                    {
                        vtableAddr = vtableOrMarker;
                    }
                    VTable vtable =
                        Magic.toVTable(Magic.fromAddress(vtableAddr));
                    UIntPtr objectSize =
                        ObjectLayout.ObjectSize(objectAddr, vtable);
                    VTable.Assert(objectSize > 0);
                    if ((vtableOrMarker & 1) != 0)
                    {
                        if (GenerationalCollector.IsLargeObjectSize
                                (objectSize))
                        {
                            // Don't move large objects
                            SkipDestinationAreas(ref destPage,
                                                 destCursor,
                                                 ref destLimit,
                                                 sourceCursor);
                            UIntPtr localDelta =
                                sourceCursor - destCursor;
                            deltaBytes -= localDelta;
                            if (deltaBytes == UIntPtr.Zero &&
                                runLength != UIntPtr.Zero)
                            {
                                runLength += localDelta;
                            }
                            destCursor = sourceCursor;
                            UIntPtr objLimit    = sourceCursor + objectSize;
                            UIntPtr pageEndAddr = PageTable.PagePad(objLimit);
                            objectSize = (pageEndAddr - sourceCursor);
                        }
                        else if (destCursor + objectSize > destLimit)
                        {
                            UIntPtr oldDestCursor = destCursor;
                            FindDestinationArea(ref destPage,
                                                ref destCursor,
                                                ref destLimit,
                                                objectSize,
                                                destGeneration);
                            VTable.Assert(destCursor <= sourceCursor);
                            VTable.Assert(destCursor + objectSize <=
                                          destLimit);
                            deltaBytes -= (destCursor - oldDestCursor);
                        }
                        else if (vtable.baseAlignment > UIntPtr.Size)
                        {
                            uint alignmentMask = vtable.baseAlignment - 1;
                            int  offset        = PreHeader.Size + UIntPtr.Size;
                            while (((destCursor + offset) & alignmentMask) != 0)
                            {
                                destCursor += UIntPtr.Size;
                                deltaBytes -= UIntPtr.Size;
                                if (deltaBytes == UIntPtr.Zero &&
                                    runLength != UIntPtr.Zero)
                                {
                                    runLength += UIntPtr.Size;
                                }
                            }
                        }
                        if (runLength == UIntPtr.Zero ||
                            deltaBytes != UIntPtr.Zero)
                        {
                            if (runLength != UIntPtr.Zero)
                            {
                                RegisterRelocationEnd(runLength);
                            }
                            RegisterRelocationStart(sourceCursor,
                                                    destCursor);
                            deltaBytes = UIntPtr.Zero;
                            runLength  = UIntPtr.Zero;
                        }
                        UIntPtr newObjectAddr = destCursor + PreHeader.Size;
                        do
                        {
                            UIntPtr *ptrAddr = (UIntPtr *)(vtableOrMarker - 1);
                            vtableOrMarker = *ptrAddr;
                            *ptrAddr = newObjectAddr;
                        } while ((vtableOrMarker & 1) != 0);
                        destCursor += objectSize;
                        runLength  += objectSize;
                    }
                    else
                    {
                        deltaBytes += objectSize;
                        if (runLength != UIntPtr.Zero)
                        {
                            RegisterRelocationEnd(runLength);
                        }
                        runLength = UIntPtr.Zero;
                    }
                    sourceCursor += objectSize;
                }
            }
            if (runLength != UIntPtr.Zero)
            {
                RegisterRelocationEnd(runLength);
            }
            return(destCursor);
        }
Example #20
 internal static UIntPtr PtrToNextObject(UIntPtr objPtr, UIntPtr size, UIntPtr limit)
 {
     return(BumpAllocator.SkipNonObjectData(objPtr + size, limit));
 }
Example #21
        internal static unsafe bool AccumulateRCUpdates(String methodName,
                                                        int methodIndex,
                                                        uint maxIndex,
                                                        AcctRecord rec)
        {
            VTable.Assert(RCCollector.ProfilingMode,
                          @"RCCollector.ProfilingMode");

            // Return if the page table hasn't been set up yet.
            if (PageTable.pageTableCount == UIntPtr.Zero)
            {
                return(false);
            }

            if (methods == null)
            {
                // Allocate up front storage for the accounting records.
                //
                // This is requisitioned directly from the memory
                // manager. Care should be taken to ensure that
                // AccumulateRCUpdates does not indirectly call
                // methods that may have compiler-inserted RC updates.
                VTable vtable =
                    ((RuntimeType)typeof(AcctRecord[])).classVtable;
                UIntPtr size =
                    ObjectLayout.ArraySize(vtable, maxIndex + 1);

                BumpAllocator profileData =
                    new BumpAllocator(PageType.NonGC);
                UIntPtr profileDataStart =
                    MemoryManager.AllocateMemory(size);
                profileData.SetRange(profileDataStart, size);
                PageManager.SetStaticDataPages(profileDataStart, size);

                methods =
                    (AcctRecord[])Allocate(ref profileData, vtable, size);
                VTable.Assert(methods != null,
                              @"methods != null");

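                // Patch the array's length field (just past the PostHeader)
                // so that methods.Length reports maxIndex + 1.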
                *(uint *)(Magic.addressOf(methods) +
                          PostHeader.Size) = maxIndex + 1;
            }

            VTable.Assert(methods.Length == maxIndex + 1,
                          @"methods.Length == maxIndex+1");

            if (methods[methodIndex].methodName == null)
            {
                methodNames[methodIndex].methodName = methodName;
            }
            // Not "methodNames[methodIndex].methodName == methodName"
            // because the Equality operator carries compiler-inserted
            // RC updates!
            VTable.Assert(Magic.addressOf(methodNames[methodIndex].
                                          methodName) ==
                          Magic.addressOf(methodName),
                          @"Magic.addressOf(methodNames[methodIndex].
                                          methodName) ==
                        Magic.addressOf(methodName)");

            methods[methodIndex] += rec;

            return(true);
        }
Example #22
 internal override void NewThreadNotification(Thread newThread,
                                              bool initial)
 {
     base.NewThreadNotification(newThread, initial);
     BumpAllocator.NewThreadNotification(newThread, PageType.Owner0);
 }
Example #23
        private static UIntPtr FirstPtrFromInteriorTable(UIntPtr c)
        {
            UIntPtr cardAddr     = CardTable.CardAddr(c);
            UIntPtr nextCardAddr = CardTable.NextCardAddr(c);
            UIntPtr page         = PageTable.Page(cardAddr);
            UIntPtr pageAddr     = PageTable.PageAddr(page);
            UIntPtr currAddr;

            if (page == 0)
            {
                currAddr = PtrToNextObject(pageAddr,
                                           (UIntPtr)PreHeader.Size, nextCardAddr);
            }
            else
            {
                uint offset = PageTable.Extra(page);
                currAddr = UIntPtr.Zero;
                if (offset != InteriorPtrTable.OFFSET_NO_DATA)
                {
                    currAddr = pageAddr + (offset - InteriorPtrTable.OFFSET_SKEW);
                }

                // In general, we expect currAddr <= cardAddr, or, in the extreme
                // case where the object starts at the page boundary,
                // currAddr - Object.HEADER_BYTES <= cardAddr. The contrary
                // case has to be handled by searching previous pages.

                if (currAddr == UIntPtr.Zero ||
                    (currAddr > cardAddr &&
                     currAddr - PreHeader.Size > cardAddr))
                {
                    // Look at previous pages, in case an object on one of
                    // them spans into the current page. In that case, we
                    // should use that object's ptr.

                    currAddr = InteriorPtrTable.Last(page - 1);

                    // Usually, Last() returns a pointer before or at the page
                    // boundary. However, there is one exception: when an object
                    // fits exactly up to the last byte of the previous page, and
                    // the next object starts right at the page boundary (the
                    // first byte of the next page), then the pointer to this
                    // next object is returned.
                    // Example found: objPtr=3d09fa8, size=60, pageboundary=
                    // 3d0a000, next objPtr=3d0a008. The returned pointer is
                    // then 3d0a008, which is beyond the page boundary.

                    VTable.Assert(currAddr <= pageAddr ||
                                  currAddr - PreHeader.Size <= pageAddr,
                                  "object is expected before page or right at the beginning of it");
                }
            }
            VTable.Assert(currAddr < nextCardAddr, "object is expected before next card");

            while (currAddr < nextCardAddr)
            {
                if (Allocator.IsAlignment(currAddr))
                {
                    currAddr += UIntPtr.Size;
                }
                else if (BumpAllocator.IsUnusedSpace(currAddr))
                {
                    currAddr = PageTable.PagePad(currAddr) + PreHeader.Size;
                }
                else
                {
                    UIntPtr size = InteriorPtrTable.ObjectSize(currAddr);
                    if (currAddr + size - PreHeader.Size > cardAddr)
                    {
                        return(currAddr);
                    }
                    currAddr += size;
                }
            }
            VTable.Assert(false, "No obj ptr found by looking at interior table");
            return(UIntPtr.Zero);
        }
Example #24
 internal virtual void Initialize(PageType pageType)
 {
     this.pageType  = pageType;
     this.allocator = new BumpAllocator(pageType);
 }
Example #25
 internal override UIntPtr AllocateObjectMemory(UIntPtr numBytes,
                                                uint alignment,
                                                Thread currentThread)
 {
     return(BumpAllocator.Allocate(currentThread, numBytes, alignment));
 }
Example #26
        /*
         * Returns a pointer past the last object _that_fits_completely_ on
         * the given page.  Note that the "last" object on a page may
         * actually start on a previous page.
         */
        internal static UIntPtr Last(UIntPtr page)
        {
            UIntPtr currAddr = InteriorPtrTable.First(page);
            UIntPtr endAddr  = PageTable.PageAddr(page + 1);

            // Look out for the unused space token: this page may not
            // have been completely allocated: its "first" object might not
            // be valid.
            if (BumpAllocator.IsUnusedSpace(currAddr) || currAddr >= endAddr)
            {
                // Back up to the previous object.  Should be fast
                // since First updated the InteriorPtrTable entries.
                currAddr = Before(PageTable.PageAddr(page));
            }
            // REVIEW this is very similar to Find(addr) below.
            VTable.Assert(currAddr <= endAddr);
            while (true)
            {
                // Watch out for alignment padding; advance the pointer if
                // it points to a syncblock index rather than a vtable
                // pointer.  Note that we must do this before scrolling,
                // since the page table value was set before we knew the
                // required alignment.
                if (Allocator.IsAlignment(currAddr))
                {
                    currAddr += UIntPtr.Size;
                }
                else if (BumpAllocator.IsUnusedSpace(currAddr))
                {
                    UIntPtr nextAddr =
                        PageTable.PagePad(currAddr) + PreHeader.Size;
                    if (nextAddr >= endAddr)
                    {
                        return(currAddr);
                    }
                    else
                    {
                        currAddr = nextAddr;
                    }
                }
                else
                {
                    VTable.Assert(currAddr <= endAddr);
                    UIntPtr size     = ObjectSize(currAddr);
                    UIntPtr postAddr = currAddr + size;
                    if (postAddr > endAddr)
                    {
                        if (postAddr - PreHeader.Size > endAddr)
                        {
                            // The object spills over onto the next page
                            return(currAddr);
                        }
                        else
                        {
                            // The object ended at or before the page boundary
                            return(postAddr);
                        }
                    }
                    else
                    {
                        currAddr = postAddr;
                    }
                }
            }
        }