internal override void DestructHeap() {
    PrintGCTiming();
    PrintAllocations();
    CollectorStatistics.Event(GCEvent.DestroyHeap);
    CollectorStatistics.Summary();
}
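// Walks the thread table and scans the stack of every live thread,
// applying the two supplied visitors via ScanStack (one for ordinary
// thread references, one for pinned references).  Returns the number
// of threads whose stacks were scanned.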
internal static int ScanStacks(NonNullReferenceVisitor VisitThreadReference,
                               NonNullReferenceVisitor VisitPinnedReference) {
    int limit = Thread.threadTable.Length;
    int countThreads = 0;
    for (int i = 0; i < limit; i++) {
        Thread t = Thread.threadTable[i];
        if (t != null) {
            CollectorStatistics.Event(GCEvent.StackScanStart, i);
            ScanStack(t, VisitThreadReference, VisitPinnedReference);
            CollectorStatistics.Event(GCEvent.StackScanComplete, i);
            countThreads++;
        }
    }
    return countThreads;
}
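// Posts a GC request to every other thread and then waits on the GC
// event until each one has either come under GC control or vanished
// from the thread table.  The calling (collector) thread and empty
// thread-table slots are skipped.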
internal static void AllThreadRendezvous(int currentThreadIndex) {
    Transitions.MakeGCRequests(currentThreadIndex);
    for (int i = 0; i < Thread.threadTable.Length; i++) {
        if (Thread.threadTable[i] == null || i == currentThreadIndex) {
            continue;
        }
        CollectorStatistics.Event(GCEvent.StopThread, i);
        while (!Transitions.TakeGCControl(i) &&
               !Transitions.UnderGCControl(i) &&
               Transitions.HasGCRequest(i) &&
               Thread.threadTable[i] != null) {
            // NOTE: there is no code in this loop that could
            // cause a signal on an event to be consumed.
            Thread.WaitForGCEvent(currentThreadIndex);
        }
    }
}
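// Stop-the-world mark-sweep collection, expected to run while all other
// threads are stopped: (1) trace and mark live objects from thread
// stacks, bootstrap data, strong MultiUseWord references, and static
// data; (2) process weak references and finalization candidates, then
// sweep dead objects back onto the segregated free lists; (3) recompute
// the next collection trigger as one quarter of the surviving heap,
// clamped to the [MinTrigger, MaxTrigger] range.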
internal override void CollectStopped(int currentThreadIndex, int generation) {
#if SINGULARITY
#if DEBUG
#if THREAD_TIME_ACCOUNTING
    UIntPtr preGcTotalBytes = SegregatedFreeList.TotalBytes;
#endif
    DebugStub.WriteLine("~~~~~ Start MarkSweep Cleanup [data={0:x8}, pid={1:x3}]",
                        __arglist(SegregatedFreeList.TotalBytes,
                                  PageTable.processTag >> 16));
#endif
#if SINGULARITY_KERNEL
#if THREAD_TIME_ACCOUNTING
    TimeSpan ticks = Thread.CurrentThread.ExecutionTime;
    TimeSpan ticks2 = SystemClock.KernelUpTime;
#else
    TimeSpan ticks = SystemClock.KernelUpTime;
#endif
#elif SINGULARITY_PROCESS
#if THREAD_TIME_ACCOUNTING
    TimeSpan ticks = ProcessService.GetThreadTime();
    TimeSpan ticks2 = ProcessService.GetUpTime();
#else
    TimeSpan ticks = ProcessService.GetUpTime();
#endif
#endif
#endif
    int before = 0;
    if (VTable.enableGCTiming) {
        before = Environment.TickCount;
    }
    if (GC.IsProfiling) {
        GcProfiler.NotifyPreGC(MinGeneration); // non-generational collector, so pretend Gen0
        // Calls like ResurrectCandidates below can cause
        // allocations and thus, potentially, profiler
        // notifications.  However, at that time the heap is
        // damaged in the sense that VPtrs have bits OR-ed in
        // for object marking.  We do not want to accept
        // profiling during this window.
        //
        // There is no synchronization issue with setting this
        // flag because it will only be consulted by the
        // thread that sets and resets it.
        HeapDamaged = true;
    }
    // 1) Mark the live objects
    CollectorStatistics.Event(GCEvent.TraceStart);
#if !VC
    TryAllManager.PreGCHookTryAll();
#endif
    MultiUseWord.PreGCHook(false /* don't use shadows */);
    Finalizer.PrepareCollectFinalizers();
    int countThreads =
        CallStack.ScanStacks(threadMarkReferenceVisitor, threadMarkReferenceVisitor);
    Thread.VisitBootstrapData(markAndProcessReferenceVisitor);
#if SINGULARITY_KERNEL
    Kernel.VisitSpecialData(markAndProcessReferenceVisitor);
#endif
    MultiUseWord.VisitStrongRefs(markAndProcessReferenceVisitor,
                                 false /* Don't use shadows */);
#if !VC
    TryAllManager.VisitStrongRefs(markAndProcessReferenceVisitor);
#endif
    StaticData.ScanStaticData(markAndProcessReferenceVisitor);
    CollectorStatistics.Event(GCEvent.TraceSpecial);
    WeakReference.Process(updateReferenceVisitor, true, true);
    Finalizer.ResurrectCandidates(updateReferenceVisitor,
                                  markAndProcessReferenceVisitor, true);
    markReferenceVisitor.Cleanup();
    UnmanagedPageList.ReleaseStandbyPages();
    // 2) Sweep the garbage objects
    int afterTrace = 0;
    if (VTable.enableGCTiming) {
        afterTrace = Environment.TickCount;
    }
    CollectorStatistics.Event(GCEvent.SweepStart, TotalMemory);
    WeakReference.Process(updateReferenceVisitor, true, false);
    MultiUseWord.VisitWeakRefs(updateReferenceVisitor,
                               false /* Don't use shadows */);
#if !VC
    TryAllManager.VisitWeakRefs(updateReferenceVisitor);
#endif
    SegregatedFreeList.VisitAllObjects(sweepVisitor);
    SegregatedFreeList.RecycleGlobalPages();
    SegregatedFreeList.CommitFreedData();
    CollectorStatistics.Event(GCEvent.SweepSpecial);
    MultiUseWord.PostGCHook();
    if (GC.IsProfiling) {
        HeapDamaged = false;
        // Allocations may occur inside the PostGCHook.  Hopefully a
        // sufficiently limited quantity that we don't recursively
        // trigger a GC.
        GcProfiler.NotifyPostGC(ProfileRoots, ProfileObjects);
    }
    Finalizer.ReleaseCollectFinalizers();
#if !VC
    TryAllManager.PostGCHookTryAll();
#endif
    CollectorStatistics.Event(GCEvent.CollectionComplete, TotalMemory);
    if (VTable.enableGCTiming) {
        int after = Environment.TickCount;
        numCollections++;
        traceTime += (afterTrace - before);
        sweepTime += (after - afterTrace);
    }
    // 3) Determine a new collection trigger
    UIntPtr testTrigger = (UIntPtr)this.TotalMemory >> 2;
    UIntPtr minTrigger = (UIntPtr)MinTrigger;
    UIntPtr maxTrigger = (UIntPtr)MaxTrigger;
    collectionTrigger =
        (testTrigger > minTrigger) ?
        (testTrigger < maxTrigger ? testTrigger : maxTrigger) :
        minTrigger;
#if SINGULARITY
#if SINGULARITY_KERNEL
#if THREAD_TIME_ACCOUNTING
    int procId = Thread.CurrentProcess.ProcessId;
    ticks = Thread.CurrentThread.ExecutionTime - ticks;
    ticks2 = SystemClock.KernelUpTime - ticks2;
#else
    ticks = SystemClock.KernelUpTime - ticks;
#endif
    //Thread.CurrentProcess.SetGcPerformanceCounters(ticks, (long) SegregatedFreeList.TotalBytes);
#elif SINGULARITY_PROCESS
#if THREAD_TIME_ACCOUNTING
    ushort procId = ProcessService.GetCurrentProcessId();
    ticks = ProcessService.GetThreadTime() - ticks;
    ticks2 = ProcessService.GetUpTime() - ticks2;
#else
    ticks = ProcessService.GetUpTime() - ticks;
#endif
    //ProcessService.SetGcPerformanceCounters(ticks, (long) SegregatedFreeList.TotalBytes);
#endif
#if DEBUG
#if THREAD_TIME_ACCOUNTING
    DebugStub.WriteLine("~~~~~ Finish MarkSweep Cleanup [data={0:x8}, diff={7:x8} pid={1:x3}, ms(Thread)={2:d6}, ms(System)={3:d6}, thds={4}, procId={5}, tid={6}]",
                        __arglist(SegregatedFreeList.TotalBytes,
                                  PageTable.processTag >> 16,
                                  ticks.Milliseconds,
                                  ticks2.Milliseconds,
                                  countThreads,
                                  procId,
                                  Thread.GetCurrentThreadIndex(),
                                  preGcTotalBytes - SegregatedFreeList.TotalBytes));
#else
    DebugStub.WriteLine("~~~~~ Finish MarkSweep Cleanup [data={0:x8}, pid={1:x3}, ms={2:d6}, thds={3}]",
                        __arglist(SegregatedFreeList.TotalBytes,
                                  PageTable.processTag >> 16,
                                  ticks.Milliseconds,
                                  countThreads));
#endif
#endif
#endif
}
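// Drives a complete stop-the-world cycle: clears this thread's pending
// GC request, stops the world, optionally verifies the heap, invokes the
// configured collector via CollectStopped, verifies and reports again,
// and finally resumes the world while updating timing, profiling, and
// (on Singularity) GC performance counters.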
private void PerformCollection(int currentThreadIndex, int generation) {
    // Clear the GCRequest bit (if necessary) before doing
    // anything that could cause a state transition.
    if (Transitions.HasGCRequest(currentThreadIndex)) {
        Transitions.ClearGCRequest(currentThreadIndex);
    }
    int startTicks = 0;
    bool enableGCTiming = VTable.enableGCTiming;
    if (enableGCTiming || VTable.enableFinalGCTiming) {
        VTable.enableGCTiming = false;
        startTicks = Environment.TickCount;
        if (enableGCTiming) {
            VTable.DebugPrint("[GC start: {0} bytes]\n",
                              __arglist(TotalMemory));
        }
    }
#if SINGULARITY
    Tracing.Log(Tracing.Debug, "GC start");
#endif
    CollectorStatistics.Event(GCEvent.StopTheWorld);
    CurrentPhase = StopTheWorldPhase.Synchronizing;
    StopTheWorld();
    CurrentPhase = StopTheWorldPhase.SingleThreaded;
    StartGCCycle();
#if SINGULARITY
    long preGcMemoryUsage = GC.GetTotalMemory(false);
#if SINGULARITY_KERNEL
#if THREAD_TIME_ACCOUNTING
    TimeSpan ticks = Thread.CurrentThread.ExecutionTime;
    TimeSpan ticks2 = SystemClock.KernelUpTime;
#else
    TimeSpan ticks = SystemClock.KernelUpTime;
#endif
#elif SINGULARITY_PROCESS
#if THREAD_TIME_ACCOUNTING
    TimeSpan ticks = ProcessService.GetThreadTime();
    TimeSpan ticks2 = ProcessService.GetUpTime();
#else
    TimeSpan ticks = ProcessService.GetUpTime();
#endif
#endif
#endif //singularity
#if SINGULARITY_KERNEL
    bool iflag = Processor.DisableInterrupts();
    // Disable interrupts on other CPU's
    MpExecution.StopProcessorsForGC();
#endif
#if SINGULARITY
    ulong beg = Isa.GetCycleCount();
#endif
    // Preparation
    GC.allocationGCInhibitCount++;
    // Verify the heap before GC
    if (VTable.enableGCVerify) {
        this.VerifyHeap(true);
    }
    // Invoke the chosen collector
#if SINGULARITY
    Monitoring.Log(Monitoring.Provider.GC,
                   (ushort)GarbageCollectorEvent.StartCollection);
#endif
    this.CollectStopped(collectorThreadIndex, generation);
#if SINGULARITY
    Monitoring.Log(Monitoring.Provider.GC,
                   (ushort)GarbageCollectorEvent.EndCollection);
#endif
    // Verify the heap after GC
    if (VTable.enableGCVerify) {
        this.VerifyHeap(false);
    }
    if (VTable.enableGCAccounting) {
        MemoryAccounting.Report(GC.gcType);
    }
    // Cleanup
    CollectorStatistics.Event(GCEvent.ResumeTheWorld);
    GC.allocationGCInhibitCount--;
    CurrentPhase = StopTheWorldPhase.Idle;
#if SINGULARITY
    long postGcMemoryUsage = GC.GetTotalMemory(false);
#endif
    if (enableGCTiming || VTable.enableFinalGCTiming) {
        int elapsedTicks = Environment.TickCount - startTicks;
        BaseCollector.RegisterPause(elapsedTicks);
        if (enableGCTiming) {
            VTable.DebugPrint("[GC end : {0} bytes, {1} ms]\n",
                              __arglist(TotalMemory, elapsedTicks));
            VTable.enableGCTiming = true;
        }
    }
    if (VTable.enableGCProfiling) {
        ulong totalMemory = (ulong)GC.GetTotalMemory(false);
        this.RegisterHeapSize(totalMemory);
    }
    ResumeTheWorld();
    collectorThreadIndex = -1;
#if SINGULARITY
    Tracing.Log(Tracing.Debug, "GC stop");
    long pagesCollected = preGcMemoryUsage - postGcMemoryUsage;
#if SINGULARITY_KERNEL
#if THREAD_TIME_ACCOUNTING
    int procId = Thread.CurrentProcess.ProcessId;
    ticks = Thread.CurrentThread.ExecutionTime - ticks;
    ticks2 = SystemClock.KernelUpTime - ticks2;
    Process.kernelProcess.SetGcPerformanceCounters(ticks, (long)pagesCollected);
#else
    ticks = SystemClock.KernelUpTime - ticks;
#endif
    Thread.CurrentProcess.SetGcPerformanceCounters(ticks, (long)pagesCollected);
#elif SINGULARITY_PROCESS
#if THREAD_TIME_ACCOUNTING
    ushort procId = ProcessService.GetCurrentProcessId();
    ticks = ProcessService.GetThreadTime() - ticks;
    ticks2 = ProcessService.GetUpTime() - ticks2;
#else
    ticks = ProcessService.GetUpTime() - ticks;
#endif
    ProcessService.SetGcPerformanceCounters(ticks, (long)pagesCollected);
#endif
#if DEBUG
#if THREAD_TIME_ACCOUNTING
    DebugStub.WriteLine("~~~~~ StopTheWorld [collected pages={0:x8}, pid={1:x3}, ms(Thread)={2:d6}, ms(System)={3:d6}, procId={4}, tid={5}]",
                        __arglist(pagesCollected,
                                  PageTable.processTag >> 16,
                                  ticks.Milliseconds,
                                  ticks2.Milliseconds,
                                  procId,
                                  Thread.GetCurrentThreadIndex()));
#endif
#endif
#endif
#if SINGULARITY
    DebugStub.AddToPerfCounter(GC.perfCounter, Isa.GetCycleCount() - beg);
#endif
#if SINGULARITY_KERNEL
    // Resume interrupts on other CPU's
    MpExecution.ResumeProcessorsAfterGC();
    Processor.RestoreInterrupts(iflag);
#endif
}
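// Mark-compact collection of the given generation: mark live objects
// reachable from the stacks, remembered set, and other roots; forward
// references to their post-compaction addresses; compact the live
// objects; then rescan the stacks with the update visitor so thread
// frames see the relocated pointers.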
internal override void CollectGeneration(int generation,
                                         UIntPtr generationPageCount) {
    VTable.Assert(IsValidGeneration(generation));
    // 1) Mark the live objects
    CollectorStatistics.Event(GCEvent.TraceStart);
    MultiUseWord.PreGCHook(true /* use shadows */);
    Finalizer.PrepareCollectFinalizers();
    CallStack.ScanStacks(registerThreadReferenceVisitor,
                         registerPinnedReferenceVisitor);
    registerThreadReferenceVisitor.ForwardReferences();
    if (generation < (int)MAX_GENERATION) {
        // These calls must be done early, as they rely on the
        // contents of Thread.threadTable being intact.
        installedRemSet.Clean();
        installedRemSet.Uniquify();
        this.ScanRemSet((PageType)generation);
    }
    // Process runtime data that is allocated from SystemMemory
    Thread.VisitBootstrapData(markReferenceVisitor);
#if SINGULARITY_KERNEL
    Kernel.VisitSpecialData(markReferenceVisitor);
#endif
    MultiUseWord.VisitStrongRefs(markReferenceVisitor,
                                 true /* use shadows */);
    StaticData.ScanStaticData(markReferenceVisitor);
    CollectorStatistics.Event(GCEvent.TraceSpecial);
    WeakReference.Process(forwardReferenceVisitor, false, true);
    Finalizer.ResurrectCandidates(forwardReferenceVisitor,
                                  markReferenceVisitor, false);
    markReferenceVisitor.Cleanup();
    // 2) Forward pointers and compact the live objects
    CollectorStatistics.Event(GCEvent.SweepStart, TotalMemory);
    WeakReference.Process(forwardReferenceVisitor, false, false);
    MultiUseWord.VisitWeakRefs(forwardReferenceVisitor,
                               true /* use shadows */);
    UIntPtr oldAllocPtr;
    UIntPtr newLimit = this.ForwardReferences((PageType)generation,
                                              out oldAllocPtr);
    this.CompactHeapObjects(oldAllocPtr);
#if SINGULARITY
    Thread.UpdateAfterGC();
#endif
    Thread currentThread = Thread.threadTable[collectorThreadIndex];
#if SINGULARITY_KERNEL
    Kernel.UpdateAfterGC(currentThread);
#endif
    CallStack.ScanStacks(updateThreadReferenceVisitor,
                         updateThreadReferenceVisitor);
    this.CompactPhaseCleanup(currentThread, (PageType)generation, newLimit);
    // Resetting the GC state
    CollectorStatistics.Event(GCEvent.SweepSpecial);
    installedRemSet.Reset();
    MultiUseWord.PostGCHook();
    Finalizer.ReleaseCollectFinalizers();
    CollectorStatistics.Event(GCEvent.CollectionComplete, TotalMemory);
}
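// Copying collection of the given generation: pin pages referenced from
// the stacks, gather the root set (stacks, bootstrap data, strong refs,
// static data, remembered set), then copy reachable objects toward
// maxDestGeneration with ScanAndCopy before handling weak references,
// finalization candidates, and the pinned-page cleanup.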
internal override void CollectGeneration(int generation,
                                         UIntPtr generationPageCount) {
    UIntPtr fromSpacePageCountTotal = UIntPtr.Zero;
    for (int i = MinGeneration; i <= generation; i++) {
        fromSpacePageCountTotal += fromSpacePageCounts[i];
    }
    PageType maxDestGeneration = (generation < (int)MAX_GENERATION)
        ? (PageType)(generation + 1)
        : MAX_GENERATION;
    // The real work: find the roots, copy reachable objects
    CollectorStatistics.Event(GCEvent.ComputeRootSet);
#if !VC
    TryAllManager.PreGCHookTryAll();
#endif
    bool onlyNew = false;
    if (generation == (int)nurseryGeneration) {
        onlyNew = true;
    }
    MultiUseWord.PreGCHook(false /* don't use shadows */,
                           onlyNew /* only scan new EMUs */);
    Finalizer.PrepareCollectFinalizers();
    CallStack.ScanStacks(null, pinnedReferenceVisitor);
    pinnedReferenceVisitor.ProcessPinnedPages(generalReferenceVisitor);
    CallStack.ScanStacks(threadReferenceVisitor, null);
    Thread.VisitBootstrapData(generalReferenceVisitor);
#if SINGULARITY
    Thread.UpdateAfterGC();
#endif
    Thread currentThread = Thread.threadTable[collectorThreadIndex];
#if SINGULARITY_KERNEL
    Kernel.VisitSpecialData(generalReferenceVisitor);
    Kernel.UpdateAfterGC(currentThread);
#endif
    MultiUseWord.VisitStrongRefs(generalReferenceVisitor,
                                 false /* Don't use shadows */);
#if !VC
    TryAllManager.VisitStrongRefs(generalReferenceVisitor);
#endif
    StaticData.ScanStaticData(generalReferenceVisitor);
    if ((PageType)generation < MAX_GENERATION) {
        installedRemSet.Clean();
        this.ScanRemSet((PageType)generation);
    }
    installedRemSet.Reset();
    CollectorStatistics.Event(GCEvent.TraceStart);
    this.ScanAndCopy(maxDestGeneration);
    CollectorStatistics.Event(GCEvent.TraceSpecial);
    WeakReference.Process(forwardOnlyReferenceVisitor, true, true);
    Finalizer.ResurrectCandidates(forwardOnlyReferenceVisitor,
                                  generalReferenceVisitor, true);
    this.ScanAndCopy(maxDestGeneration);
    WeakReference.Process(forwardOnlyReferenceVisitor, true, false);
    MultiUseWord.VisitWeakRefs(forwardOnlyReferenceVisitor,
                               false /* Don't use shadows */);
#if !VC
    TryAllManager.VisitWeakRefs(forwardOnlyReferenceVisitor);
#endif
    CollectorStatistics.Event(GCEvent.CollectionComplete);
    // Clean up the auxiliary data structures
    for (int i = MinGeneration + 1; i <= (int)maxDestGeneration; i++) {
        copyScanners[i].Cleanup();
    }
    UnmanagedPageList.ReleaseStandbyPages();
    pinnedReferenceVisitor.CleanPinnedPages();
    MultiUseWord.PostGCHook(onlyNew);
    Finalizer.ReleaseCollectFinalizers();
#if !VC
    TryAllManager.PostGCHookTryAll();
#endif
}