public static MachineResources GetMachineResources()
{
    var currentProcess = Process.GetCurrentProcess();

    // use the CLR-reported working set on Windows and macOS; on other POSIX
    // platforms (i.e. Linux) read the resident set size (RSS) directly
    var workingSet = PlatformDetails.RunningOnPosix == false || PlatformDetails.RunningOnMacOsx
        ? currentProcess.WorkingSet64
        : MemoryInformation.GetRssMemoryUsage(currentProcess.Id);

    var memoryInfoResult = MemoryInformation.GetMemoryInfo();
    var installedMemory = memoryInfoResult.InstalledMemory.GetValue(SizeUnit.Bytes);
    var availableMemory = memoryInfoResult.AvailableMemory.GetValue(SizeUnit.Bytes);

    // shared memory-mapped memory is tracked separately, since the OS can reclaim
    // it cheaply; we therefore also report process usage excluding it
    var mappedSharedMem = LowMemoryNotification.GetCurrentProcessMemoryMappedShared();
    var shared = mappedSharedMem.GetValue(SizeUnit.Bytes);

    var cpuInfo = CpuUsage.Calculate();

    var machineResources = new MachineResources
    {
        TotalMemory = installedMemory,
        MachineMemoryUsage = installedMemory - availableMemory,
        ProcessMemoryUsage = workingSet,
        ProcessMemoryExcludingSharedUsage = Math.Max(workingSet - shared, 0),
        MachineCpuUsage = cpuInfo.MachineCpuUsage,
        ProcessCpuUsage = cpuInfo.ProcessCpuUsage
    };

    return machineResources;
}
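// Hedged sketch: the Linux branch above relies on MemoryInformation.GetRssMemoryUsage,
// whose implementation is not shown here. One plausible approach (an assumption, not
// necessarily what the real method does) is to parse the VmRSS line of /proc/<pid>/status;
// the RssProbeSketch/GetRssBytes names below are hypothetical.
public static class RssProbeSketch
{
    public static long GetRssBytes(int pid)
    {
        // /proc/<pid>/status contains a line such as "VmRSS:    123456 kB"
        foreach (var line in System.IO.File.ReadLines($"/proc/{pid}/status"))
        {
            if (line.StartsWith("VmRSS:", StringComparison.Ordinal) == false)
                continue;

            var parts = line.Split(new[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries);
            return long.Parse(parts[1]) * 1024; // the value is reported in kB
        }
        return 0; // no VmRSS line (e.g. not running on Linux)
    }
}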
public static IDisposable GetProcessMemoryUsage(out ProcessMemoryUsage currentUsage, out Size mappedSharedMem)
{
    var currentProcess = Process.GetCurrentProcess();

    // a lot of the memory that we use is actually from memory mapped files, as such, we can
    // rely on the OS to page it out (without needing to write, since it is read only in this case)
    // so we try to calculate how much such memory we can use with this assumption
    mappedSharedMem = LowMemoryNotification.GetCurrentProcessMemoryMappedShared();

    currentUsage = new ProcessMemoryUsage(currentProcess.WorkingSet64,
        Math.Max(0, currentProcess.WorkingSet64 - mappedSharedMem.GetValue(SizeUnit.Bytes)));

    // the Process instance is the returned IDisposable; the caller owns its disposal
    return currentProcess;
}
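// Usage sketch (hypothetical call site): because the method hands the Process instance
// back as the IDisposable, the out values can be consumed inside a using statement while
// the process handle is still released deterministically.
using (GetProcessMemoryUsage(out var currentUsage, out var mappedSharedMem))
{
    // mappedSharedMem is the portion the OS can evict cheaply (read-only mmap);
    // currentUsage pairs the full working set with the working set minus that portion
    Console.WriteLine($"~{mappedSharedMem} of the working set is cheap for the OS to evict");
}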
private void GrowArena(int requestedSize)
{
    if (_lowMemoryFlag)
    {
        throw new LowMemoryException($"Cannot grow the arena by {requestedSize} because we are under memory pressure");
    }

    if (requestedSize >= MaxArenaSize)
    {
        throw new ArgumentOutOfRangeException(nameof(requestedSize));
    }

    LowMemoryNotification.NotifyAllocationPending();

    // we need the next allocation to cover at least the next expansion (also doubling),
    // so we'll allocate 3 times as much as was requested (rounded up to a power of two),
    // or the initial arena size, whichever is larger; the idea is that a single
    // allocation can serve multiple (increasingly large) calls
    long newSize = Math.Max(Bits.NextPowerOf2(requestedSize) * 3, _initialSize);
    if (newSize > MaxArenaSize)
    {
        newSize = MaxArenaSize;
    }

    NativeMemory.ThreadStats thread;
    var newBuffer = NativeMemory.AllocateMemory(newSize, out thread);

    // save the old buffer pointer to be released when the arena is reset
    if (_olderBuffers == null)
    {
        _olderBuffers = new List<Tuple<IntPtr, long, NativeMemory.ThreadStats>>();
    }
    _olderBuffers.Add(Tuple.Create(new IntPtr(_ptrStart), _allocated, _allocatingThread));

    _allocatingThread = thread;
    _allocated = newSize;
    _ptrStart = newBuffer;
    _ptrCurrent = _ptrStart;
    _used = 0;
}
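// Hedged sketch of the power-of-two rounding that GrowArena relies on. The real
// Bits.NextPowerOf2 may be implemented differently; this is the classic bit-smearing
// version, assuming the input is >= 1 (smallest power of two >= v).
private static long NextPowerOf2Sketch(long v)
{
    v--;            // so that exact powers of two map to themselves
    v |= v >> 1;    // smear the highest set bit over all lower bits...
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    return v + 1;   // ...then add one to reach the next power of two
}

// Example: a 5,000 byte request grows the arena by NextPowerOf2Sketch(5000) * 3
// = 8,192 * 3 = 24,576 bytes (unless _initialSize or MaxArenaSize dominates).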
public static byte* AllocateMemory(long size, out ThreadStats thread)
{
    thread = ThreadAllocations.Value;

    // allocating when there isn't enough commit charge available is dangerous: on Linux,
    // the OOM killer will try to kill us, and on Windows we might run into memory
    // allocation failures that are no fun, so let's try to avoid it explicitly.
    // This is not expected to be called frequently, since we are caching the memory used here
    LowMemoryNotification.AssertNotAboutToRunOutOfMemory();

    try
    {
        var ptr = (byte*)Marshal.AllocHGlobal((IntPtr)size).ToPointer();
        thread.Allocations += size;
        Interlocked.Add(ref _totalAllocatedMemory, size);
        return ptr;
    }
    catch (OutOfMemoryException e)
    {
        return ThrowFailedToAllocate(size, thread, e);
    }
}
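// Hedged sketch of the release-side counterpart (hypothetical; the real free path in
// NativeMemory may differ): whatever frees the pointer must mirror both counters that
// AllocateMemory increments, or the per-thread and global totals will drift.
public static unsafe void FreeMemorySketch(byte* ptr, long size, ThreadStats thread)
{
    Marshal.FreeHGlobal(new IntPtr(ptr));
    thread.Allocations -= size;                          // per-thread counter
    Interlocked.Add(ref _totalAllocatedMemory, -size);   // global counter
}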
public static bool TryIncreasingMemoryUsageForThread(NativeMemory.ThreadStats threadStats,
    ref Size currentMaximumAllowedMemory,
    Size currentlyInUse,
    bool isRunningOn32Bits,
    Logger logger,
    out ProcessMemoryUsage currentUsage)
{
    if (isRunningOn32Bits)
    {
        currentUsage = null;
        return false;
    }

    // we ran out of our memory quota, so we need to see if we can increase it or break
    var memoryInfoResult = MemoryInformation.GetMemoryInfo();
    using (var currentProcess = Process.GetCurrentProcess())
    {
        // a lot of the memory that we use is actually from memory mapped files, as such, we can
        // rely on the OS to page it out (without needing to write, since it is read only in this case)
        // so we try to calculate how much such memory we can use with this assumption
        var mappedSharedMem = LowMemoryNotification.GetCurrentProcessMemoryMappedShared();
        currentUsage = new ProcessMemoryUsage(currentProcess.WorkingSet64,
            Math.Max(0, currentProcess.WorkingSet64 - mappedSharedMem.GetValue(SizeUnit.Bytes)));

        var memoryAssumedFreeOrCheapToFree = memoryInfoResult.AvailableMemory + mappedSharedMem;

        // there isn't enough available memory to try, we want to leave some out for other things
        if (memoryAssumedFreeOrCheapToFree <
            Size.Min(memoryInfoResult.TotalPhysicalMemory / 50, new Size(1, SizeUnit.Gigabytes)))
        {
            if (logger.IsInfoEnabled)
            {
                logger.Info(
                    $"{threadStats.Name} which is already using {currentlyInUse}/{currentMaximumAllowedMemory} and the system has " +
                    $"{memoryInfoResult.AvailableMemory}/{memoryInfoResult.TotalPhysicalMemory} free RAM. Also have ~{mappedSharedMem} in mmap " +
                    "files that can be cleanly released, not enough to proceed in batch.");
            }
            return false;
        }

        // if there isn't enough free memory to cover the current budget again, we won't
        // allocate any more; we do the check this way to prevent multiple indexes from
        // hitting this at the same time and each thinking that it has enough space
        if (memoryAssumedFreeOrCheapToFree < currentMaximumAllowedMemory)
        {
            if (logger.IsInfoEnabled)
            {
                logger.Info(
                    $"{threadStats.Name} which is already using {currentlyInUse}/{currentMaximumAllowedMemory} and the system has " +
                    $"{memoryInfoResult.AvailableMemory}/{memoryInfoResult.TotalPhysicalMemory} free RAM. Also have ~{mappedSharedMem} in mmap " +
                    "files that can be cleanly released, not enough to proceed in batch.");
            }
            return false;
        }

        // even though we have twice as much memory as we have currently allocated, we will
        // only increment the budget by 16MB to avoid over-allocation by multiple indexes;
        // this way, we'll check often as we go along
        var oldBudget = currentMaximumAllowedMemory;
        currentMaximumAllowedMemory = currentlyInUse + new Size(16, SizeUnit.Megabytes);

        if (logger.IsInfoEnabled)
        {
            logger.Info(
                $"Increasing memory budget for {threadStats.Name} which is using {currentlyInUse}/{oldBudget} and the system has " +
                $"{memoryInfoResult.AvailableMemory}/{memoryInfoResult.TotalPhysicalMemory} free RAM with {mappedSharedMem} in mmap " +
                $"files that can be cleanly released. Budget increased to {currentMaximumAllowedMemory}");
        }

        return true;
    }
}
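// Worked example of the two gates above (illustrative numbers only): before granting
// another 16 MB, free-plus-evictable memory must clear both a machine-wide reserve of
// min(TotalPhysicalMemory / 50, 1 GB), i.e. 2% of RAM capped at 1 GB, and the thread's
// current budget.
var totalPhysicalMemory = new Size(64, SizeUnit.Gigabytes);
var reserve = Size.Min(totalPhysicalMemory / 50, new Size(1, SizeUnit.Gigabytes));
// 64 GB / 50 = 1.28 GB, so the 1 GB cap wins; on an 8 GB machine the 2% rule
// (~164 MB) would apply instead.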