// Schedules <paramref name="action"/> on a long-running pooled thread.
// An idle thread is reused when available; otherwise a new background thread is
// started (after checking we are not about to run out of memory, since thread
// creation itself commits memory for the stack).
public LongRunningWork LongRunning(Action<object> action, object state, string name)
{
    if (!_pool.TryDequeue(out var pooled))
    {
        MemoryInformation.AssertNotAboutToRunOutOfMemory();
        pooled = new PooledThread(this);

        // On 32-bit processes address space is scarce, so request a small
        // 512 KB stack; 0 lets the runtime choose its default stack size.
        var stackSize = PlatformDetails.Is32Bits ? 512 * Constants.Size.Kilobyte : 0;
        var thread = new Thread(pooled.Run, stackSize)
        {
            Name = name,
            IsBackground = true,
        };
        thread.Start();
    }

    pooled.StartedAt = DateTime.UtcNow;
    return pooled.SetWorkForThread(action, state, name);
}
// Schedules <paramref name="action"/> on a long-running pooled thread, reusing an
// idle pooled thread when possible and only creating a fresh background thread
// when the pool is empty.
public LongRunningWork LongRunning(Action<object> action, object state, string name)
{
    if (!_pool.TryDequeue(out var pooled))
    {
        // Creating a brand-new thread commits memory; bail out early if we are
        // below the configured free committed-memory threshold.
        MemoryInformation.AssertNotAboutToRunOutOfMemory(_minimumFreeCommittedMemory);

        pooled = new PooledThread(this);
        new Thread(pooled.Run)
        {
            Name = name,
            IsBackground = true,
        }.Start();
    }

    pooled.StartedAt = DateTime.UtcNow;
    return pooled.SetWorkForThread(action, state, name);
}
// Allocates <paramref name="size"/> bytes of unmanaged memory and records the
// allocation against the calling thread's stats (returned via
// <paramref name="thread"/>). Returns a raw pointer to the new block.
public static byte* AllocateMemory(long size, out ThreadStats thread)
{
    thread = ThreadAllocations.Value;

    // Allocating when there isn't enough commit charge available is dangerous:
    // on Linux the OOM killer may pick us, and on Windows we can hit ugly
    // allocation failures — so check explicitly up front. Callers cache the
    // memory allocated here, so this is not expected to run frequently.
    MemoryInformation.AssertNotAboutToRunOutOfMemory(_minimumFreeCommittedMemory);

    try
    {
        var allocated = (byte*)Marshal.AllocHGlobal((IntPtr)size).ToPointer();
        thread.Allocations += size;
        return allocated;
    }
    catch (OutOfMemoryException e)
    {
        return ThrowFailedToAllocate(size, thread, e);
    }
}
// Delegates the low-memory assertion to the shared MemoryInformation helper.
public override void AssertNotAboutToRunOutOfMemory() => MemoryInformation.AssertNotAboutToRunOutOfMemory();