// The following represent what .NET bindings would look like for a native library that wishes
// to deal in tensors and allocate them from native code using native memory (for instance
// in memory accessible to the graphics card, or in its own buffer pool).
public static DenseTensor<double> GetMultiplicationTable(int maxNumber)
{
    Span<int> dimensions = stackalloc int[2];
    dimensions[0] = dimensions[1] = maxNumber;

    var nativeMemory = new NativeMemory<double>(GetMultTableAllocateNative(maxNumber), maxNumber * maxNumber);
    return new DenseTensor<double>(nativeMemory.Memory, dimensions);
}
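A minimal usage sketch for the binding above. It assumes GetMultTableAllocateNative fills the buffer row-major and that the NativeMemory<double> wrapper releases it when no longer referenced; PrintMultiplicationTable is a hypothetical caller, not part of the original sample.

public static void PrintMultiplicationTable(int maxNumber)
{
    DenseTensor<double> table = GetMultiplicationTable(maxNumber);

    // DenseTensor<T> exposes a multi-dimensional indexer over the native buffer.
    for (int row = 0; row < maxNumber; row++)
    {
        for (int column = 0; column < maxNumber; column++)
        {
            Console.Write($"{table[row, column],8}");
        }
        Console.WriteLine();
    }
}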
public void AllocZeroedElementCountTest()
{
    void* ptr = NativeMemory.AllocZeroed(1, 1);

    Assert.True(ptr != null);
    Assert.Equal(expected: 0, actual: ((byte*)ptr)[0]);

    NativeMemory.Free(ptr);
}
public void RenewArena()
{
    if (_ptrStart != null)
        return;

    _ptrStart = _ptrCurrent = NativeMemory.AllocateMemory(_allocated, out _allocatingThread);
    _used = 0;
}
public Win32FileJournalWriter(StorageEnvironmentOptions options, VoronPathSetting filename, long journalSize,
    Win32NativeFileAccess access = Win32NativeFileAccess.GenericWrite,
    Win32NativeFileShare shareMode = Win32NativeFileShare.Read)
{
    try
    {
        _options = options;
        _filename = filename;
        _handle = Win32NativeFileMethods.CreateFile(filename.FullPath,
            access, shareMode, IntPtr.Zero,
            Win32NativeFileCreationDisposition.OpenAlways, options.WinOpenFlags, IntPtr.Zero);

        if (_handle.IsInvalid)
            throw new IOException("When opening file " + filename, new Win32Exception(Marshal.GetLastWin32Error()));

        var length = new FileInfo(filename.FullPath).Length;
        if (length < journalSize)
        {
            try
            {
                Win32NativeFileMethods.SetFileLength(_handle, journalSize);
            }
            catch (Exception)
            {
                try
                {
                    _handle?.Dispose();
                    _handle = null;
                    File.Delete(_filename.FullPath);
                }
                catch (Exception)
                {
                    // there's nothing we can do about it
                }
                throw;
            }

            length = journalSize;
        }

        NumberOfAllocated4Kb = (int)(length / (4 * Constants.Size.Kilobyte));

        _nativeOverlapped = (NativeOverlapped*)NativeMemory.AllocateMemory(sizeof(NativeOverlapped));
        _nativeOverlapped->InternalLow = IntPtr.Zero;
        _nativeOverlapped->InternalHigh = IntPtr.Zero;
    }
    catch
    {
        Dispose();
        throw;
    }
}
public void AlignedAllocTest(uint alignment)
{
    void* ptr = NativeMemory.AlignedAlloc(1, alignment);

    Assert.True(ptr != null);
    Assert.True((nuint)ptr % alignment == 0);

    NativeMemory.AlignedFree(ptr);
}
public override void ReleaseAllocationInfo(byte* baseAddress, long size)
{
    base.ReleaseAllocationInfo(baseAddress, size);

    if (Win32MemoryMapNativeMethods.UnmapViewOfFile(baseAddress) == false)
        throw new Win32Exception(Marshal.GetLastWin32Error(), "Failed to UnMapView of file " + FileName);

    NativeMemory.UnregisterFileMapping(_fileInfo.FullName, new IntPtr(baseAddress), size);
}
public ArenaMemoryAllocator(SharedMultipleUseFlag lowMemoryFlag, int initialSize = 1024 * 1024)
{
    _initialSize = initialSize;
    _ptrStart = _ptrCurrent = NativeMemory.AllocateMemory(initialSize, out _allocatingThread);

    _allocated = initialSize;
    _used = 0;
    TotalUsed = 0;

    _lowMemoryFlag = lowMemoryFlag;
}
public void AlignedReallocNullPtrTest()
{
    void* ptr = NativeMemory.AlignedRealloc(null, 1, (uint)sizeof(nuint));

    Assert.True(ptr != null);
    Assert.True((nuint)ptr % (uint)sizeof(nuint) == 0);

    NativeMemory.AlignedFree(ptr);
}
private void Reserve(int count)
{
    if (count > _count)
    {
        FreeNativeMemory();
        _buffers = (QUIC_BUFFER*)NativeMemory.AllocZeroed((nuint)count, (nuint)sizeof(QUIC_BUFFER));
        _count = count;
    }
}
public void VoronEnvironmentFlushing()
{
    NativeMemory.EnsureRegistered();

    // We want this to always run, even if we dispose / create new storage env, this is
    // static for the life time of the process, and environments will register / unregister from
    // it as needed
    try
    {
        var avoidDuplicates = new HashSet<StorageEnvironment>();
        while (true)
        {
            avoidDuplicates.Clear();
            var maybeNeedSync = _maybeNeedToSync.Count;
            var millisecondsTimeout = 15000 - maybeNeedSync;
            if (millisecondsTimeout <= 0 ||
                _flushWriterEvent.Wait(millisecondsTimeout) == false)
            {
                if (_maybeNeedToSync.Count == 0)
                    continue;

                if (_log.IsInfoEnabled)
                {
                    _log.Info($"Starting desired sync with {_maybeNeedToSync.Count:#,#} items to sync after {millisecondsTimeout:#,#} ms with no activity");
                }

                // sync if the timeout elapsed with no flushing, or if there has been a LOT of
                // writes that we would like to run
                SyncDesiredEnvironments(avoidDuplicates);
                continue;
            }

            _flushWriterEvent.Reset();

            FlushEnvironments(avoidDuplicates);

            SyncRequiredEnvironments(avoidDuplicates);
        }
    }
    catch (Exception e)
    {
        if (_log.IsOperationsEnabled)
        {
            _log.Operations("Catastrophic failure in Voron environment flushing", e);
        }

        // wait for the message to be flushed to the logs
        Thread.Sleep(5000);

        // Note that we intentionally don't have error handling here.
        // If this code throws an exception that bubbles up to here, we WANT the process
        // to die, since we can't recover from the flusher thread dying.
        throw;
    }
    // ReSharper disable once FunctionNeverReturns
}
public unsafe void Dispose()
{
    if (_buffer != null)
    {
        NativeMemory.Free(_buffer);
        _buffer = null;
        _imageSize = 0;
    }
}
public void TestNativeAlloc()
{
    for (var i = 0; i < IterCount; i++)
    {
        var p = (byte*)NativeMemory.Alloc((nuint)Size);
        Consume(&p);
        NativeMemory.Free(p);
    }
}
public void AlignedAllocZeroSizeTest()
{
    void* ptr = NativeMemory.AlignedAlloc(0, (uint)sizeof(nuint));

    Assert.True(ptr != null);
    Assert.True((nuint)ptr % (uint)sizeof(nuint) == 0);

    NativeMemory.AlignedFree(ptr);
}
private static unsafe void CleanupScatterGatherBuffers(MemoryHandle[] handlesToDispose, IntPtr segmentsPtr)
{
    foreach (MemoryHandle handle in handlesToDispose)
    {
        handle.Dispose();
    }

    NativeMemory.Free((void*)segmentsPtr);
}
private void MergeOperationThreadProc()
{
    NativeMemory.EnsureRegistered();
    try
    {
        while (_runTransactions)
        {
            if (_operations.IsEmpty)
            {
                using (var generalMeter = GeneralWaitPerformanceMetrics.MeterPerformanceRate())
                {
                    generalMeter.IncrementCounter(1);
                    _waitHandle.Wait(_shutdown);
                }
                _waitHandle.Reset();
            }

            MergeTransactionsOnce();
        }
    }
    catch (OperationCanceledException)
    {
        // clean shutdown, nothing to do
    }
    catch (Exception e)
    {
        if (_log.IsOperationsEnabled)
        {
            _log.Operations(
                "Serious failure in transaction merging thread, the database must be restarted!",
                e);
        }

        Interlocked.Exchange(ref _edi, ExceptionDispatchInfo.Capture(e));

        // As a precaution, we make sure that anything still waiting on the
        // queue is notified about this catastrophic error, and we wait
        // just a bit more to verify that nothing racy can still get there
        for (int i = 0; i < 3; i++)
        {
            while (_operations.TryDequeue(out MergedTransactionCommand result))
            {
                result.Exception = e;
                NotifyOnThreadPool(result);
            }

            try
            {
                _waitHandle.Wait(50, _shutdown);
                _waitHandle.Reset();
            }
            catch (OperationCanceledException)
            {
                break;
            }
        }
    }
}
/**
 * Allocates a 128-byte-aligned memory block for binary serialized data.
 * Stores the pointer to the raw allocation in gMemBlocks for later deallocation.
 */
static void* createAlignedBlock(uint size)
{
    Debug.Assert(gMemBlockCount < MAX_MEMBLOCKS);

    byte* baseAddr = (byte*)NativeMemory.Alloc(size + PX_SERIAL_FILE_ALIGN - 1);
    gMemBlocks[gMemBlockCount++] = baseAddr;

    void* alignedBlock = (void*)(((nint)baseAddr + PX_SERIAL_FILE_ALIGN - 1) & ~(PX_SERIAL_FILE_ALIGN - 1));
    return alignedBlock;
}
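A related sketch, under the assumption that .NET 6+ is available: NativeMemory.AlignedAlloc can deliver the same 128-byte alignment directly, at the cost of having to release the block with AlignedFree instead of Free. CreateAlignedBlockViaAlignedAlloc is a hypothetical helper, and fileAlign stands in for PX_SERIAL_FILE_ALIGN.

static unsafe void* CreateAlignedBlockViaAlignedAlloc(nuint size)
{
    nuint fileAlign = 128; // stand-in for PX_SERIAL_FILE_ALIGN; must be a power of two

    // AlignedAlloc returns a pointer that already satisfies the requested alignment,
    // so no manual over-allocation and rounding is needed.
    void* block = NativeMemory.AlignedAlloc(size, fileAlign);
    Debug.Assert((nuint)block % fileAlign == 0);

    return block; // release with NativeMemory.AlignedFree(block)
}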
private void Resize()
{
    int newCapacity = data->capacity * 2;
    byte* newArr = NativeMemory.Alloc(data->structSize * newCapacity, NativeMemory.NativeMemoryType.RawList);

    NativeMemory.Copy(newArr, data->arrayPtr, data->structSize * data->capacity);
    NativeMemory.Free(data->arrayPtr);

    data->arrayPtr = newArr;
    data->capacity = newCapacity;
}
internal static unsafe void* CoTaskMemAllocAndZeroMemory(int size)
{
    byte* ptr = (byte*)Marshal.AllocCoTaskMem(size);

    // Marshal.AllocCoTaskMem will throw OOMException if out of memory
    Debug.Assert(ptr != null);

    NativeMemory.Clear(ptr, (uint)size);
    return ptr;
}
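A small, hypothetical caller for the helper above, only to show the expected lifetime: the zeroed block behaves like any other CoTaskMem allocation and still has to be released through Marshal.FreeCoTaskMem.

internal static unsafe void UseZeroedCoTaskMemBlock()
{
    byte* buffer = (byte*)CoTaskMemAllocAndZeroMemory(16);

    // Every byte starts out cleared, so downstream code can rely on zero-initialized state.
    Debug.Assert(buffer[0] == 0 && buffer[15] == 0);

    Marshal.FreeCoTaskMem((IntPtr)buffer);
}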
public void AlignedReallocZeroAlignmentTest()
{
    void* ptr = NativeMemory.AlignedAlloc(1, (uint)sizeof(nuint));

    Assert.True(ptr != null);
    Assert.True((nuint)ptr % (uint)sizeof(nuint) == 0);

    Assert.Throws<ArgumentException>(() => NativeMemory.AlignedRealloc(ptr, (uint)sizeof(nuint), 0));

    NativeMemory.AlignedFree(ptr);
}
private void ReleaseUnmanagedResources()
{
    foreach (var stack in _items)
    {
        while (stack.TryPop(out var allocation))
        {
            NativeMemory.Free4KbAlignedMemory((byte*)allocation.Ptr, allocation.Size, allocation.AllocatingThread);
        }
    }
}
public bool Read(byte* buffer, long numOfBytes, long offsetInFile)
{
    if (_readHandle == null)
    {
        var handle = Win32NativeFileMethods.CreateFile(_filename.FullPath,
            Win32NativeFileAccess.GenericRead,
            Win32NativeFileShare.Write | Win32NativeFileShare.Read | Win32NativeFileShare.Delete,
            IntPtr.Zero,
            Win32NativeFileCreationDisposition.OpenExisting,
            Win32NativeFileAttributes.Normal,
            IntPtr.Zero);

        if (handle.IsInvalid)
            throw new IOException("When opening file " + _filename, new Win32Exception(Marshal.GetLastWin32Error()));

        _readHandle = handle;
    }

    var nativeOverlapped = (NativeOverlapped*)NativeMemory.AllocateMemory(sizeof(NativeOverlapped));
    try
    {
        nativeOverlapped->OffsetLow = (int)(offsetInFile & 0xffffffff);
        nativeOverlapped->OffsetHigh = (int)(offsetInFile >> 32);
        nativeOverlapped->EventHandle = IntPtr.Zero;

        while (numOfBytes > 0)
        {
            if (Win32NativeFileMethods.ReadFile(_readHandle, buffer, (int)Math.Min(numOfBytes, int.MaxValue),
                    out int read, nativeOverlapped) == false)
            {
                int lastWin32Error = Marshal.GetLastWin32Error();
                if (lastWin32Error == Win32NativeFileMethods.ErrorHandleEof)
                    return false;

                if (lastWin32Error == Win32NativeFileMethods.ErrorInvalidHandle)
                    _readHandle = null;

                throw new Win32Exception(lastWin32Error,
                    $"Unable to read from {_filename}, error code: {lastWin32Error}");
            }

            numOfBytes -= read;
            buffer += read;
            offsetInFile += read;
            nativeOverlapped->OffsetLow = (int)(offsetInFile & 0xffffffff);
            nativeOverlapped->OffsetHigh = (int)(offsetInFile >> 32);
        }

        return true;
    }
    finally
    {
        NativeMemory.Free((byte*)nativeOverlapped, sizeof(NativeOverlapped));
    }
}
public void It_should_marshal_native_memory_to_JS()
{
    var ptr = Marshal.AllocHGlobal(4);
    Marshal.WriteByte(ptr, 0, 1);
    Marshal.WriteByte(ptr, 1, 2);
    Marshal.WriteByte(ptr, 2, 3);
    Marshal.WriteByte(ptr, 3, 4);

    using var memory = NativeMemory.Create(ptr, 4, (pointer, _) => Marshal.FreeHGlobal(pointer));

    Global.testObject.assertByteArray(memory);
}
public void ReallocZeroSizeTest()
{
    void* ptr = NativeMemory.Alloc(1);
    Assert.True(ptr != null);

    void* newPtr = NativeMemory.Realloc(ptr, 0);
    Assert.True(newPtr != null);

    NativeMemory.Free(newPtr);
}
public void AlignedReallocLessThanVoidPtrAlignmentTest()
{
    void* ptr = NativeMemory.AlignedAlloc(1, 1);
    Assert.True(ptr != null);

    void* newPtr = NativeMemory.AlignedRealloc(ptr, 1, 1);
    Assert.True(newPtr != null);

    NativeMemory.AlignedFree(newPtr);
}
private void Reserve(int count)
{
    if (_handles.Length < count)
    {
        _handles = new MemoryHandle[count];
        FreeNativeMemory();
        _buffers = (QUIC_BUFFER*)NativeMemory.Alloc((nuint)count, (nuint)sizeof(QUIC_BUFFER));
    }

    _count = count;
}
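A sketch of what the FreeNativeMemory counterpart used by both Reserve variants above could look like (the actual implementation is not shown in these snippets): the QUIC_BUFFER array came from NativeMemory.Alloc/AllocZeroed, so it is handed back to NativeMemory.Free exactly once and the cached count is reset.

private unsafe void FreeNativeMemory()
{
    QUIC_BUFFER* buffers = _buffers;
    _buffers = null;
    _count = 0;

    // NativeMemory.Free tolerates null, so calling this before the first Reserve is safe.
    NativeMemory.Free(buffers);
}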
/// <summary>
/// Creates and initializes a new <see cref="IDXGIFactory4As6Backcompat"/> instance.
/// </summary>
/// <param name="dxgiFactory4">The <see cref="IDXGIFactory4"/> instance to wrap.</param>
/// <param name="dxgiFactory6">The resulting <see cref="IDXGIFactory6"/> instance.</param>
public static void Create(IDXGIFactory4* dxgiFactory4, IDXGIFactory6** dxgiFactory6)
{
    IDXGIFactory4As6Backcompat* @this = (IDXGIFactory4As6Backcompat*)NativeMemory.Alloc((nuint)sizeof(IDXGIFactory4As6Backcompat));

    @this->lpVtbl = Vtbl;
    @this->dxgiFactory4 = dxgiFactory4;

    _ = dxgiFactory4->AddRef();

    *dxgiFactory6 = (IDXGIFactory6*)@this;
}
public static unsafe IntPtr GetGenericMethodFunctionPointer(IntPtr canonFunctionPointer, IntPtr instantiationArgument)
{
    if (instantiationArgument == IntPtr.Zero)
        return canonFunctionPointer;

    lock (s_genericFunctionPointerDictionary)
    {
        var key = new GenericMethodDescriptorInfo
        {
            MethodFunctionPointer = canonFunctionPointer,
            InstantiationArgument = instantiationArgument
        };

        uint index = 0;
        if (!s_genericFunctionPointerDictionary.TryGetValue(key, out index))
        {
            // Capture new index value
            index = s_genericFunctionPointerNextIndex;

            int newChunkIndex = (int)(index / c_genericDictionaryChunkSize);
            uint newSubChunkIndex = index % c_genericDictionaryChunkSize;

            // Generate new chunk if existing chunks are insufficient
            if (s_genericFunctionPointerCollection.Count <= newChunkIndex)
            {
                System.Diagnostics.Debug.Assert(newSubChunkIndex == 0);

                // New generic descriptors are allocated on the native heap and not tracked in the GC.
                IntPtr pNewMem = (IntPtr)NativeMemory.Alloc(c_genericDictionaryChunkSize, (nuint)sizeof(GenericMethodDescriptor));
                s_genericFunctionPointerCollection.Add(pNewMem);
            }

            ((GenericMethodDescriptor*)s_genericFunctionPointerCollection[newChunkIndex])[newSubChunkIndex] =
                new GenericMethodDescriptor(canonFunctionPointer, instantiationArgument);

            s_genericFunctionPointerDictionary.LookupOrAdd(key, index);

            // Now that we can no longer have failed, update the next index.
            s_genericFunctionPointerNextIndex++;
        }

        // Lookup within list
        int chunkIndex = (int)(index / c_genericDictionaryChunkSize);
        uint subChunkIndex = index % c_genericDictionaryChunkSize;
        GenericMethodDescriptor* genericFunctionPointer =
            &((GenericMethodDescriptor*)s_genericFunctionPointerCollection[chunkIndex])[subChunkIndex];

        System.Diagnostics.Debug.Assert(canonFunctionPointer == genericFunctionPointer->MethodFunctionPointer);
        System.Diagnostics.Debug.Assert(instantiationArgument == genericFunctionPointer->InstantiationArgument);

        return (IntPtr)((byte*)genericFunctionPointer + FatFunctionPointerOffset);
    }
}
public IOCompletionPoller(nint port)
{
    Debug.Assert(port != 0);
    _port = port;

    if (!UnsafeInlineIOCompletionCallbacks)
    {
        _nativeEvents = (Interop.Kernel32.OVERLAPPED_ENTRY*)
            NativeMemory.Alloc(NativeEventCapacity, (nuint)sizeof(Interop.Kernel32.OVERLAPPED_ENTRY));
        _events = new(default);
public MessageBufferHG(MessageBufferHG right)
{
    _wpos = right._wpos;
    _rpos = right._rpos;
    _size = right._size;

    _capacity = BitOperations.RoundUpToPowerOf2((uint)_size);
    _storage = NativeMemory.Alloc(_capacity);

    Buffer.MemoryCopy(right._storage, _storage, (ulong)_size, (ulong)_size);
}
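A hypothetical disposal counterpart for the copy constructor above (not part of the original snippet), assuming _storage is only ever produced by NativeMemory.Alloc: the buffer has to go back through NativeMemory.Free exactly once.

public unsafe void Dispose()
{
    if (_storage != null)
    {
        NativeMemory.Free(_storage);
        _storage = null;

        _capacity = 0;
        _size = _rpos = _wpos = 0;
    }
}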
public void FillEmptyMemoryBlockShouldNoOpTest()
{
    void* source = stackalloc byte[7] { 0, 0, 0, 0, 0, 0, 0 };

    // Filling zero bytes is a no-op, so no 42 should appear anywhere in the block.
    NativeMemory.Fill(source, 0, 42);

    Assert.Equal(-1, new Span<byte>(source, 7).IndexOf<byte>(42));
}