/// <summary>
/// Attempts to pin the buffer of the underlying value and create a native OnnxValue over it.
/// The pinned <see cref="MemoryHandle"/> is returned through <paramref name="pinnedMemoryHandle"/>
/// and must be kept alive for as long as the native OnnxValue is in use, then disposed by the caller.
/// The element type is inferred by probing the supported Tensor&lt;T&gt; instantiations in turn.
/// </summary>
/// <param name="onnxValue">Receives the handle of the newly created native OnnxValue.</param>
/// <param name="pinnedMemoryHandle">Receives the handle pinning the managed buffer; dispose it after the native value is released.</param>
/// <exception cref="NotSupportedException">Thrown when the value is not a tensor of a supported element type.</exception>
internal void ToNativeOnnxValue(out IntPtr onnxValue, out MemoryHandle pinnedMemoryHandle)
{
    TensorElementType nativeElementType = TensorElementType.DataTypeMax; // invalid sentinel
    IntPtr dataBufferPointer = IntPtr.Zero;
    int dataBufferLength = 0;
    ReadOnlySpan<int> shape = null;
    int rank = 0;
    onnxValue = IntPtr.Zero;

    // Probe the supported element types in order; short-circuit || stops at the
    // first successful pin, preserving the original if/else-if chain's semantics.
    bool pinned =
        TryPinAsTensor<float>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<double>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<int>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<uint>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<long>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<ulong>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<short>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<ushort>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<byte>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType) ||
        TryPinAsTensor<bool>(out pinnedMemoryHandle, out dataBufferPointer, out dataBufferLength, out shape, out rank, out nativeElementType);
    //TODO: add other types

    if (!pinned)
    {
        // Nothing to clean up here, since no memory has been pinned.
        throw new NotSupportedException("The inference value " + nameof(_value) + " is not of a supported type");
    }

    Debug.Assert(dataBufferPointer != IntPtr.Zero, "dataBufferPointer must be non-null after obtaining the pinned buffer");

    // Copy the shape to a ulong[] to match the native size_t[] parameter.
    ulong[] longShape = new ulong[rank];
    for (int i = 0; i < rank; i++)
    {
        longShape[i] = (ulong)shape[i];
    }

    IntPtr status = NativeMethods.ONNXRuntimeCreateTensorWithDataAsONNXValue(
        NativeMemoryAllocatorInfo.DefaultInstance.Handle,
        dataBufferPointer,
        (ulong)(dataBufferLength),
        longShape,
        (ulong)rank,
        nativeElementType,
        out onnxValue);

    try
    {
        NativeApiStatus.VerifySuccess(status);
    }
    catch (OnnxRuntimeException)
    {
        // Creation failed: release the pin before propagating so the buffer is not leaked.
        pinnedMemoryHandle.Dispose();
        throw; // `throw;` (not `throw e;`) preserves the original stack trace
    }
}