/// <summary>
/// Runs the loaded model for the given inputs, fetching the outputs named in
/// <paramref name="outputNames"/> and appending them to <paramref name="result"/>.
/// </summary>
/// <param name="inputs">Collection of <see cref="NamedOnnxValue"/> input values.</param>
/// <param name="outputNames">Names of the outputs to fetch.</param>
/// <param name="options">RunOptions used for this run.</param>
/// <param name="result">List that receives one <see cref="DisposableNamedOnnxValue"/> per requested
/// output. Caller owns (and must dispose) the values added here.</param>
public unsafe void Run(
    IReadOnlyCollection<NamedOnnxValue> inputs,
    IReadOnlyCollection<string> outputNames,
    RunOptions options,
    IList<DisposableNamedOnnxValue> result)
{
    string[] outputNamesArray = GetOutputNames((IReadOnlyList<string>)outputNames);
    // Native output handles are received into stack memory; ownership transfers to
    // the DisposableNamedOnnxValue wrappers created below on success.
    var outputValueArray = stackalloc IntPtr[outputNamesArray.Length];
    Run(inputs, options, outputNamesArray, outputValueArray);
    try
    {
        for (int i = 0; i < outputNamesArray.Length; i++)
        {
            result.Add(DisposableNamedOnnxValue.CreateFromOnnxValue(outputNamesArray[i], outputValueArray[i]));
        }
    }
    catch (OnnxRuntimeException)
    {
        // Release any native output values that were not wrapped before the failure.
        OrtReleaseValues(outputValueArray, outputNamesArray.Length);
        throw; // rethrow with `throw;` to preserve the original stack trace
    }
}
/// <summary>
/// Runs the loaded model for the given inputs, and fetches the specified outputs in
/// <paramref name="outputNames"/>. Uses the given RunOptions for this run.
/// </summary>
/// <param name="inputNames">Collection of input names. Must match <paramref name="inputValues"/> in count and order.</param>
/// <param name="inputValues">Collection of <see cref="FixedBufferOnnxValue"/> input values.</param>
/// <param name="outputNames">Names of the outputs to fetch.</param>
/// <param name="options">RunOptions used for this run.</param>
/// <returns>Output tensors as a collection of DisposableNamedOnnxValue. User must dispose the output.</returns>
/// <exception cref="ArgumentException">Thrown when input name and value counts differ.</exception>
public IDisposableReadOnlyCollection<DisposableNamedOnnxValue> Run(
    IReadOnlyCollection<string> inputNames,
    IReadOnlyCollection<FixedBufferOnnxValue> inputValues,
    IReadOnlyCollection<string> outputNames,
    RunOptions options)
{
    if (inputNames.Count != inputValues.Count)
    {
        throw new ArgumentException($"Length of {nameof(inputNames)} ({inputNames.Count}) must match that of {nameof(inputValues)} ({inputValues.Count}).");
    }

    // Prepare inputs: FixedBufferOnnxValue already owns pinned native values, so we
    // only gather the native handles — no per-run pinning or cleanup is needed here.
    string[] inputNamesArray = inputNames as string[] ?? inputNames.ToArray();
    IntPtr[] inputValuesArray = new IntPtr[inputNames.Count];
    int inputIndex = 0;
    foreach (var input in inputValues)
    {
        inputValuesArray[inputIndex] = input.Value;
        inputIndex++;
    }

    // Prepare outputs.
    string[] outputNamesArray = outputNames as string[] ?? outputNames.ToArray();
    IntPtr[] outputValuesArray = new IntPtr[outputNames.Count];

    IntPtr status = NativeMethods.OrtRun(
        _nativeHandle,
        options.Handle,
        inputNamesArray,
        inputValuesArray,
        (UIntPtr)inputNames.Count,
        outputNamesArray,
        (UIntPtr)outputNames.Count,
        outputValuesArray /* Empty array is passed in to receive output OrtValue pointers */
        );

    try
    {
        NativeApiStatus.VerifySuccess(status);
        var result = new DisposableList<DisposableNamedOnnxValue>(outputValuesArray.Length);
        for (int i = 0; i < outputValuesArray.Length; i++)
        {
            result.Add(DisposableNamedOnnxValue.CreateFromOnnxValue(outputNamesArray[i], outputValuesArray[i]));
        }
        return result;
    }
    catch (OnnxRuntimeException)
    {
        // Clean up the individual output tensors if they are not null.
        for (int i = 0; i < outputValuesArray.Length; i++)
        {
            if (outputValuesArray[i] != IntPtr.Zero)
            {
                NativeMethods.OrtReleaseValue(outputValuesArray[i]);
            }
        }
        throw; // rethrow with `throw;` to preserve the original stack trace
    }
}
/// <summary>
/// Runs the loaded model for the given inputs, and fetches the specified outputs in
/// <paramref name="outputNames"/>. Uses the given RunOptions for this run.
/// </summary>
/// <param name="inputs">Collection of <see cref="NamedOnnxValue"/> input values.</param>
/// <param name="outputNames">Names of the outputs to fetch.</param>
/// <param name="options">RunOptions used for this run.</param>
/// <returns>Output tensors as a collection of DisposableNamedOnnxValue. User must dispose the output.</returns>
public IDisposableReadOnlyCollection<DisposableNamedOnnxValue> Run(
    IReadOnlyCollection<NamedOnnxValue> inputs,
    IReadOnlyCollection<string> outputNames,
    RunOptions options)
{
    // Prepare inputs: each NamedOnnxValue is converted to a native OrtValue.
    // disposeInputs[i] records whether this method owns the native value (and its
    // pinned buffer) and must release it in the finally block below.
    var inputNamesArray = new string[inputs.Count];
    var inputValuesArray = new IntPtr[inputs.Count];
    var pinnedInputBufferHandles = new System.Buffers.MemoryHandle[inputs.Count];
    var disposeInputs = new bool[inputs.Count];

    int inputIndex = 0;
    foreach (var input in inputs)
    {
        inputNamesArray[inputIndex] = input.Name;

        // Create a native tensor from the input if feasible, else throw NotSupportedException for now.
        input.ToNativeOnnxValue(
            out inputValuesArray[inputIndex],
            out pinnedInputBufferHandles[inputIndex],
            out disposeInputs[inputIndex]);

        inputIndex++;
    }

    // Prepare outputs.
    string[] outputNamesArray = outputNames as string[] ?? outputNames.ToArray();
    IntPtr[] outputValuesArray = new IntPtr[outputNames.Count];

    IntPtr status = NativeMethods.OrtRun(
        _nativeHandle,
        options.Handle,
        inputNamesArray,
        inputValuesArray,
        (UIntPtr)inputs.Count,
        outputNamesArray,
        (UIntPtr)outputNames.Count,
        outputValuesArray /* Empty array is passed in to receive output OrtValue pointers */
        );

    try
    {
        NativeApiStatus.VerifySuccess(status);
        var result = new DisposableList<DisposableNamedOnnxValue>(outputValuesArray.Length);
        for (int i = 0; i < outputValuesArray.Length; i++)
        {
            result.Add(DisposableNamedOnnxValue.CreateFromOnnxValue(outputNamesArray[i], outputValuesArray[i]));
        }
        return result;
    }
    catch (OnnxRuntimeException)
    {
        // Clean up the individual output tensors if they are not null.
        for (int i = 0; i < outputValuesArray.Length; i++)
        {
            if (outputValuesArray[i] != IntPtr.Zero)
            {
                NativeMethods.OrtReleaseValue(outputValuesArray[i]);
            }
        }
        throw; // rethrow with `throw;` to preserve the original stack trace
    }
    finally
    {
        for (int i = 0; i < inputs.Count; i++)
        {
            if (disposeInputs[i])
            {
                NativeMethods.OrtReleaseValue(inputValuesArray[i]); // For elementary type Tensors, this should not release the buffer, but should delete the native tensor object.
                                                                    // For string tensors, this releases the native memory allocated for the tensor, including the buffer
                pinnedInputBufferHandles[i].Dispose();
            }
        }
    }
}
/// <summary>
/// Runs the loaded model for the given inputs, and fetches the specified outputs in
/// <paramref name="outputNames"/>.
/// </summary>
/// <param name="inputs">Collection of <see cref="NamedOnnxValue"/> input values.</param>
/// <param name="outputNames">Names of the outputs to fetch.</param>
/// <param name="options">Currently unused — the default native run options are applied (see TODO below).</param>
/// <returns>Output tensors as a collection of DisposableNamedOnnxValue.</returns>
//TODO: kept internal until RunOptions is made public
internal IDisposableReadOnlyCollection<DisposableNamedOnnxValue> Run(
    IReadOnlyCollection<NamedOnnxValue> inputs,
    IReadOnlyCollection<string> outputNames,
    RunOptions options)
{
    var inputNames = new string[inputs.Count];
    var inputTensors = new IntPtr[inputs.Count];
    var pinnedBufferHandles = new System.Buffers.MemoryHandle[inputs.Count];

    int inputIndex = 0;
    foreach (var input in inputs)
    {
        inputNames[inputIndex] = input.Name;

        // Create a native tensor from the input if feasible, else throw NotSupportedException for now.
        input.ToNativeOnnxValue(out inputTensors[inputIndex], out pinnedBufferHandles[inputIndex]);

        inputIndex++;
    }

    string[] outputNamesArray = outputNames.ToArray();
    IntPtr[] outputValueArray = new IntPtr[outputNames.Count];

    IntPtr status = NativeMethods.OrtRun(
        this._nativeHandle,
        IntPtr.Zero, // TODO: use Run options when Run options creation API is available
                     // Passing null uses the default run options in the C-api
        inputNames,
        inputTensors,
        (UIntPtr)inputTensors.Length,
        outputNamesArray,
        (UIntPtr)outputNames.Count,
        outputValueArray /* An array of output value pointers. Array must be allocated by the caller */
        );

    try
    {
        NativeApiStatus.VerifySuccess(status);
        var result = new DisposableList<DisposableNamedOnnxValue>(outputValueArray.Length);
        for (int i = 0; i < outputValueArray.Length; i++)
        {
            result.Add(DisposableNamedOnnxValue.CreateFromOnnxValue(outputNamesArray[i], outputValueArray[i]));
        }
        return result;
    }
    catch (OnnxRuntimeException)
    {
        // Clean up the individual output tensors if they are not null.
        for (int i = 0; i < outputValueArray.Length; i++)
        {
            if (outputValueArray[i] != IntPtr.Zero)
            {
                NativeMethods.OrtReleaseValue(outputValueArray[i]);
            }
        }
        throw; // rethrow with `throw;` to preserve the original stack trace
    }
    finally
    {
        // Always unpin the input buffers, and delete the native Onnx value objects.
        for (int i = 0; i < inputs.Count; i++)
        {
            NativeMethods.OrtReleaseValue(inputTensors[i]); // this should not release the buffer, but should delete the native tensor object
            pinnedBufferHandles[i].Dispose();
        }
    }
}
/// <summary>
/// Runs the loaded model for the given inputs, and fetches the specified outputs in
/// <paramref name="outputNames"/>. Uses the given RunOptions for this run.
/// </summary>
/// <param name="inputs">Collection of <see cref="NamedOnnxValue"/> input values.</param>
/// <param name="outputNames">Names of the outputs to fetch.</param>
/// <param name="options">RunOptions used for this run.</param>
/// <returns>Output tensors as a collection of DisposableNamedOnnxValue. User must dispose the output.</returns>
public IDisposableReadOnlyCollection<DisposableNamedOnnxValue> Run(
    IReadOnlyCollection<NamedOnnxValue> inputs,
    IReadOnlyCollection<string> outputNames,
    RunOptions options)
{
    var inputNames = new string[inputs.Count];
    var inputTensors = new IntPtr[inputs.Count];
    var pinnedBufferHandles = new System.Buffers.MemoryHandle[inputs.Count];

    int inputIndex = 0;
    foreach (var input in inputs)
    {
        inputNames[inputIndex] = input.Name;

        // Create a native tensor from the input if feasible, else throw NotSupportedException for now.
        input.ToNativeOnnxValue(out inputTensors[inputIndex], out pinnedBufferHandles[inputIndex]);

        inputIndex++;
    }

    string[] outputNamesArray = outputNames.ToArray();
    IntPtr[] outputValueArray = new IntPtr[outputNames.Count];

    IntPtr status = NativeMethods.OrtRun(
        this._nativeHandle,
        options.Handle,
        inputNames,
        inputTensors,
        (UIntPtr)inputTensors.Length,
        outputNamesArray,
        (UIntPtr)outputNames.Count,
        outputValueArray /* An array of output value pointers. Array must be allocated by the caller */
        );

    try
    {
        NativeApiStatus.VerifySuccess(status);
        var result = new DisposableList<DisposableNamedOnnxValue>(outputValueArray.Length);
        for (int i = 0; i < outputValueArray.Length; i++)
        {
            result.Add(DisposableNamedOnnxValue.CreateFromOnnxValue(outputNamesArray[i], outputValueArray[i]));
        }
        return result;
    }
    catch (OnnxRuntimeException)
    {
        // Clean up the individual output tensors if they are not null.
        for (int i = 0; i < outputValueArray.Length; i++)
        {
            if (outputValueArray[i] != IntPtr.Zero)
            {
                NativeMethods.OrtReleaseValue(outputValueArray[i]);
            }
        }
        throw; // rethrow with `throw;` to preserve the original stack trace
    }
    finally
    {
        inputIndex = 0;
        foreach (var input in inputs)
        {
            // For NamedOnnxValue, always unpin the input buffers, and delete the native Onnx value objects.
            // For DisposableNamedOnnxValue (a subclass), the user needs to do this by invoking Dispose,
            // hence the exact-type check rather than an `is` test.
            if (input.GetType() == typeof(NamedOnnxValue))
            {
                NativeMethods.OrtReleaseValue(inputTensors[inputIndex]); // For elementary type Tensors, this should not release the buffer, but should delete the native tensor object.
                                                                         // For string tensors, this releases the native memory allocated for the tensor, including the buffer
                pinnedBufferHandles[inputIndex].Dispose();
            }
            inputIndex++;
        }
    }
}