Esempio n. 1
0
        /// <summary>
        /// Creates session options configured for CPU execution.
        /// </summary>
        /// <returns>A new <see cref="ONNX.SessionOptions"/> suitable for CPU inference.</returns>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the CPU execution provider is not available.
        /// </exception>
        private static ONNX.SessionOptions _CreateOptionsCPU()
        {
            if (!GetProviders().Contains("CPUExecutionProvider"))
            {
                throw new InvalidOperationException("CPU device not supported");
            }

            // Default options are returned as-is. Explicitly appending the CPU
            // provider and tuning ExecutionMode / Intra-Op / Inter-Op thread
            // counts was measured to be slower and to block the machine, so the
            // unreachable tuning code that previously followed the return here
            // has been removed:
            //   options.ExecutionMode       = ONNX.ExecutionMode.ORT_PARALLEL;
            //   options.EnableMemoryPattern = true;
            //   options.IntraOpNumThreads   = 2;
            //   options.InterOpNumThreads   = 6;
            //   options.AppendExecutionProvider_CPU();
            return new ONNX.SessionOptions();
        }
Esempio n. 2
0
        /// <summary>
        /// Creates a wrapper around an <see cref="ONNX.InferenceSession"/> built from
        /// an in-memory model, caching input/output metadata and pre-building the
        /// named values used for inference.
        /// </summary>
        /// <param name="model">Serialized ONNX model bytes.</param>
        /// <param name="options">Session options used to create the underlying session.</param>
        internal OnnxSession(Byte[] model, ONNX.SessionOptions options)
        {
            _Session = new ONNX.InferenceSession(model, options);

            // Snapshot the metadata dictionaries once so later lookups are array scans.
            _InputMeta  = _Session.InputMetadata.ToArray();
            _OutputMeta = _Session.OutputMetadata.ToArray();

            // Pre-build one named value per input/output (name comes from the metadata key).
            _Inputs      = _InputMeta.Select(pair => pair.Value.CreateNamedOnnexValue(pair.Key)).ToArray();
            _Outputs     = _OutputMeta.Select(pair => pair.Value.CreateNamedOnnexValue(pair.Key)).ToArray();
            _OutputNames = _Outputs.Select(output => output.Name).ToArray();
        }
Esempio n. 3
0
        /// <summary>
        /// Creates session options configured for DirectML execution on the given device.
        /// </summary>
        /// <param name="deviceId">Index of the DirectML device to use.</param>
        /// <returns>A new <see cref="ONNX.SessionOptions"/> bound to the DirectML provider.</returns>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the DirectML execution provider is not available.
        /// </exception>
        private static ONNX.SessionOptions CreateOptionsDirectML(int deviceId)
        {
            var available = GetProviders();

            if (!available.Contains("DmlExecutionProvider"))
            {
                throw new InvalidOperationException("DirectML device not supported");
            }

            var options = new ONNX.SessionOptions();
            options.AppendExecutionProvider_DML(deviceId);

            return options;
        }
Esempio n. 4
0
        /// <summary>
        /// Creates session options for the requested device, attaching the DirectML
        /// provider when it is available and falling back to default (CPU) options
        /// otherwise.
        /// </summary>
        /// <param name="deviceId">Index of the device to bind the DirectML provider to.</param>
        /// <returns>A new <see cref="ONNX.SessionOptions"/>.</returns>
        private static ONNX.SessionOptions _CreateOptionsDevice(int deviceId)
        {
            var options = new ONNX.SessionOptions();

            var providers = ONNX.OrtEnv.Instance().GetAvailableProviders();

            if (providers.Contains("DmlExecutionProvider"))
            {
                // Fixed: deviceId was previously ignored (a hard-coded 0 was passed).
                options.AppendExecutionProvider_DML(deviceId);
            }

            // CUDA / TensorRT alternatives, should they be needed:
            //   ONNX.SessionOptions.MakeSessionOptionWithCudaProvider(deviceId)
            //   ONNX.SessionOptions.MakeSessionOptionWithTensorrtProvider(deviceId)
            return options;
        }
Esempio n. 5
0
 /// <summary>
 /// Constructs an InferenceSession from model data held in a byte array,
 /// using the caller-supplied session options (the caller retains ownership
 /// of <paramref name="options"/> and is responsible for disposing it).
 /// </summary>
 /// <param name="model">Serialized model bytes.</param>
 /// <param name="options">Session options to configure the session with.</param>
 public InferenceSession(byte[] model, SessionOptions options) => Init(model, options);
Esempio n. 6
0
 /// <summary>
 /// Constructs an InferenceSession from model data held in a byte array,
 /// using default session options.
 /// </summary>
 /// <param name="model">Serialized model bytes.</param>
 public InferenceSession(byte[] model)
 {
     // The options instance created here is owned by this session and must be
     // disposed together with it.
     Init(model, _builtInSessionOptions = new SessionOptions());
 }
Esempio n. 7
0
 /// <summary>
 /// Constructs an InferenceSession from a model file on disk, using the
 /// caller-supplied session options (the caller retains ownership of
 /// <paramref name="options"/> and is responsible for disposing it).
 /// </summary>
 /// <param name="modelPath">Path of the model file to load.</param>
 /// <param name="options">Session options to configure the session with.</param>
 public InferenceSession(string modelPath, SessionOptions options) => Init(modelPath, options);
Esempio n. 8
0
 /// <summary>
 /// Constructs an InferenceSession from a model file on disk, using default
 /// session options.
 /// </summary>
 /// <param name="modelPath">Path of the model file to load.</param>
 public InferenceSession(string modelPath)
 {
     // The options instance created here is owned by this session and must be
     // disposed together with it.
     Init(modelPath, _builtInSessionOptions = new SessionOptions());
 }
Esempio n. 9
0
 /// <summary>
 /// A helper method to construct a SessionOptions object for Nuphar execution.
 /// Use only if you have the onnxruntime package specific to this Execution Provider.
 /// </summary>
 /// <param name="settings">settings string, comprises of comma separated key:value pairs. default is empty</param>
 /// <returns>A SessionsOptions() object configured for execution with Nuphar</returns>
 public static SessionOptions MakeSessionOptionWithNupharProvider(String settings = "")
 {
     SessionOptions options = new SessionOptions();
     try
     {
         NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Nuphar(options.Handle, 1, settings));
     }
     catch
     {
         // Fix: dispose the partially-configured options when the native append
         // fails; otherwise the SessionOptions (and its native handle) leaks.
         options.Dispose();
         throw;
     }
     return options;
 }