Example #1
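Configures the CUDA execution provider through an OrtCUDAProviderOptions string dictionary, reads the options back through the native API to check that they were applied, and then builds a SessionOptions from them and runs SqueezeNet.
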
        private void TestCUDAProviderOptions()
        {
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet.onnx");

            using (var cleanUp = new DisposableListTest<IDisposable>())
            {
                var cudaProviderOptions = new OrtCUDAProviderOptions();
                cleanUp.Add(cudaProviderOptions);

                var providerOptionsDict = new Dictionary<string, string>();
                providerOptionsDict["device_id"]                    = "0";
                providerOptionsDict["gpu_mem_limit"]                = "20971520";
                providerOptionsDict["arena_extend_strategy"]        = "kSameAsRequested";
                providerOptionsDict["cudnn_conv_algo_search"]       = "DEFAULT";
                providerOptionsDict["do_copy_in_default_stream"]    = "1";
                providerOptionsDict["cudnn_conv_use_max_workspace"] = "1";
                providerOptionsDict["cudnn_conv1d_pad_to_nc1d"]     = "1";
                cudaProviderOptions.UpdateOptions(providerOptionsDict);

                var resultProviderOptionsDict = new Dictionary<string, string>();
                ProviderOptionsValueHelper.StringToDict(cudaProviderOptions.GetOptions(), resultProviderOptionsDict);

                // test provider options configuration
                string value;
                value = resultProviderOptionsDict["device_id"];
                Assert.Equal("0", value);
                value = resultProviderOptionsDict["gpu_mem_limit"];
                Assert.Equal("20971520", value);
                value = resultProviderOptionsDict["arena_extend_strategy"];
                Assert.Equal("kSameAsRequested", value);
                value = resultProviderOptionsDict["cudnn_conv_algo_search"];
                Assert.Equal("DEFAULT", value);
                value = resultProviderOptionsDict["do_copy_in_default_stream"];
                Assert.Equal("1", value);
                value = resultProviderOptionsDict["cudnn_conv_use_max_workspace"];
                Assert.Equal("1", value);
                value = resultProviderOptionsDict["cudnn_conv1d_pad_to_nc1d"];
                Assert.Equal("1", value);

                // test correctness of provider options
                SessionOptions options = SessionOptions.MakeSessionOptionWithCudaProvider(cudaProviderOptions);
                cleanUp.Add(options);

                var session = new InferenceSession(modelPath, options);
                cleanUp.Add(session);

                var inputMeta = session.InputMetadata;
                var container = new List<NamedOnnxValue>();
                float[] inputData = TestDataLoader.LoadTensorFromFile(@"bench.in"); // data for the model's single input tensor
                foreach (var name in inputMeta.Keys)
                {
                    Assert.Equal(typeof(float), inputMeta[name].ElementType);
                    Assert.True(inputMeta[name].IsTensor);
                    var tensor = new DenseTensor<float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor<float>(name, tensor));
                }
                }

                // Dispose the run output; this test only verifies that the session runs on the CUDA provider.
                using (session.Run(container))
                {
                }
            }
        }
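
Note that gpu_mem_limit is specified in bytes (20971520 = 20 MiB), and arena_extend_strategy takes the enum names "kNextPowerOfTwo" or "kSameAsRequested" as strings. When the defaults are good enough, the provider can be appended without a dictionary at all; a minimal sketch, assuming the Microsoft.ML.OnnxRuntime.Gpu package and a squeezenet.onnx in the working directory:

        using (var options = new SessionOptions())
        {
            // Appends the CUDA execution provider for device 0 with default provider options.
            options.AppendExecutionProvider_CUDA(0);
            using (var session = new InferenceSession("squeezenet.onnx", options))
            {
                // build the input container and call session.Run as in the test above
            }
        }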
Example #2
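Runs the same SqueezeNet inference with the TensorRT execution provider, using the convenience overload that takes only a device ID.
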
        private void CanRunInferenceOnAModelWithTensorRT()
        {
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet.onnx");

            using (var cleanUp = new DisposableListTest<IDisposable>())
            {
                SessionOptions options = SessionOptions.MakeSessionOptionWithTensorrtProvider(0);
                cleanUp.Add(options);

                var session = new InferenceSession(modelPath, options);
                cleanUp.Add(session);

                var inputMeta = session.InputMetadata;
                var container = new List<NamedOnnxValue>();
                float[] inputData = TestDataLoader.LoadTensorFromFile(@"bench.in"); // data for the model's single input tensor
                foreach (var name in inputMeta.Keys)
                {
                    Assert.Equal(typeof(float), inputMeta[name].ElementType);
                    Assert.True(inputMeta[name].IsTensor);
                    var tensor = new DenseTensor<float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor<float>(name, tensor));
                }
                }

                using (var results = session.Run(container))
                {
                    ValidateRunResults(results);
                }
            }
        }
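
MakeSessionOptionWithTensorrtProvider(0) selects GPU device 0 with default TensorRT settings. Example #5 below shows the dictionary-based variant for controlling the individual provider options.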
Example #3
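Opens SqueezeNet on GPU device 0 through the OpenSessionSqueezeNet test helper, which returns the session together with prepared input data, and checks the model output against a recorded reference tensor.
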
        private void TestGpu()
        {
            var tuple = OpenSessionSqueezeNet(0); // run on deviceID 0

            float[] expectedOutput = TestDataLoader.LoadTensorFromFile(@"bench.expected_out");

            using (var session = tuple.Item1)
            {
                var tensor = tuple.Item3; // tuple.Item2 (the raw input array) and session.InputMetadata are not needed in this test
                var container = new List<NamedOnnxValue>();
                container.Add(NamedOnnxValue.CreateFromTensor<float>("data_0", tensor));

                // Dispose the run output so the native tensors are released promptly.
                using (var res = session.Run(container))
                {
                    var resultArray = res.First().AsTensor<float>().ToArray();
                    Assert.Equal(expectedOutput, resultArray, new FloatComparer());
                }
            }
        }
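
The FloatComparer in the final assertion compares element-wise within a small tolerance rather than bit-exactly, since GPU results can differ slightly from the CPU-generated reference output.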
Example #4
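A driver for the pretrained-model suite: it locates the .onnx file for the given opset/model pair, feeds every test_data* directory of protobuf inputs through a session, and compares each output tensor against the recorded expectation, dispatching on the output element type.
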
        private void TestPreTrainedModels(string opset, string modelName)
        {
            var modelsDir = GetTestModelsDir();
            string onnxModelFileName = null;

            var modelDir = new DirectoryInfo(Path.Combine(modelsDir, opset, modelName));

            try
            {
                var onnxModelNames = modelDir.GetFiles("*.onnx");
                bool validModelFound = false;
                if (onnxModelNames.Length > 0)
                {
                    // TODO remove file "._resnet34v2.onnx" from test set
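                    // ("._"-prefixed files are AppleDouble metadata that macOS leaves alongside the real file; they are not valid ONNX models.)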
                    for (int i = 0; i < onnxModelNames.Length; i++)
                    {
                        if (onnxModelNames[i].Name != "._resnet34v2.onnx")
                        {
                            onnxModelNames[0] = onnxModelNames[i];
                            validModelFound   = true;
                        }
                    }
                }

                if (validModelFound)
                {
                    onnxModelFileName = Path.Combine(modelDir.FullName, onnxModelNames[0].Name);
                }
                else
                {
                    var modelNamesList = string.Join(",", onnxModelNames.Select(x => x.ToString()));
                    throw new Exception($"Opset {opset} Model {modelName}. Can't determine model file name. Found these: {modelNamesList}");
                }

                using (var session = new InferenceSession(onnxModelFileName))
                {
                    var inMeta = session.InputMetadata;
                    string testDataDirNamePattern = "test_data*";
                    if (opset == "opset9" && modelName == "LSTM_Seq_lens_unpacked")
                    {
                        testDataDirNamePattern = "seq_lens*"; // discrepancy in data directory
                    }
                    foreach (var testDataDir in modelDir.EnumerateDirectories(testDataDirNamePattern))
                    {
                        var inputContainer = new List<NamedOnnxValue>();
                        var outputContainer = new List<NamedOnnxValue>();
                        foreach (var f in testDataDir.EnumerateFiles("input_*.pb"))
                        {
                            inputContainer.Add(TestDataLoader.LoadTensorFromFilePb(f.FullName, inMeta));
                        }
                        foreach (var f in testDataDir.EnumerateFiles("output_*.pb"))
                        {
                            outputContainer.Add(TestDataLoader.LoadTensorFromFilePb(f.FullName, session.OutputMetadata));
                        }

                        using (var resultCollection = session.Run(inputContainer))
                        {
                            foreach (var result in resultCollection)
                            {
                                Assert.True(session.OutputMetadata.ContainsKey(result.Name));
                                var outputMeta = session.OutputMetadata[result.Name];
                                NamedOnnxValue outputValue = null;
                                foreach (var o in outputContainer)
                                {
                                    if (o.Name == result.Name)
                                    {
                                        outputValue = o;
                                        break;
                                    }
                                }
                                if (outputValue == null)
                                {
                                    outputValue = outputContainer.First(); // in case the output data file does not contain the name
                                }
                                if (outputMeta.IsTensor)
                                {
                                    if (outputMeta.ElementType == typeof(float))
                                    {
                                        Assert.Equal(result.AsTensor<float>(), outputValue.AsTensor<float>(), new FloatComparer());
                                    }
                                    else if (outputMeta.ElementType == typeof(double))
                                    {
                                        Assert.Equal(result.AsTensor<double>(), outputValue.AsTensor<double>(), new DoubleComparer());
                                    }
                                    else if (outputMeta.ElementType == typeof(int))
                                    {
                                        Assert.Equal(result.AsTensor<int>(), outputValue.AsTensor<int>(), new ExactComparer<int>());
                                    }
                                    else if (outputMeta.ElementType == typeof(uint))
                                    {
                                        Assert.Equal(result.AsTensor<uint>(), outputValue.AsTensor<uint>(), new ExactComparer<uint>());
                                    }
                                    else if (outputMeta.ElementType == typeof(short))
                                    {
                                        Assert.Equal(result.AsTensor<short>(), outputValue.AsTensor<short>(), new ExactComparer<short>());
                                    }
                                    else if (outputMeta.ElementType == typeof(ushort))
                                    {
                                        Assert.Equal(result.AsTensor<ushort>(), outputValue.AsTensor<ushort>(), new ExactComparer<ushort>());
                                    }
                                    else if (outputMeta.ElementType == typeof(long))
                                    {
                                        Assert.Equal(result.AsTensor<long>(), outputValue.AsTensor<long>(), new ExactComparer<long>());
                                    }
                                    else if (outputMeta.ElementType == typeof(ulong))
                                    {
                                        Assert.Equal(result.AsTensor<ulong>(), outputValue.AsTensor<ulong>(), new ExactComparer<ulong>());
                                    }
                                    else if (outputMeta.ElementType == typeof(byte))
                                    {
                                        Assert.Equal(result.AsTensor<byte>(), outputValue.AsTensor<byte>(), new ExactComparer<byte>());
                                    }
                                    else if (outputMeta.ElementType == typeof(bool))
                                    {
                                        Assert.Equal(result.AsTensor<bool>(), outputValue.AsTensor<bool>(), new ExactComparer<bool>());
                                    }
                                    else if (outputMeta.ElementType == typeof(Float16))
                                    {
                                        Assert.Equal(result.AsTensor<Float16>(), outputValue.AsTensor<Float16>(), new Float16Comparer { tolerance = 2 });
                                    }
                                    else if (outputMeta.ElementType == typeof(BFloat16))
                                    {
                                        Assert.Equal(result.AsTensor<BFloat16>(), outputValue.AsTensor<BFloat16>(), new BFloat16Comparer { tolerance = 2 });
                                    }
                                    else
                                    {
                                        Assert.True(false, $"{nameof(TestPreTrainedModels)} does not yet support output of type {outputMeta.ElementType}");
                                    }
                                }
                                else
                                {
                                    Assert.True(false, $"{nameof(TestPreTrainedModels)} cannot handle non-tensor outputs yet");
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                var msg = $"Opset {opset}, Model {modelName}: ModelFile = {onnxModelFileName} error = {ex.Message}";
                if (ex.Message.Contains("ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions"))
                {
                    // If the exception was thrown because the test model's opset version is not
                    // supported by ONNX Runtime yet, ignore the test and proceed. ORT takes commits
                    // from ONNX master, so we do come across new opsets that ORT does not support yet.
                    // To force these tests to run, set the env var ALLOW_RELEASED_ONNX_OPSET_ONLY=0.
                    output.WriteLine("Skipping the model test as the latest ONNX opset is not supported yet. Error Message: " + msg);
                }
                else
                {
                    throw new Exception(msg + "\n" + ex.StackTrace);
                }
            }
        }
Example #5
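The TensorRT counterpart of Example #1: FP16/INT8 modes, the INT8 calibration table, and the engine cache and decryption settings are configured through a string dictionary on OrtTensorRTProviderOptions, verified via a round trip, and then used to build the session.
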
        private void TestTensorRTProviderOptions()
        {
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet.onnx");
            string calTablePath = "squeezenet_calibration.flatbuffers";
            string enginePath = "./";
            string engineDecryptLibPath = "engine_decryp";

            using (var cleanUp = new DisposableListTest<IDisposable>())
            {
                var trtProviderOptions = new OrtTensorRTProviderOptions();
                cleanUp.Add(trtProviderOptions);

                var providerOptionsDict = new Dictionary<string, string>();
                providerOptionsDict["device_id"]       = "0";
                providerOptionsDict["trt_fp16_enable"] = "1";
                providerOptionsDict["trt_int8_enable"] = "1";
                providerOptionsDict["trt_int8_calibration_table_name"] = calTablePath;
                providerOptionsDict["trt_engine_cache_enable"]         = "1";
                providerOptionsDict["trt_engine_cache_path"]           = enginePath;
                providerOptionsDict["trt_engine_decryption_enable"]    = "0";
                providerOptionsDict["trt_engine_decryption_lib_path"]  = engineDecrptLibPath;
                trtProviderOptions.UpdateOptions(providerOptionsDict);

                var resultProviderOptionsDict = new Dictionary<string, string>();
                ProviderOptionsValueHelper.StringToDict(trtProviderOptions.GetOptions(), resultProviderOptionsDict);

                // test provider options configuration
                string value;
                value = resultProviderOptionsDict["device_id"];
                Assert.Equal("0", value);
                value = resultProviderOptionsDict["trt_fp16_enable"];
                Assert.Equal("1", value);
                value = resultProviderOptionsDict["trt_int8_enable"];
                Assert.Equal("1", value);
                value = resultProviderOptionsDict["trt_int8_calibration_table_name"];
                Assert.Equal(calTablePath, value);
                value = resultProviderOptionsDict["trt_engine_cache_enable"];
                Assert.Equal("1", value);
                value = resultProviderOptionsDict["trt_engine_cache_path"];
                Assert.Equal(enginePath, value);
                value = resultProviderOptionsDict["trt_engine_decryption_enable"];
                Assert.Equal("0", value);
                value = resultProviderOptionsDict["trt_engine_decryption_lib_path"];
                Assert.Equal(engineDecryptLibPath, value);

                // test correctness of provider options
                SessionOptions options = SessionOptions.MakeSessionOptionWithTensorrtProvider(trtProviderOptions);
                cleanUp.Add(options);

                var session = new InferenceSession(modelPath, options);
                cleanUp.Add(session);

                var inputMeta = session.InputMetadata;
                var container = new List<NamedOnnxValue>();
                float[] inputData = TestDataLoader.LoadTensorFromFile(@"bench.in"); // data for the model's single input tensor
                foreach (var name in inputMeta.Keys)
                {
                    Assert.Equal(typeof(float), inputMeta[name].ElementType);
                    Assert.True(inputMeta[name].IsTensor);
                    var tensor = new DenseTensor<float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor<float>(name, tensor));
                }
                }

                // Dispose the run output; this test only verifies that the session runs with TensorRT.
                using (session.Run(container))
                {
                }
            }
        }
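
With trt_engine_cache_path set to "./", cached engine files are written to the working directory. The calibration table only matters for INT8 runs: when trt_int8_enable is "1" and the model is not already Q/DQ quantized, TensorRT needs a valid calibration table to assign dynamic ranges.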