Example #1
        private void CanRunInferenceOnAModel(uint graphOptimizationLevel, bool disableSequentialExecution)
        {
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet.onnx");

            // Set the graph optimization level for this session.
            SessionOptions options = new SessionOptions();

            Assert.True(options.SetSessionGraphOptimizationLevel(graphOptimizationLevel));
            if (disableSequentialExecution)
            {
                options.DisableSequentialExecution();
            }

            using (var session = new InferenceSession(modelPath, options))
            {
                var inputMeta = session.InputMetadata;
                var container = new List<NamedOnnxValue>();

                float[] inputData = LoadTensorFromFile(@"bench.in"); // this is the data for only one input tensor for this model

                foreach (var name in inputMeta.Keys)
                {
                    Assert.Equal(typeof(float), inputMeta[name].ElementType);
                    Assert.True(inputMeta[name].IsTensor);
                    var tensor = new DenseTensor<float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor<float>(name, tensor));
                }

                // Run the inference
                using (var results = session.Run(container))  // results is an IReadOnlyList<NamedOnnxValue> container
                {
                    Assert.Equal(1, results.Count);

                    float[] expectedOutput = LoadTensorFromFile(@"bench.expected_out");
                    // validate the results
                    foreach (var r in results)
                    {
                        Assert.Equal("softmaxout_1", r.Name);

                        var resultTensor = r.AsTensor<float>();
                        int[] expectedDimensions = { 1, 1000, 1, 1 };  // hardcoded for now for the test data
                        Assert.Equal(expectedDimensions.Length, resultTensor.Rank);

                        var resultDimensions = resultTensor.Dimensions;
                        for (int i = 0; i < expectedDimensions.Length; i++)
                        {
                            Assert.Equal(expectedDimensions[i], resultDimensions[i]);
                        }

                        var resultArray = resultTensor.ToArray();
                        Assert.Equal(expectedOutput.Length, resultArray.Length);
                        Assert.Equal(expectedOutput, resultArray, new floatComparer());
                    }
                }
            }
        }
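
Both examples rely on helpers that are defined elsewhere in the test project and not shown here: LoadTensorFromFile and the floatComparer passed to Assert.Equal. A minimal sketch of plausible definitions, assuming the data files hold plain-text floats separated by commas or whitespace and that the comparer uses a fixed absolute tolerance (both the file format and the 1e-5f tolerance are assumptions, not the project's actual values):

        // Requires: using System; using System.Collections.Generic;
        //           using System.Globalization; using System.IO;

        // Reads every float in a plain-text file (assumed format).
        static float[] LoadTensorFromFile(string path)
        {
            var values = new List<float>();
            foreach (var line in File.ReadAllLines(path))
            {
                var tokens = line.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries);
                foreach (var token in tokens)
                {
                    values.Add(float.Parse(token, CultureInfo.InvariantCulture));
                }
            }
            return values.ToArray();
        }

        // Tolerance-based float equality for Assert.Equal (assumed tolerance).
        private class floatComparer : IEqualityComparer<float>
        {
            private const float Tolerance = 1e-5f;

            public bool Equals(float x, float y) => Math.Abs(x - y) < Tolerance;

            // A constant hash forces Assert.Equal to fall through to Equals.
            public int GetHashCode(float x) => 0;
        }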
Example #2
        static void RunModelOnnxRuntime(string modelPath, string inputPath, int iteration, DateTime[] timestamps, bool parallelExecution, uint optLevel)
        {
            if (timestamps.Length != (int)TimingPoint.TotalCount)
            {
                throw new ArgumentException("Timestamps array must have " + (int)TimingPoint.TotalCount + " size");
            }

            timestamps[(int)TimingPoint.Start] = DateTime.Now;
            SessionOptions options = new SessionOptions();

            if (parallelExecution)
            {
                options.DisableSequentialExecution();
            }
            options.SetSessionGraphOptimizationLevel(optLevel);
            using (var session = new InferenceSession(modelPath, options))
            {
                timestamps[(int)TimingPoint.ModelLoaded] = DateTime.Now;
                var inputMeta = session.InputMetadata;

                var container = new List<NamedOnnxValue>();
                foreach (var name in inputMeta.Keys)
                {
                    float[] rawData = LoadTensorFromFile(inputPath);
                    var tensor = new DenseTensor<float>(rawData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor<float>(name, tensor));
                }

                timestamps[(int)TimingPoint.InputLoaded] = DateTime.Now;

                // Run the inference
                for (int i = 0; i < iteration; i++)
                {
                    // Dispose each result set so native output buffers are released between iterations.
                    using (var results = session.Run(container))  // results is an IReadOnlyList<NamedOnnxValue> container
                    {
                        Debug.Assert(results != null);
                        Debug.Assert(results.Count == 1);
                    }
                }

                timestamps[(int)TimingPoint.RunComplete] = DateTime.Now;
            }
        }
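
Example #2 writes into a caller-supplied timestamps array indexed by a TimingPoint enum that is not shown. From the indices used (Start, ModelLoaded, InputLoaded, RunComplete, TotalCount) it can be reconstructed as below; the Main method that follows is a hypothetical caller, added only to show how the per-phase durations fall out of the array, with placeholder paths, iteration count, and optimization level:

        enum TimingPoint
        {
            Start = 0,
            ModelLoaded,
            InputLoaded,
            RunComplete,
            TotalCount   // number of timing points, used to size the array
        }

        // Requires: using System;
        static void Main()
        {
            var timestamps = new DateTime[(int)TimingPoint.TotalCount];

            // Hypothetical arguments: the paths, iteration count, and optLevel are placeholders.
            RunModelOnnxRuntime("squeezenet.onnx", "bench.in", 100, timestamps,
                                parallelExecution: false, optLevel: 2);

            Console.WriteLine("Model load: " + (timestamps[(int)TimingPoint.ModelLoaded] - timestamps[(int)TimingPoint.Start]));
            Console.WriteLine("Input load: " + (timestamps[(int)TimingPoint.InputLoaded] - timestamps[(int)TimingPoint.ModelLoaded]));
            Console.WriteLine("Inference:  " + (timestamps[(int)TimingPoint.RunComplete] - timestamps[(int)TimingPoint.InputLoaded]));
        }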