Example #1
        public JsonResult predictCNN([FromBody] List <byte> imageBytes)
        {
            // Normalize the raw pixel bytes to floats in [0, 1].
            float[]          floatArray       = imageBytes.Select(i => i / 255.0f).ToArray();
            InferenceSession inferenceSession = _inferenceSessions["cnn"];
            var              tensor           = new DenseTensor <float>(floatArray, inferenceSession.InputMetadata["Input3"].Dimensions);
            using var results = inferenceSession.Run(new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor("Input3", tensor)
            });
            var weights = results.First().AsTensor <float>().ToList();

            // Shift the raw scores to be non-negative, then rescale them to sum to 1
            // (a rough pseudo-probability, not a true softmax).
            var minShift = Math.Abs(weights.Min());
            var shifted  = weights.Select(x => x + minShift).ToArray();
            var total    = shifted.Sum();
            var probs    = shifted.Select(x => x / total).ToArray();
            var pred     = Array.IndexOf(probs, probs.Max());
            var wrappedReturn = new { prediction = pred, probabilities = probs };

            return Json(wrappedReturn);
        }
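The shift-and-rescale step above yields only a rough pseudo-probability. For reference, a numerically stable softmax over the same raw scores; a minimal sketch, assuming System.Linq is available (the Softmax helper name is illustrative, not part of the original):

        // Illustrative helper: numerically stable softmax over raw model scores.
        static float[] Softmax(IReadOnlyList <float> logits)
        {
            float max  = logits.Max();  // subtract the max before exponentiating for stability
            var   exps = logits.Select(x => (float)Math.Exp(x - max)).ToArray();
            float sum  = exps.Sum();

            return exps.Select(x => x / sum).ToArray();
        }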
Example #2
        private void TestModelInputSTRING()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_STRING.onnx");

            using (var session = new InferenceSession(modelPath))
            {
                var container = new List <NamedOnnxValue>();
                var tensorIn  = new DenseTensor <string>(new string[] { "a", "c", "d", "z", "f" }, new int[] { 1, 5 });
                var nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);
                container.Add(nov);
                using (var res = session.Run(container))
                {
                    var tensorOut = res.First().AsTensor <string>();
                    Assert.True(tensorOut.SequenceEqual(tensorIn));
                }
            }
        }
Example #3
        public string Run(InferenceSession session, Tensor <float> oneTensor)
        {
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("input", oneTensor)
            };

            string res = string.Empty;

            using (IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs))
            {
                // Postprocess to get predictions
                var resultsArray = results.ToArray();
                res = Decode(resultsArray);
            }

            return(res);
        }
Example #4
        private void TestModelInputUINT64()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_UINT64.pb");

            using (var session = new InferenceSession(modelPath))
            {
                var container = new List <NamedOnnxValue>();
                var tensorIn  = new DenseTensor <UInt64>(new UInt64[] { 1, 2, 3, UInt64.MinValue, UInt64.MaxValue }, new int[] { 1, 5 });
                var nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);
                container.Add(nov);
                using (var res = session.Run(container))
                {
                    var tensorOut = res.First().AsTensor <UInt64>();
                    Assert.True(tensorOut.SequenceEqual(tensorIn));
                }
            }
        }
Example #5
        static void Main(string[] args)
        {
            string path = System.AppContext.BaseDirectory + "myModel.onnx";

            Console.WriteLine(path);
            Tensor <float> input  = new DenseTensor <float>(new[] { 32, 32 });
            Tensor <float> output = new DenseTensor <float>(new[] { 1, 4, 4 });

            for (int y = 0; y < 32; y++)
            {
                for (int x = 0; x < 32; x++)
                {
                    input[y, x] = (float)Math.E;
                }
            }

            //Console.WriteLine(input.GetArrayString());

            // Setup inputs
            List <NamedOnnxValue> inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("Input", input.Reshape(new [] { 1, 32, 32 }).ToDenseTensor()),
            };
            // Setup outputs
            List <NamedOnnxValue> outputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("Output", output),
            };

            Stopwatch stopWatch = new Stopwatch();

            stopWatch.Start();

            // Load the model and run inference (note: the stopwatch includes model-load time)
            using (InferenceSession session = new InferenceSession(path))
            {
                session.Run(inputs, outputs);
                output = outputs[0].AsTensor <float>();
                Console.WriteLine(output.Reshape(new[] { 4, 4 }).ToDenseTensor().GetArrayString());
            }

            stopWatch.Stop();

            Console.WriteLine(stopWatch.ElapsedMilliseconds.ToString());
        }
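This example uses the Run overload that writes into caller-allocated output tensors. For comparison, a minimal sketch of the more common pattern, where the runtime allocates the outputs and returns disposable values (same path and inputs as above):

            // Sketch: let ONNX Runtime allocate the outputs instead of pre-binding them.
            using (var session = new InferenceSession(path))
            using (var results = session.Run(inputs))
            {
                var first = results.First().AsTensor <float>();
                Console.WriteLine(first.GetArrayString());
            }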
Example #6
 public Predictor(string path_to_imgs,
                  Output write,
                  string path_to_model = "E:\\s02170150\\PredictorLibrary\\resnet18-v1-7.onnx") //"..\\..\\..\\..\\PredictorLibrary\\resnet18-v1-7.onnx"
 {
     this.path_to_imgs  = path_to_imgs;
     this.write        += write;
     this.path_to_model = path_to_model;
     proc_count         = Environment.ProcessorCount;
     try
     {
         session = new InferenceSession(path_to_model);
     }
      catch (Exception e)
      {
          Console.WriteLine($"Failed to load model: {e.Message}");
          // session stays null if loading failed; callers should check it before use.
      }
     out_mutex = new AutoResetEvent(true);
     cancel    = new ManualResetEvent(false);
 }
Example #7
        public int LoadAndPredict(Image <Rgb24> image)
        {
            // using var image =  Image.Load<Rgb24>(img_name);

            const int TargetWidth  = 28;
            const int TargetHeight = 28;

            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop
                }).Grayscale();
            });

            var input = new DenseTensor <float>(new[] { 1, 1, TargetHeight, TargetWidth });

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);

                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = pixelSpan[x].R / 255.0f;
                }
            }

            using var session = new InferenceSession(model_name);
            string input_name = session.InputMetadata.Keys.First();
            var    inputs     = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor(input_name, input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);
            var query   = softmax.Select((x, i) => new { Label = classLabels[i], Confidence = x })
                          .OrderByDescending(x => x.Confidence);

            return(Int32.Parse(query.First().Label));
        }
Example #8
        public static async Task <IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
            ILogger log, ExecutionContext context)
        {
            log.LogInformation("C# HTTP trigger function processed a request.");

            string review = req.Query["review"];

            string  requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            dynamic data        = JsonConvert.DeserializeObject(requestBody);

            review = review ?? data?.review;

            var models = new Dictionary <string, string>();

            models.Add("points", GetFileAndPathFromStorage(context, log, "model327", "pipeline_points_range.onnx"));
            models.Add("price", GetFileAndPathFromStorage(context, log, "model327", "pipeline_price_range.onnx"));
            models.Add("variety", GetFileAndPathFromStorage(context, log, "model327", "pipeline_variety.onnx"));

            var inputTensor = new DenseTensor <string>(new string[] { review }, new int[] { 1, 1 });
            //create input data for session.
            var input = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor <string>("input", inputTensor)
            };

            // Collect the per-model inference results.
            var inferenceResults = new Dictionary <string, IDictionary <string, float> >();

            foreach (var model in models)
            {
                log.LogInformation($"Start inference session for {model.Key}");
                using var session   = new InferenceSession(model.Value);
                var output          = session.Run(input).ToList().Last().AsEnumerable <NamedOnnxValue>();
                var inferenceResult = output.First().AsDictionary <string, float>();
                var topThreeResult  = inferenceResult.OrderByDescending(dict => dict.Value).Take(3)
                                      .ToDictionary(pair => pair.Key, pair => pair.Value);
                log.LogInformation($"Top five results for {model.Key} {topThreeResult}");
                inferenceResults.Add(model.Key, topThreeResult);
                Console.Write(inferenceResult);
            }

            return(new JsonResult(inferenceResults));
        }
Example #9
        private void TestInferenceSessionWithByteArray()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_FLOAT.pb");

            byte[] modelData = File.ReadAllBytes(modelPath);

            using (var session = new InferenceSession(modelData))
            {
                var container = new List <NamedOnnxValue>();
                var tensorIn  = new DenseTensor <float>(new float[] { 1.0f, 2.0f, -3.0f, float.MinValue, float.MaxValue }, new int[] { 1, 5 });
                var nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);
                container.Add(nov);
                using (var res = session.Run(container))
                {
                    var tensorOut = res.First().AsTensor <float>();
                    Assert.True(tensorOut.SequenceEqual(tensorIn));
                }
            }
        }
Example #10
        /// <inheritdoc/>
        public void SetDevice(int deviceId)
        {
            Session?.Dispose();
            options?.Dispose();
            options = new SessionOptions
            {
                GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_ALL,
                ExecutionMode          = deviceId < 0 ?
                                         ExecutionMode.ORT_PARALLEL :
                                         ExecutionMode.ORT_SEQUENTIAL,
                EnableMemoryPattern = deviceId < 0
            };

            if (deviceId >= 0)
            {
                options.AppendExecutionProvider_DML(deviceId);
            }

            Session = new InferenceSession(model, options);
        }
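A plausible call pattern for SetDevice, assuming an instance runner of the surrounding class (the instance name is hypothetical; note that AppendExecutionProvider_DML requires the DirectML flavor of the ONNX Runtime package):

            // Hypothetical usage: a negative id selects CPU execution,
            // a non-negative id selects that DirectML device.
            runner.SetDevice(-1); // CPU: parallel execution mode, memory pattern enabled
            runner.SetDevice(0);  // GPU 0 via DirectML: sequential execution mode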
Example #11
        private void TestPreTrainedModelsOpset7And8()
        {
            var opsets = new[] { "opset7", "opset8" };

            foreach (var opset in opsets)
            {
                var modelRoot = new DirectoryInfo(opset);
                foreach (var model in modelRoot.EnumerateDirectories())
                {
                    // TODO: dims contains 'None'. Session throws error.
                    if (model.ToString() == "test_tiny_yolov2")
                    {
                        continue;
                    }
                    try
                    {
                        //TODO: sometimes, the file name is not 'model.onnx'
                        var session    = new InferenceSession($"{opset}\\{model}\\model.onnx");
                        var inMeta     = session.InputMetadata;
                        var innodepair = inMeta.First();
                        var innodename = innodepair.Key;
                        var innodedims = innodepair.Value.Dimensions;
                        var dataIn     = LoadTensorFromFilePb($"{opset}\\{model}\\test_data_set_0\\input_0.pb");
                        var dataOut    = LoadTensorFromFilePb($"{opset}\\{model}\\test_data_set_0\\output_0.pb");
                        var tensorIn   = new DenseTensor <float>(dataIn, innodedims);
                        var nov        = new List <NamedOnnxValue>();
                        nov.Add(NamedOnnxValue.CreateFromTensor <float>(innodename, tensorIn));
                        var resnov = session.Run(nov);
                        var res    = resnov.ToArray()[0].AsTensor <float>().ToArray <float>();
                        Assert.Equal(res, dataOut, new floatComparer());
                        session.Dispose();
                    }
                    catch (Exception ex)
                    {
                        var msg = $"Opset {opset}: Model {model}: error = {ex.Message}";
                        continue; //TODO: fix it
                        //throw new Exception(msg);
                    }
                } //model
            }     //opset
        }
Example #12
        static void createSession(int imageIndex)
        {
            string modelPath = Directory.GetCurrentDirectory() + @"/pytorch_mnist.onnx";

            // Optional : Create session options and set the graph optimization level for the session
            //SessionOptions options = new SessionOptions();
            //options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_EXTENDED;
            //using (var session = new InferenceSession(modelPath, options))

            using (var session = new InferenceSession(modelPath))
            {
                //float[] inputData = LoadTensorFromFile(@"bench.in"); // this is the data for only one input tensor for this model
                Utilities.LoadTensorData();
                float[] inputData = Utilities.ImageData[imageIndex];
                string  label     = Utilities.ImageLabels[imageIndex];
                Console.WriteLine("Selected image is the number: " + label);

                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();
                //PrintInputMetadata(inputMeta);

                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                // Run the inference
                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // Get the results
                    foreach (var r in results)
                    {
                        Console.WriteLine("Output Name: {0}", r.Name);
                        int prediction = MaxProbability(r.AsTensor <float>());
                        Console.WriteLine("Prediction: " + prediction.ToString());
                        //Console.WriteLine(r.AsTensor<float>().GetArrayString());
                    }
                }
            }
        }
Example #13
        public ORTWrapper(string modelPath, DNNMode mode)
        {
            // Session options are created inline below via MakeSessionOptionWithCudaProvider,
            // so no separate SessionOptions object is needed here.
            cfg = new Yolov3BaseConfig();

            this.mode = mode;
            switch (mode)
            {
            case DNNMode.LT:
            case DNNMode.Frame:
                session1 = new InferenceSession(modelPath, SessionOptions.MakeSessionOptionWithCudaProvider(0));
                break;

            case DNNMode.CC:
                session2 = new InferenceSession(modelPath, SessionOptions.MakeSessionOptionWithCudaProvider(0));
                break;
            }
        }
Example #14
        private static void TestSinglePrediction(MLContext mlContext)
        {
            ConsoleHelper.ConsoleWriteHeader("=============== Testing prediction engine ===============");


            using var session = new InferenceSession(modelPath);

            /* Build the ONNX input container */

            var inputMeta = session.InputMetadata;

            var container = new List <NamedOnnxValue>();

            //10298,36,15.20,40,0.25
            container.Add(GetNamedOnnxValue <string>(inputMeta, "ProductID", "63"));
            container.Add(GetNamedOnnxValue <float>(inputMeta, "UnitPrice", 35.1f));
            container.Add(GetNamedOnnxValue <float>(inputMeta, "Quantity", 80f));
            container.Add(GetNamedOnnxValue <string>(inputMeta, "Discount", "0"));
            /* Run the model and read the ONNX output */
            var result = session.Run(container);
            var output = result.First(x => x.Name == "PredictedLabel0").AsTensor <bool>().GetValue(0);

            Console.WriteLine($"**********************************************************************");
            Console.WriteLine($"Predicted Pay Full Price: {output:0.####}, actual : false");
            Console.WriteLine($"**********************************************************************");

            container = new List <NamedOnnxValue>();
            //10298,36,15.20,40,0.25
            container.Add(GetNamedOnnxValue <string>(inputMeta, "ProductID", "11"));
            container.Add(GetNamedOnnxValue <float>(inputMeta, "UnitPrice", 14f));
            container.Add(GetNamedOnnxValue <float>(inputMeta, "Quantity", 12f));
            container.Add(GetNamedOnnxValue <string>(inputMeta, "Discount", "0"));
            /* Run the model and read the ONNX output */
            result = session.Run(container);
            output = result.First(x => x.Name == "PredictedLabel0").AsTensor <bool>().GetValue(0);

            Console.WriteLine($"**********************************************************************");
            Console.WriteLine($"Predicted Pay Full Price: {output:0.####}, actual : true");
            Console.WriteLine($"**********************************************************************");
        }
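The GetNamedOnnxValue<T> helper used above is not shown in this example. A minimal sketch of what it plausibly looks like, wrapping one scalar into a 1x1 tensor keyed by the input column name (this reconstruction is an assumption, not the original helper):

        // Hypothetical reconstruction of the helper used above.
        static NamedOnnxValue GetNamedOnnxValue <T>(
            IReadOnlyDictionary <string, NodeMetadata> inputMeta, string column, T value)
        {
            if (!inputMeta.ContainsKey(column))
            {
                throw new ArgumentException($"Unknown model input: {column}");
            }

            // One scalar becomes a 1x1 tensor named after the model input it feeds.
            var tensor = new DenseTensor <T>(new T[] { value }, new int[] { 1, 1 });
            return NamedOnnxValue.CreateFromTensor(column, tensor);
        }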
Example #15
        static void RunModelOnnxRuntime(string modelPath, string inputPath, int iteration, DateTime[] timestamps)
        {
            if (timestamps.Length != (int)TimingPoint.TotalCount)
            {
                throw new ArgumentException($"Timestamps array must have {(int)TimingPoint.TotalCount} elements");
            }

            timestamps[(int)TimingPoint.Start] = DateTime.Now;

            using (var session = new InferenceSession(modelPath))
            {
                timestamps[(int)TimingPoint.ModelLoaded] = DateTime.Now;
                var inputMeta = session.InputMetadata;

                var container = new List <NamedOnnxValue>();
                foreach (var name in inputMeta.Keys)
                {
                    float[] rawData = LoadTensorFromFile(inputPath);
                    var     tensor  = new DenseTensor <float>(rawData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }



                timestamps[(int)TimingPoint.InputLoaded] = DateTime.Now;

                // Run the inference
                for (int i = 0; i < iteration; i++)
                {
                    // Dispose each run's results so native buffers are released between iterations.
                    using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue>
                    {
                        Debug.Assert(results != null);
                        Debug.Assert(results.Count == 1);
                    }
                }

                timestamps[(int)TimingPoint.RunComplete] = DateTime.Now;
            }
        }
Example #16
        public float[] forward()
        {
            float[]        resData       = new float[10];
            int[]          resDimensions = { 10 };
            Tensor <float> lastResult    = new DenseTensor <float>(resData, resDimensions);

            using (var session = new InferenceSession(modelPath))
            {
                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();

                Bitmap  img       = Image.FromFile(dataPath) as Bitmap;
                float[] inputData = new float[1 * 28 * 28];
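                // Note: the outer loop walks x (width) and the inner loop walks y (height),
                // so the buffer is filled column-by-column -- transposed relative to the
                // row-major [y, x] traversal used by the other examples here.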
                for (int i = 0; i < img.Width; i++)
                {
                    for (int j = 0; j < img.Height; j++)
                    {
                        Color pixel = img.GetPixel(i, j);
                        inputData[i * img.Height + j] = pixel.R / 255.0f;
                    }
                }
                int[] dimensions = { 1, 1, 28, 28 };
                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }
                // Run the inference
                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // dump the results
                    foreach (var r in results)
                    {
                        lastResult = r.AsTensor <float>().Clone();
                    }
                }
            }
            return(lastResult.ToArray <float>());
        }
Example #17
        static void Main(string[] args)
        {
            /* SessionOptions options = new SessionOptions();
             * options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_EXTENDED; */

            Console.WriteLine("qqqqwwwwwwwwwyay");
            var session = new InferenceSession("C:\\Users\\natank\\Desktop\\model_2good.onnx");

            Console.WriteLine("wwwwwyay");

            // Mock data; fix the file paths as needed
            NDarray xtr  = np.load("C:\\Natan\\fff\\fff\\NKLA_D_100a2200.npy");
            NDarray xtr2 = np.load("C:\\Natan\\fff\\fff\\NKLA_D_100a2318.npy");

            Console.WriteLine(xtr2.shape);

            var model = Model.LoadModel("C:\\Natan\\fff\\fff\\_reg_best.h5");

            Console.WriteLine("yay");

            // Fix the model path as needed
            NDarray scores = model.Predict(xtr2);

            Console.WriteLine(scores.shape);
            Console.WriteLine(scores[0, 0]);

            scores = model.Predict(xtr);
            Console.WriteLine(scores.shape);

            /* foreach (double sc in scores)
             * {
             *     Console.WriteLine(sc);
             *     // Console.WriteLine(model.metrics_names[1], scores[1] * 100);
             * }
             */


            Console.WriteLine("Hello World! we tested");
            Console.ReadKey();
        }
Example #18
        private void buttonLoad_Click(object sender, EventArgs e)
        {
            if (textUrl.Enabled)
            {
                if (openFileOnnx.ShowDialog() == DialogResult.OK &&
                    File.Exists(openFileOnnx.FileName))
                {
                    try
                    {
                        var file = openFileOnnx.FileName;
                        _session        = new InferenceSession(file);
                        textUrl.Text    = $"Local Mode: {Path.GetFileName(file)}";
                        textUrl.Enabled = false;
                        buttonLoad.Text = "Use Service";
                        Clear(false);
                    }
                    catch (Exception error)
                    {
                        logger.Error(error, "Load Model Exception");
                        StringBuilder sb = new StringBuilder();
                        do
                        {
                            sb.AppendLine($"Error: {error.GetType()}, {error.Message}");
                            error = error.InnerException;
                        } while (error != null);

                        MessageBox.Show("Error", sb.ToString(), MessageBoxButtons.OK, MessageBoxIcon.Error);
                    }
                }
            }
            else
            {
                textUrl.Text    = Properties.Settings.Default.Uri;
                textUrl.Enabled = true;
                _session        = null;
                buttonLoad.Text = "Load Model";
                Clear(false);
            }
        }
Example #19
 public NeuralImage(string model_path, string answer_path)
 {
     Console.WriteLine("INITIALIZE neuralImage");
     try
     {
         using (StreamReader sr = new StreamReader(answer_path))
         {
             while (!sr.EndOfStream)
             {
                 answers.Add(sr.ReadLine());
             }
             sr.Close();
         }
         session    = new InferenceSession(model_path);
         DbProvider = new DBprovider();
     }
     catch (Exception ex)
     {
         Debug.WriteLine("MODEL OR ANSWERS DIDN'T LOADED");
         exception = ex;
     }
 }
Example #20
        public string Predict(string img)
        {
            using var image = Image.Load <Rgb24>(img);

            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop
                });
                x.Grayscale();
            });

            var input = new DenseTensor <float>(new[] { 1, 1, TargetHeight, TargetWidth });

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = pixelSpan[x].R / 255.0f;
                }
            }
            using var session = new InferenceSession(ModelFolder + ModelFile);

            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor(session.InputMetadata.Keys.First(), input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();

            // Return the label of the class with the highest probability.
            return classLabels[softmax.IndexOf(softmax.Max())];
        }
Example #21
        public void Configure(IApplicationBuilder app, IHostingEnvironment env)
        {
            app.UseDeveloperExceptionPage();

            var onnxPath = Path.Combine(env.ContentRootPath, "products.onnx");
            var session  = new InferenceSession(onnxPath);

            app.Run(context =>
            {
                var inputImagePath = Path.Combine(env.ContentRootPath, "drill.jpg");
                var data           = ConvertImageToTensor(inputImagePath);
                var input          = NamedOnnxValue.CreateFromTensor <float>("data", data);
                using (var output = session.Run(new[] { input }))
                {
                    var prediction = output
                                     .First(i => i.Name == "classLabel")
                                     .AsEnumerable <string>()
                                     .First();
                    return(context.Response.WriteAsync(prediction));
                }
            });
        }
Example #22
        public float[] Predict(float[] inputData)
        {
            List <float> res = new List <float>();

            using (var session = new InferenceSession(modelPath, options))
            {
                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();

                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, new[] { 1, 4 });
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // dump the results
                    foreach (var r in results)
                    {
                        //Debug.WriteLine("Output for {0}", r.Name);
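                        // "output_probability" is a ZipMap-style output: a sequence of
                        // dictionaries mapping each class label to its probability.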
                        if (r.Name == "output_probability")
                        {
                            List <DisposableNamedOnnxValue> v = (List <DisposableNamedOnnxValue>)r.Value;
                            var d = v[0].AsDictionary <string, float>();

                            foreach (var item in d)
                            {
                                res.Add(item.Value);
                            }
                        }
                        //Console.WriteLine(r.AsTensor<string>().GetArrayString());
                    }
                }
            }

            return(res.ToArray());
        }
Example #23
        static Score PredictLocal(InferenceSession session, float[] digit)
        {
            var            now = DateTime.Now;
            Tensor <float> x   = new DenseTensor <float>(digit.Length);

            for (int i = 0; i <= digit.Length - 1; i++)
            {
                x[i] = digit[i] / 255.0f;
            }

            int[] dims = { 1, 1, 28, 28 };  // hardcoded for now for the test data
            x = x.Reshape(dims);

            var input = new List <NamedOnnxValue>()
            {
                NamedOnnxValue.CreateFromTensor("Input3", x)
            };

            try
            {
                var prediction = session.Run(input).First().AsTensor <float>().ToArray();
                return(new Score()
                {
                    Status = $"Local Mode: {session}",
                    Empty = false,
                    Prediction = Array.IndexOf(prediction, prediction.Max()),
                    Scores = prediction.Select(i => System.Convert.ToDouble(i)).ToList(),
                    Time = (DateTime.Now - now).TotalSeconds
                });
            }
            catch (Exception e)
            {
                return(new Score()
                {
                    Status = e.Message
                });
            }
        }
Example #24
        public void Init(string lastPath)
        {
            using var session1 = new InferenceSession(lastPath);

            foreach (var item in session1.OutputMetadata.Keys)
            {
                var dims = session1.OutputMetadata[item].Dimensions;
                _nodes.Add(new NodeInfo()
                {
                    Name = item, Dims = dims
                });
            }

            foreach (var name in session1.InputMetadata.Keys)
            {
                var dims = session1.InputMetadata[name].Dimensions;
                _nodes.Add(new NodeInfo()
                {
                    Name = name, Dims = dims, IsInput = true
                });
            }
        }
Example #25
        public static (float[], double[]) GetRawPrediction(int sampleRate, float[] samples)
        {
            var fft = GetFft(samples);
            var pcp = PitchClassProfile(fft, sampleRate);

            var inputTensor = new DenseTensor <float>(new[] { 1, 12 });

            for (var i = 0; i < 12; i++)
            {
                inputTensor[0, i] = (float)pcp[i];
            }

            var input = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("dense_1_input", inputTensor)
            };
            using var session =
                new InferenceSession("models/binary_crossentropy.onnx");

            using var results = session.Run(input);

            return(results.First().AsEnumerable <float>().ToArray(), pcp);
        }
Example #26
        static void UseApi()
        {
            string basepath  = "..\\..\\..\\testdata\\";
            string modelPath = basepath + "squeezenet.onnx";

            Debug.Assert(File.Exists(modelPath));
            // Optional : Create session options and set the graph optimization level for the session
            SessionOptions options = new SessionOptions();

            options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_EXTENDED;

            using (var session = new InferenceSession(modelPath, options))
            {
                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();

                float[] inputData = LoadTensorFromFile(basepath + "bench.in"); // this is the data for only one input tensor for this model

                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                // Run the inference
                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // dump the results
                    foreach (var r in results)
                    {
                        Console.WriteLine("Output for {0}", r.Name);
                        Console.WriteLine(r.AsTensor <float>().GetArrayString());
                    }
                }
            }
        }
Example #27
        private void TestPreTrainedModelsOpset7And8()
        {
            // 16-bit float not supported type in C#.
            var skipModels = new[] {
                "fp16_inception_v1",
                "fp16_shufflenet",
                "fp16_tiny_yolov2"
            };

            var opsets    = new[] { "opset7", "opset8" };
            var modelsDir = GetTestModelsDir();

            foreach (var opset in opsets)
            {
                var modelRoot = new DirectoryInfo(Path.Combine(modelsDir, opset));
                //var cwd = Directory.GetCurrentDirectory();
                foreach (var modelDir in modelRoot.EnumerateDirectories())
                {
                    String onnxModelFileName = null;

                    if (skipModels.Contains(modelDir.Name))
                    {
                        continue;
                    }

                    try
                    {
                        var onnxModelNames = modelDir.GetFiles("*.onnx");
                        if (onnxModelNames.Length > 1)
                        {
                            // TODO remove file "._resnet34v2.onnx" from test set
                            bool validModelFound = false;
                            for (int i = 0; i < onnxModelNames.Length; i++)
                            {
                                if (onnxModelNames[i].Name != "._resnet34v2.onnx")
                                {
                                    onnxModelNames[0] = onnxModelNames[i];
                                    validModelFound   = true;
                                }
                            }

                            if (!validModelFound)
                            {
                                var modelNamesList = string.Join(",", onnxModelNames.Select(x => x.ToString()));
                                throw new Exception($"Opset {opset}: Model {modelDir}. Can't determine model file name. Found these :{modelNamesList}");
                            }
                        }

                        onnxModelFileName = Path.Combine(modelsDir, opset, modelDir.Name, onnxModelNames[0].Name);
                        using (var session = new InferenceSession(onnxModelFileName))
                        {
                            var inMeta     = session.InputMetadata;
                            var innodepair = inMeta.First();
                            var innodename = innodepair.Key;
                            var innodedims = innodepair.Value.Dimensions;
                            for (int i = 0; i < innodedims.Length; i++)
                            {
                                if (innodedims[i] < 0)
                                {
                                    innodedims[i] = -1 * innodedims[i];
                                }
                            }

                            var testRoot = new DirectoryInfo(Path.Combine(modelsDir, opset, modelDir.Name));
                            var testData = testRoot.EnumerateDirectories("test_data*").First();
                            var dataIn   = LoadTensorFromFilePb(Path.Combine(modelsDir, opset, modelDir.Name, testData.ToString(), "input_0.pb"));
                            var dataOut  = LoadTensorFromFilePb(Path.Combine(modelsDir, opset, modelDir.Name, testData.ToString(), "output_0.pb"));
                            var tensorIn = new DenseTensor <float>(dataIn, innodedims);
                            var nov      = new List <NamedOnnxValue>();
                            nov.Add(NamedOnnxValue.CreateFromTensor <float>(innodename, tensorIn));
                            using (var resnov = session.Run(nov))
                            {
                                var res = resnov.ToArray()[0].AsTensor <float>().ToArray <float>();
                                Assert.Equal(res, dataOut, new floatComparer());
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        var msg = $"Opset {opset}: Model {modelDir}: ModelFile = {onnxModelFileName} error = {ex.Message}";
                        throw new Exception(msg);
                    }
                } //model
            }     //opset
        }
Example #28
        /// <summary>
        /// Constructs OnnxModel object from file.
        /// </summary>
        /// <param name="modelFile">Model file path.</param>
        /// <param name="gpuDeviceId">GPU device ID to execute on. Null for CPU.</param>
        /// <param name="fallbackToCpu">If true, resumes CPU execution quietly upon GPU error.</param>
        /// <param name="ownModelFile">If true, the <paramref name="modelFile"/> will be deleted when <see cref="OnnxModel"/> is
        /// no longer needed.</param>
        /// <param name="shapeDictionary"></param>
        public OnnxModel(string modelFile, int? gpuDeviceId = null, bool fallbackToCpu = false,
                         bool ownModelFile = false, IDictionary <string, int[]> shapeDictionary = null)
        {
            // If we don't own the model file, _disposed should be false to prevent deleting user's file.
            _disposed = false;

            if (gpuDeviceId != null)
            {
                try
                {
                    _session = new InferenceSession(modelFile,
                                                    SessionOptions.MakeSessionOptionWithCudaProvider(gpuDeviceId.Value));
                }
                catch (OnnxRuntimeException)
                {
                    if (fallbackToCpu)
                    {
                        _session = new InferenceSession(modelFile);
                    }
                    else
                    {
                        // If called from OnnxTransform, is caught and rethrown
                        throw;
                    }
                }
            }
            else
            {
                _session = new InferenceSession(modelFile);
            }

            try
            {
                // Load ONNX model file and parse its input and output schema. The reason of doing so is that ONNXRuntime
                // doesn't expose full type information via its C# APIs.
                var model = new OnnxCSharpToProtoWrapper.ModelProto();
                // If we own the model file set the DeleteOnClose flag so it is always deleted.
                if (ownModelFile)
                {
                    ModelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.DeleteOnClose);
                }
                else
                {
                    ModelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read);
                }

                // The CodedInputStream auto closes the stream, and we need to make sure that our main stream stays open, so creating a new one here.
                using (var modelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read, FileShare.Delete | FileShare.Read))
                    using (var codedStream = Google.Protobuf.CodedInputStream.CreateWithLimits(modelStream, Int32.MaxValue, 100))
                        model = OnnxCSharpToProtoWrapper.ModelProto.Parser.ParseFrom(codedStream);

                // Parse actual input and output types stored in the loaded ONNX model to get their DataViewType's.
                var inputTypePool = new Dictionary <string, DataViewType>();
                foreach (var valueInfo in model.Graph.Input)
                {
                    inputTypePool[valueInfo.Name] = OnnxTypeParser.GetDataViewType(valueInfo.Type);
                }

                var initializerTypePool = new Dictionary <string, DataViewType>();
                foreach (var valueInfo in model.Graph.Initializer)
                {
                    initializerTypePool[valueInfo.Name] = OnnxTypeParser.GetScalarDataViewType(valueInfo.DataType);
                }

                var outputTypePool = new Dictionary <string, DataViewType>();
                // Build casters which maps NamedOnnxValue to .NET objects.
                var casterPool = new Dictionary <string, Func <NamedOnnxValue, object> >();
                foreach (var valueInfo in model.Graph.Output)
                {
                    outputTypePool[valueInfo.Name] = OnnxTypeParser.GetDataViewType(valueInfo.Type);
                    casterPool[valueInfo.Name]     = OnnxTypeParser.GetDataViewValueCasterAndResultedType(valueInfo.Type, out Type actualType);
                }

                var inputInfos  = GetOnnxVariablesFromMetadata(_session.InputMetadata, shapeDictionary, inputTypePool, null);
                var outputInfos = GetOnnxVariablesFromMetadata(_session.OutputMetadata, shapeDictionary, outputTypePool, casterPool);
                var overrideableInitializers = GetOnnxVariablesFromMetadata(_session.OverridableInitializerMetadata, shapeDictionary, inputTypePool, null);

                // Create a view to the used ONNX model from ONNXRuntime's perspective.
                ModelInfo = new OnnxModelInfo(inputInfos, outputInfos, overrideableInitializers);

                Graph = model.Graph;
            }
            catch
            {
                _session.Dispose();
                _session = null;
                throw;
            }
        }
 /// <summary>
 /// Initializes face detector.
 /// </summary>
 /// <param name="options">Session options</param>
 /// <param name="confidenceThreshold">Confidence threshold</param>
 /// <param name="nmsThreshold">NonMaxSuppression threshold</param>
 public FaceDetectorLight(SessionOptions options, float confidenceThreshold = 0.95f, float nmsThreshold = 0.5f)
 {
     _session            = new InferenceSession(Properties.Resources.face_detector_320, options);
     ConfidenceThreshold = confidenceThreshold;
     NmsThreshold        = nmsThreshold;
 }
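A plausible way to construct the detector above (the threshold values shown are illustrative):

     // Hypothetical usage of the FaceDetectorLight constructor above.
     using var options = new SessionOptions();
     var detector = new FaceDetectorLight(options, confidenceThreshold: 0.9f, nmsThreshold: 0.5f);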
Example #30
        static void Main(string[] args)
        {
            using var image = Image.Load <Rgb24>(args.FirstOrDefault() ?? "image.jpg");

            const int TargetWidth  = 224;
            const int TargetHeight = 224;

            // Resize the image to 224 x 224
            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop // Preserve the aspect ratio by cropping the excess
                });
            });

            // Convert the pixels to a tensor and normalize
            var input  = new DenseTensor <float>(new[] { 1, 3, TargetHeight, TargetWidth });
            var mean   = new[] { 0.485f, 0.456f, 0.406f };
            var stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                    input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                    input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                }
            }

            // Prepare the network input. The name "input" is defined in the model file
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("input", input)
            };

            // Load the model from an embedded resource.
            // See <EmbeddedResource> in the project file
            using var modelStream  = typeof(Program).Assembly.GetManifestResourceStream("OnnxSample.shufflenet-v2-10.onnx");
            using var memoryStream = new MemoryStream();
            modelStream.CopyTo(memoryStream);
            using var session = new InferenceSession(memoryStream.ToArray());

            // Run the network to compute the prediction
            Console.WriteLine("Predicting contents of image...");
            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Take the 1000 outputs and compute their softmax
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            // Print the 10 most likely results
            foreach (var p in softmax
                     .Select((x, i) => new { Label = classLabels[i], Confidence = x })
                     .OrderByDescending(x => x.Confidence)
                     .Take(10))
            {
                Console.WriteLine($"{p.Label} with confidence {p.Confidence}");
            }
        }