Example #1
        public RecognitionInfo ProcessImage(string img_path)
        {
            var input  = PreprocessImage(img_path);
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor(Session.InputMetadata.Keys.First(), input)
            };
            var Results = Session.Run(inputs);

            // Get the 10 outputs and compute softmax over them
            var output  = Results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            RecognitionInfo recognitionResult = new RecognitionInfo(img_path, ClassLabels[softmax.ToList().IndexOf(softmax.Max())], softmax.Max());

            return(recognitionResult);
        }
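A minimal sketch of the PreprocessImage helper that Example #1 assumes but does not show, patterned on the preprocessing used in Example #5 (224x224 RGB input, ImageNet mean/stddev normalization, System.Drawing); the actual helper in the original project may differ:

        private static DenseTensor <float> PreprocessImage(string imgPath)
        {
            const int TargetWidth  = 224;
            const int TargetHeight = 224;

            using var image  = Image.FromFile(imgPath);
            using var bitmap = new Bitmap(image, TargetWidth, TargetHeight);

            // NCHW tensor normalized with the usual ImageNet mean/stddev (assumed values).
            var input  = new DenseTensor <float>(new[] { 1, 3, TargetHeight, TargetWidth });
            var mean   = new[] { 0.485f, 0.456f, 0.406f };
            var stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < TargetHeight; y++)
            {
                for (int x = 0; x < TargetWidth; x++)
                {
                    var color = bitmap.GetPixel(x, y);
                    input[0, 0, y, x] = ((color.R / 255f) - mean[0]) / stddev[0];
                    input[0, 1, y, x] = ((color.G / 255f) - mean[1]) / stddev[1];
                    input[0, 2, y, x] = ((color.B / 255f) - mean[2]) / stddev[2];
                }
            }
            return input;
        }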
Example #2
        public JsonResult predictCNN([FromBody] List <byte> imageBytes)
        {
            // Scale the raw image bytes to [0, 1] floats.
            float[]          floatArray       = imageBytes.Select(i => Convert.ToSingle(i / 255.0)).ToArray();
            InferenceSession inferenceSession = _inferenceSessions["cnn"];
            var              tensor           = new DenseTensor <float>(floatArray, inferenceSession.InputMetadata["Input3"].Dimensions);
            var              results          = inferenceSession.Run(new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor("Input3", tensor)
            }).ToArray();
            var weights = results[0].AsTensor <float>().ToList();

            // Shift the raw scores so they are non-negative, then normalize them to sum to 1.
            var minWeight = weights.Min();
            var shifted   = weights.Select(x => x + Math.Abs(minWeight)).ToList();
            var totalSum  = shifted.Sum();
            var probs     = shifted.Select(x => x / totalSum).ToArray();
            var pred          = probs.Select((n, i) => (Number: n, Index: i)).Max().Index;
            var WrappedReturn = new { prediction = pred, probabilities = probs };

            return(Json(WrappedReturn));
        }
Example #3
        private void TestModelInputSTRING()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_STRING.onnx");

            using (var session = new InferenceSession(modelPath))
            {
                var container = new List <NamedOnnxValue>();
                var tensorIn  = new DenseTensor <string>(new string[] { "a", "c", "d", "z", "f" }, new int[] { 1, 5 });
                var nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);
                container.Add(nov);
                using (var res = session.Run(container))
                {
                    var tensorOut = res.First().AsTensor <string>();
                    Assert.True(tensorOut.SequenceEqual(tensorIn));
                }
            }
        }
Example #4
        private void TestModelInputUINT16()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_UINT16.pb");

            using (var session = new InferenceSession(modelPath))
            {
                var container = new List <NamedOnnxValue>();
                var tensorIn  = new DenseTensor <UInt16>(new UInt16[] { 1, 2, 3, UInt16.MinValue, UInt16.MaxValue }, new int[] { 1, 5 });
                var nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);
                container.Add(nov);
                using (var res = session.Run(container))
                {
                    var tensorOut = res.First().AsTensor <UInt16>();
                    Assert.True(tensorOut.SequenceEqual(tensorIn));
                }
            }
        }
Example #5
        public string ProcessImage(string path)
        {
            Image image = Image.FromStream(new MemoryStream(Convert.FromBase64String(path)));

            const int TargetWidth  = 224;
            const int TargetHeight = 224;

            var bitmap = ResizeImage(image, TargetWidth, TargetHeight);

            // Convert the pixels to a tensor and normalize them
            var input  = new DenseTensor <float>(new[] { 1, 3, TargetHeight, TargetWidth });
            var mean   = new[] { 0.485f, 0.456f, 0.406f };
            var stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < TargetHeight; y++)
            {
                for (int x = 0; x < TargetWidth; x++)
                {
                    var color = bitmap.GetPixel(x, y);
                    input[0, 0, y, x] = ((color.R / 255f) - mean[0]) / stddev[0];
                    input[0, 1, y, x] = ((color.G / 255f) - mean[1]) / stddev[1];
                    input[0, 2, y, x] = ((color.B / 255f) - mean[2]) / stddev[2];
                }
            }

            // Prepare the neural network input. The input name is defined in the model file
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("data", input)
            };

            //Console.WriteLine("Predicting contents of image...");
            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Get the 1000 outputs and compute softmax over them
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            return(softmax
                   .Select((x, i) => new { Label = classLabels[i], Confidence = x })
                   .OrderByDescending(x => x.Confidence).FirstOrDefault().Label);
        }
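A minimal sketch of the ResizeImage helper called by Example #5 but not shown in the listing, using System.Drawing (illustrative only; the original implementation may differ):

        private static Bitmap ResizeImage(Image image, int width, int height)
        {
            // Draw the source image into a new bitmap of the target size.
            var bitmap = new Bitmap(width, height);
            using (var graphics = Graphics.FromImage(bitmap))
            {
                graphics.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.HighQualityBicubic;
                graphics.DrawImage(image, 0, 0, width, height);
            }
            return bitmap;
        }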
Example #6
        static void Main(string[] args)
        {
            string path = System.AppContext.BaseDirectory + "myModel.onnx";

            Console.WriteLine(path);
            Tensor <float> input  = new DenseTensor <float>(new[] { 32, 32 });
            Tensor <float> output = new DenseTensor <float>(new[] { 1, 4, 4 });

            for (int y = 0; y < 32; y++)
            {
                for (int x = 0; x < 32; x++)
                {
                    input[y, x] = (float)Math.E;
                }
            }

            //Console.WriteLine(input.GetArrayString());

            // Setup inputs
            List <NamedOnnxValue> inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("Input", input.Reshape(new [] { 1, 32, 32 }).ToDenseTensor()),
            };
            // Setup outputs
            List <NamedOnnxValue> outputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("Output", output),
            };

            Stopwatch stopWatch = new Stopwatch();

            stopWatch.Start();

            // Run inference
            InferenceSession session = new InferenceSession(path);

            session.Run(inputs, outputs);
            output = outputs[0].AsTensor <float>();
            Console.WriteLine(output.Reshape(new[] { 4, 4 }).ToDenseTensor().GetArrayString());

            stopWatch.Stop();

            Console.WriteLine(stopWatch.ElapsedMilliseconds.ToString());
        }
Example #7
        private static Prediction OneImgRecognition(string path)
        {
            using var image = Image.Load <Rgb24>(path);
            const int TargetWidth  = 28;
            const int TargetHeight = 28;

            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop,
                });
            });

            var input  = new DenseTensor <float>(new[] { 1, 1, TargetHeight, TargetWidth });
            var mean   = new[] { 0.485f, 0.456f, 0.406f };
            var stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                }
            }

            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("Input3", input),
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            float confidence = softmax.Max();
            int   label      = softmax.ToList().IndexOf(confidence);

            return(new Prediction(path, label, confidence));
        }
Example #8
File: NNP.cs Project: TimSyn/s02170686
        public int LoadAndPredict(Image <Rgb24> image)
        {
            // using var image =  Image.Load<Rgb24>(img_name);

            const int TargetWidth  = 28;
            const int TargetHeight = 28;

            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop
                }).Grayscale();
            });

            var input = new DenseTensor <float>(new[] { 1, 1, TargetHeight, TargetWidth });

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);

                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = pixelSpan[x].R / 255.0f;
                }
            }

            using var session = new InferenceSession(model_name);
            string input_name = session.InputMetadata.Keys.First();
            var    inputs     = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor(input_name, input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);
            var query   = softmax.Select((x, i) => new { Label = classLabels[i], Confidence = x })
                          .OrderByDescending(x => x.Confidence);

            return(Int32.Parse(query.First().Label));
        }
Example #9
        public static async Task <IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
            ILogger log, ExecutionContext context)
        {
            log.LogInformation("C# HTTP trigger function processed a request.");

            string review = req.Query["review"];

            string  requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            dynamic data        = JsonConvert.DeserializeObject(requestBody);

            review = review ?? data?.review;

            var models = new Dictionary <string, string>();

            models.Add("points", GetFileAndPathFromStorage(context, log, "model327", "pipeline_points_range.onnx"));
            models.Add("price", GetFileAndPathFromStorage(context, log, "model327", "pipeline_price_range.onnx"));
            models.Add("variety", GetFileAndPathFromStorage(context, log, "model327", "pipeline_variety.onnx"));

            var inputTensor = new DenseTensor <string>(new string[] { review }, new int[] { 1, 1 });
            //create input data for session.
            var input = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor <string>("input", inputTensor)
            };

            // create a dictionary to collect the per-model inference results
            var inferenceResults = new Dictionary <string, IDictionary <string, float> >();

            foreach (var model in models)
            {
                log.LogInformation($"Start inference session for {model.Key}");
                using var session   = new InferenceSession(model.Value);
                var output          = session.Run(input).ToList().Last().AsEnumerable <NamedOnnxValue>();
                var inferenceResult = output.First().AsDictionary <string, float>();
                var topThreeResult  = inferenceResult.OrderByDescending(dict => dict.Value).Take(3)
                                      .ToDictionary(pair => pair.Key, pair => pair.Value);
                log.LogInformation($"Top five results for {model.Key} {topThreeResult}");
                inferenceResults.Add(model.Key, topThreeResult);
                Console.Write(inferenceResult);
            }

            return(new JsonResult(inferenceResults));
        }
Example #10
        private void TestInferenceSessionWithByteArray()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_FLOAT.pb");

            byte[] modelData = File.ReadAllBytes(modelPath);

            using (var session = new InferenceSession(modelData))
            {
                var container = new List <NamedOnnxValue>();
                var tensorIn  = new DenseTensor <float>(new float[] { 1.0f, 2.0f, -3.0f, float.MinValue, float.MaxValue }, new int[] { 1, 5 });
                var nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);
                container.Add(nov);
                using (var res = session.Run(container))
                {
                    var tensorOut = res.First().AsTensor <float>();
                    Assert.True(tensorOut.SequenceEqual(tensorIn));
                }
            }
        }
Example #11
File: Model.cs Project: rodion-s/s02170224
        private PredictionResult Predict(DenseTensor <float> input, string single_image_path)
        {
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("data", input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Get the 1000 outputs and compute softmax over them
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            var confidence = softmax.Max();
            var class_idx  = softmax.ToList().IndexOf(confidence);


            return(new PredictionResult(single_image_path, LabelMap.classLabels[class_idx], confidence));
        }
Example #12
        private void TestPreTrainedModelsOpset7And8()
        {
            var opsets = new[] { "opset7", "opset8" };

            foreach (var opset in opsets)
            {
                var modelRoot = new DirectoryInfo(opset);
                foreach (var model in modelRoot.EnumerateDirectories())
                {
                    // TODO: dims contains 'None'. Session throws error.
                    if (model.ToString() == "test_tiny_yolov2")
                    {
                        continue;
                    }
                    try
                    {
                        //TODO: sometimes, the file name is not 'model.onnx'
                        var session    = new InferenceSession($"{opset}\\{model}\\model.onnx");
                        var inMeta     = session.InputMetadata;
                        var innodepair = inMeta.First();
                        var innodename = innodepair.Key;
                        var innodedims = innodepair.Value.Dimensions;
                        var dataIn     = LoadTensorFromFilePb($"{opset}\\{model}\\test_data_set_0\\input_0.pb");
                        var dataOut    = LoadTensorFromFilePb($"{opset}\\{model}\\test_data_set_0\\output_0.pb");
                        var tensorIn   = new DenseTensor <float>(dataIn, innodedims);
                        var nov        = new List <NamedOnnxValue>();
                        nov.Add(NamedOnnxValue.CreateFromTensor <float>(innodename, tensorIn));
                        var resnov = session.Run(nov);
                        var res    = resnov.ToArray()[0].AsTensor <float>().ToArray <float>();
                        Assert.Equal(res, dataOut, new floatComparer());
                        session.Dispose();
                    }
                    catch (Exception ex)
                    {
                        var msg = $"Opset {opset}: Model {model}: error = {ex.Message}";
                        continue; //TODO: fix it
                        //throw new Exception(msg);
                    }
                } //model
            }     //opset
        }
Example #13
        static void createSession(int imageIndex)
        {
            string modelPath = Directory.GetCurrentDirectory() + @"/pytorch_mnist.onnx";

            // Optional : Create session options and set the graph optimization level for the session
            //SessionOptions options = new SessionOptions();
            //options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_EXTENDED;
            //using (var session = new InferenceSession(modelPath, options))

            using (var session = new InferenceSession(modelPath))
            {
                //float[] inputData = LoadTensorFromFile(@"bench.in"); // this is the data for only one input tensor for this model
                Utilities.LoadTensorData();
                float[] inputData = Utilities.ImageData[imageIndex];
                string  label     = Utilities.ImageLabels[imageIndex];
                Console.WriteLine("Selected image is the number: " + label);

                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();
                //PrintInputMetadata(inputMeta);

                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                // Run the inference
                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // Get the results
                    foreach (var r in results)
                    {
                        Console.WriteLine("Output Name: {0}", r.Name);
                        int prediction = MaxProbability(r.AsTensor <float>());
                        Console.WriteLine("Prediction: " + prediction.ToString());
                        //Console.WriteLine(r.AsTensor<float>().GetArrayString());
                    }
                }
            }
        }
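A minimal sketch of the MaxProbability helper used by Example #13 but not shown: an argmax over the flattened output tensor (the original utility may differ):

        static int MaxProbability(Tensor <float> probabilities)
        {
            // Flatten the scores and return the index of the largest one.
            var scores = probabilities.ToArray();
            return Array.IndexOf(scores, scores.Max());
        }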
Example #14
        public ResultClassification PredictModel(string imageFilePath)
        {
            DenseTensor <float> TensorImage = OnnxClassifier.PreprocImage(imageFilePath);

            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor(session.InputMetadata.Keys.First(), TensorImage)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            var   output = results.First().AsEnumerable <float>().ToArray();
            float sum    = output.Sum(x => (float)Math.Exp(x));

            var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();

            string cl = LabelMap.Labels[softmax.IndexOf(softmax.Max())];
            ResultClassification result = new ResultClassification(imageFilePath, cl, softmax.Max());

            return(result);
        }
Example #15
        public float[] forward()
        {
            float[]        resData       = new float[10];
            int[]          resDimensions = { 10 };
            Tensor <float> lastResult    = new DenseTensor <float>(resData, resDimensions);
            SessionOptions options       = new SessionOptions();

            using (var session = new InferenceSession(modelPath))//, options))
            {
                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();

                Bitmap  img       = Image.FromFile(dataPath) as Bitmap;
                float[] inputData = new float[1 * 28 * 28];
                // Fill the buffer in row-major order (index = y * width + x) to match the [1, 1, 28, 28] NCHW layout.
                for (int j = 0; j < img.Height; j++)
                {
                    for (int i = 0; i < img.Width; i++)
                    {
                        Color pixel = img.GetPixel(i, j);
                        inputData[j * img.Width + i] = pixel.R / 255.0f;
                    }
                }
                int[] dimensions = { 1, 1, 28, 28 };
                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }
                // Run the inference
                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // dump the results
                    foreach (var r in results)
                    {
                        lastResult = r.AsTensor <float>().Clone();
                    }
                }
            }
            return(lastResult.ToArray <float>());
        }
Example #16
        static void RunModelOnnxRuntime(string modelPath, string inputPath, int iteration, DateTime[] timestamps)
        {
            if (timestamps.Length != (int)TimingPoint.TotalCount)
            {
                throw new ArgumentException("Timestamps array must have " + (int)TimingPoint.TotalCount + " size");
            }

            timestamps[(int)TimingPoint.Start] = DateTime.Now;

            using (var session = new InferenceSession(modelPath))
            {
                timestamps[(int)TimingPoint.ModelLoaded] = DateTime.Now;
                var inputMeta = session.InputMetadata;

                var container = new List <NamedOnnxValue>();
                foreach (var name in inputMeta.Keys)
                {
                    float[] rawData = LoadTensorFromFile(inputPath);
                    var     tensor  = new DenseTensor <float>(rawData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }



                timestamps[(int)TimingPoint.InputLoaded] = DateTime.Now;

                // Run the inference
                for (int i = 0; i < iteration; i++)
                {
                    // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue>;
                    // dispose it every iteration so the native output buffers are released.
                    using (var results = session.Run(container))
                    {
                        Debug.Assert(results != null);
                        Debug.Assert(results.Count == 1);
                    }
                }

                timestamps[(int)TimingPoint.RunComplete] = DateTime.Now;
            }
        }
Example #17
        /// <summary>
        /// Get the input tensor from an image
        /// </summary>
        /// <param name="originalImage"></param>
        /// <returns></returns>
        public float[] GetTensors(Image image)
        {
            // resize the input image to match the model's input size
            Bitmap resizedImage = ImageUtils.ResizeImage(image, _yolo2CoCoSettings.InputWidth, _yolo2CoCoSettings.InputHeight);

            // convert the image to an array of floats, and convert to a tensor input for the model
            var bitData = ImageUtils.GetNormalizedFloatArray(resizedImage);
            var Inputs  = new List <NamedOnnxValue>();
            var tensor  = new DenseTensor <float>(bitData, _inferenceSession.InputMetadata["input.1"].Dimensions);

            Inputs.Add(NamedOnnxValue.CreateFromTensor <float>("input.1", tensor));

            // run the inference and convert the IDisposable result back to a float[]
            float[] resultTensor;
            using (var results = _inferenceSession.Run(Inputs))
            {
                resultTensor = results.FirstOrDefault().AsTensor <float>().ToArray();
            }

            // return
            return(resultTensor);
        }
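Example #17 depends on ImageUtils helpers that are not included here. A minimal sketch of GetNormalizedFloatArray, assuming a planar CHW layout with pixel values scaled to [0, 1] (the actual utility and its YOLO-specific preprocessing may differ):

        public static float[] GetNormalizedFloatArray(Bitmap bitmap)
        {
            var data      = new float[3 * bitmap.Height * bitmap.Width];
            int planeSize = bitmap.Height * bitmap.Width;

            for (int y = 0; y < bitmap.Height; y++)
            {
                for (int x = 0; x < bitmap.Width; x++)
                {
                    var pixel = bitmap.GetPixel(x, y);
                    int index = y * bitmap.Width + x;
                    data[0 * planeSize + index] = pixel.R / 255f;
                    data[1 * planeSize + index] = pixel.G / 255f;
                    data[2 * planeSize + index] = pixel.B / 255f;
                }
            }
            return data;
        }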
Example #18
        public string Predict(string imagePath)
        {
            // Read image
            using (Image <Rgb24> image = Image.Load <Rgb24>(imagePath))
            {
                // Resize image
                image.Mutate(x =>
                {
                    x.Resize(new ResizeOptions
                    {
                        Size = new Size(150, 150),
                        Mode = ResizeMode.Crop
                    });
                });

                // Preprocess image
                Tensor <float> input = new DenseTensor <float>(new[] { 1, 3, 150, 150 });
                for (int y = 0; y < image.Height; y++)
                {
                    Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                    for (int x = 0; x < image.Width; x++)
                    {
                        input[0, 0, y, x] = (pixelSpan[x].R / 255f);
                        input[0, 1, y, x] = (pixelSpan[x].G / 255f);
                        input[0, 2, y, x] = (pixelSpan[x].B / 255f);
                    }
                }

                // Setup inputs
                var inputs = new List <NamedOnnxValue>
                {
                    NamedOnnxValue.CreateFromTensor("input.1", input)
                };

                IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = _session?.Run(inputs);
                float output = results.First().AsEnumerable <float>().First();
                return((output >= 0.5) ? "Male" : "Female");
            }
        }
Example #19
        public string Predict(string img)
        {
            using var image = Image.Load <Rgb24>(img);

            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop
                });
                x.Grayscale();
            });

            var input = new DenseTensor <float>(new[] { 1, 1, TargetHeight, TargetWidth });

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = pixelSpan[x].R / 255.0f;
                }
            }
            using var session = new InferenceSession(ModelFolder + ModelFile);

            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor(session.InputMetadata.Keys.First(), input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            return(classLabels[softmax.ToList().IndexOf(softmax.Max())]);
        }
Example #20
        public string Infer(float[] input)
        {
            string result_str = "";

            int[]          dimensions = { 12300 }; // the dimensions of the input are stored here
            Tensor <float> t1         = new DenseTensor <float>(input, dimensions);


            var inputs = new List <NamedOnnxValue>()
            {
                NamedOnnxValue.CreateFromTensor <float>("input", t1),
            };

            using (var results = session.Run(inputs))
            {
                // manipulate the results
                var result = results.First();
                var probs  = ((DenseTensor <float>)result.Value).ToArray();
                result_str = labels[Array.IndexOf(probs, probs.Max())];
            }
            return(result_str);
        }
Example #21
        public void Configure(IApplicationBuilder app, IHostingEnvironment env)
        {
            app.UseDeveloperExceptionPage();

            var onnxPath = Path.Combine(env.ContentRootPath, "products.onnx");
            var session  = new InferenceSession(onnxPath);

            app.Run(context =>
            {
                var inputImagePath = Path.Combine(env.ContentRootPath, "drill.jpg");
                var data           = ConvertImageToTensor(inputImagePath);
                var input          = NamedOnnxValue.CreateFromTensor <float>("data", data);
                using (var output = session.Run(new[] { input }))
                {
                    var prediction = output
                                     .First(i => i.Name == "classLabel")
                                     .AsEnumerable <string>()
                                     .First();
                    return(context.Response.WriteAsync(prediction));
                }
            });
        }
Example #22
        /// <summary>
        /// Returns segmentation mask.
        /// </summary>
        /// <param name="image">Input image</param>
        /// <returns>Segmentation mask</returns>
        public Bitmap Fit(Bitmap image)
        {
            // scaling image
            var width  = image.Width;
            var height = image.Height;
            var ratio  = 1.0f * _size / Math.Max(width, height);
            var size   = new Size(
                (int)(ratio * width),
                (int)(ratio * height));
            var resized = new Bitmap(image, size);

            // creating tensor
            Console.WriteLine("Creating image tensor...");
            var tic        = Environment.TickCount;
            var inputMeta  = _session.InputMetadata;
            var name       = inputMeta.Keys.ToArray()[0];
            var dimensions = new int[] { 1, size.Height, size.Width, 3 };
            var inputData  = Onnx.ToTensor(resized);

            resized.Dispose();
            Console.WriteLine($"Tensor was created in {Environment.TickCount - tic} ms.");

            // prediction
            Console.WriteLine("Creating segmentation mask...");
            tic = Environment.TickCount;
            var t1     = new DenseTensor <byte>(inputData, dimensions);
            var inputs = new List <NamedOnnxValue>()
            {
                NamedOnnxValue.CreateFromTensor(name, t1)
            };
            var results = _session.Run(inputs).ToArray();
            var map     = results[0].AsTensor <long>().ToArray();
            var mask    = DeepPersonLab.FromSegmentationMap(map, size.Width, size.Height);

            Console.WriteLine($"Segmentation was created in {Environment.TickCount - tic} mls.");

            // return mask
            return(new Bitmap(mask, width, height));
        }
Example #23
File: Program.cs Project: volody/onnx-demo
        static Score PredictLocal(InferenceSession session, float[] digit)
        {
            var            now = DateTime.Now;
            Tensor <float> x   = new DenseTensor <float>(digit.Length);

            for (int i = 0; i <= digit.Length - 1; i++)
            {
                x[i] = digit[i] / 255.0f;
            }

            int[] dims = { 1, 1, 28, 28 };  // hardcoded for now for the test data
            x = x.Reshape(dims);

            var input = new List <NamedOnnxValue>()
            {
                NamedOnnxValue.CreateFromTensor("Input3", x)
            };

            try
            {
                var prediction = session.Run(input).First().AsTensor <float>().ToArray();
                return(new Score()
                {
                    Status = $"Local Mode: {session}",
                    Empty = false,
                    Prediction = Array.IndexOf(prediction, prediction.Max()),
                    Scores = prediction.Select(i => System.Convert.ToDouble(i)).ToList(),
                    Time = (DateTime.Now - now).TotalSeconds
                });
            }
            catch (Exception e)
            {
                return(new Score()
                {
                    Status = e.Message
                });
            }
        }
Example #24
File: MLClass.cs Project: my-commits/Iris
        public float[] Predict(float[] inputData)
        {
            List <float> res = new List <float>();

            using (var session = new InferenceSession(modelPath, options))
            {
                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();

                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, new[] { 1, 4 });
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // dump the results
                    foreach (var r in results)
                    {
                        //Debug.WriteLine("Output for {0}", r.Name);
                        if (r.Name == "output_probability")
                        {
                            List <DisposableNamedOnnxValue> v = (List <DisposableNamedOnnxValue>)r.Value;
                            var d = v[0].AsDictionary <string, float>();

                            foreach (var item in d)
                            {
                                res.Add(item.Value);
                            }
                        }
                        //Console.WriteLine(r.AsTensor<string>().GetArrayString());
                    }
                }
            }

            return(res.ToArray());
        }
Example #25
        public static (float[], double[]) GetRawPrediction(int sampleRate, float[] samples)
        {
            var fft = GetFft(samples);
            var pcp = PitchClassProfile(fft, sampleRate);

            var inputTensor = new DenseTensor <float>(new[] { 1, 12 });

            for (var i = 0; i < 12; i++)
            {
                inputTensor[0, i] = (float)pcp[i];
            }

            var input = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("dense_1_input", inputTensor)
            };
            var session =
                new InferenceSession("models/binary_crossentropy.onnx");

            using var results = session.Run(input);

            return(results.First().AsEnumerable <float>().ToArray(), pcp);
        }
Example #26
        public void result(Tuple <DenseTensor <float>, string> input)
        {
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("data", input.Item1)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Get the 1000 outputs and compute softmax over them
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            // Print the most likely result to the console
            foreach (var p in softmax
                     .Select((x, i) => new { Label = classLabels[i], Confidence = x })
                     .OrderByDescending(x => x.Confidence)
                     .Take(1))
            {
                Console.WriteLine(input.Item2.Substring(input.Item2.LastIndexOf('\\') + 1) + " - " + p.Label + " confidence = " + p.Confidence);
            }
        }
Example #27
        static void UseApi()
        {
            string basepath  = "..\\..\\..\\testdata\\";
            string modelPath = basepath + "squeezenet.onnx";

            Debug.Assert(File.Exists(modelPath));
            // Optional : Create session options and set the graph optimization level for the session
            SessionOptions options = new SessionOptions();

            options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_EXTENDED;

            using (var session = new InferenceSession(modelPath, options))
            {
                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();

                float[] inputData = LoadTensorFromFile(basepath + "bench.in"); // this is the data for only one input tensor for this model

                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                // Run the inference
                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // dump the results
                    foreach (var r in results)
                    {
                        Console.WriteLine("Output for {0}", r.Name);
                        Console.WriteLine(r.AsTensor <float>().GetArrayString());
                    }
                }
            }
        }
Example #28
        private void TestPreTrainedModelsOpset7And8()
        {
            // 16-bit float not supported type in C#.
            var skipModels = new[] {
                "fp16_inception_v1",
                "fp16_shufflenet",
                "fp16_tiny_yolov2"
            };

            var opsets    = new[] { "opset7", "opset8" };
            var modelsDir = GetTestModelsDir();

            foreach (var opset in opsets)
            {
                var modelRoot = new DirectoryInfo(Path.Combine(modelsDir, opset));
                //var cwd = Directory.GetCurrentDirectory();
                foreach (var modelDir in modelRoot.EnumerateDirectories())
                {
                    String onnxModelFileName = null;

                    if (skipModels.Contains(modelDir.Name))
                    {
                        continue;
                    }

                    try
                    {
                        var onnxModelNames = modelDir.GetFiles("*.onnx");
                        if (onnxModelNames.Length > 1)
                        {
                            // TODO remove file "._resnet34v2.onnx" from test set
                            bool validModelFound = false;
                            for (int i = 0; i < onnxModelNames.Length; i++)
                            {
                                if (onnxModelNames[i].Name != "._resnet34v2.onnx")
                                {
                                    onnxModelNames[0] = onnxModelNames[i];
                                    validModelFound   = true;
                                }
                            }

                            if (!validModelFound)
                            {
                                var modelNamesList = string.Join(",", onnxModelNames.Select(x => x.ToString()));
                                throw new Exception($"Opset {opset}: Model {modelDir}. Can't determine model file name. Found these :{modelNamesList}");
                            }
                        }

                        onnxModelFileName = Path.Combine(modelsDir, opset, modelDir.Name, onnxModelNames[0].Name);
                        using (var session = new InferenceSession(onnxModelFileName))
                        {
                            var inMeta     = session.InputMetadata;
                            var innodepair = inMeta.First();
                            var innodename = innodepair.Key;
                            var innodedims = innodepair.Value.Dimensions;
                            for (int i = 0; i < innodedims.Length; i++)
                            {
                                if (innodedims[i] < 0)
                                {
                                    innodedims[i] = -1 * innodedims[i];
                                }
                            }

                            var testRoot = new DirectoryInfo(Path.Combine(modelsDir, opset, modelDir.Name));
                            var testData = testRoot.EnumerateDirectories("test_data*").First();
                            var dataIn   = LoadTensorFromFilePb(Path.Combine(modelsDir, opset, modelDir.Name, testData.ToString(), "input_0.pb"));
                            var dataOut  = LoadTensorFromFilePb(Path.Combine(modelsDir, opset, modelDir.Name, testData.ToString(), "output_0.pb"));
                            var tensorIn = new DenseTensor <float>(dataIn, innodedims);
                            var nov      = new List <NamedOnnxValue>();
                            nov.Add(NamedOnnxValue.CreateFromTensor <float>(innodename, tensorIn));
                            using (var resnov = session.Run(nov))
                            {
                                var res = resnov.ToArray()[0].AsTensor <float>().ToArray <float>();
                                Assert.Equal(res, dataOut, new floatComparer());
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        var msg = $"Opset {opset}: Model {modelDir}: ModelFile = {onnxModelFileName} error = {ex.Message}";
                        throw new Exception(msg);
                    }
                } //model
            }     //opset
        }
Example #29
 /// <summary>
 /// Uses an open session to score a list of NamedOnnxValues.
 /// </summary>
 /// <param name="inputNamedOnnxValues">The NamedOnnxValues to score.</param>
 /// <param name="outputColumns">The active output columns.</param>
 /// <returns>Resulting output NamedOnnxValues list.</returns>
 public IDisposableReadOnlyCollection <DisposableNamedOnnxValue> Run(List <NamedOnnxValue> inputNamedOnnxValues, List <string> outputColumns)
 {
     return(_session.Run(inputNamedOnnxValues, outputColumns));
 }
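A hypothetical call to the wrapper above; the instance name "model", the input tensor name "input", and the output column "output" are assumptions, not taken from the original code:

 var inputValues = new List <NamedOnnxValue>
 {
     NamedOnnxValue.CreateFromTensor("input", new DenseTensor <float>(new float[] { 1f, 2f, 3f, 4f }, new[] { 1, 4 }))
 };
 using (var outputs = model.Run(inputValues, new List <string> { "output" }))
 {
     var scores = outputs.First().AsTensor <float>().ToArray();
 }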
Example #30
        /// <summary>
        /// Returns face detection results.
        /// </summary>
        /// <param name="image">Bitmap</param>
        /// <returns>Rectangles</returns>
        public Rectangle[] Forward(Bitmap image)
        {
            var size = new Size(320, 240);

            using var clone = Imaging.Resize(image, size);
            int width     = clone.Width;
            int height    = clone.Height;
            var inputMeta = _session.InputMetadata;
            var name      = inputMeta.Keys.ToArray()[0];

            // pre-processing
            var dimensions = new int[] { 1, 3, height, width };
            var tensors    = clone.ToFloatTensor(true);

            tensors.Operator(new float[] { 127.0f, 127.0f, 127.0f }, Vector.Sub);
            tensors.Operator(128, Vector.Div);
            var inputData = tensors.Merge(true);

            // session run
            var t      = new DenseTensor <float>(inputData, dimensions);
            var inputs = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor(name, t)
            };
            var results     = _session.Run(inputs).ToArray();
            var confidences = results[0].AsTensor <float>().ToArray();
            var boxes       = results[1].AsTensor <float>().ToArray();
            var length      = confidences.Length;

            // post-processing
            var boxes_picked = new List <Rectangle>();

            for (int i = 0, j = 0; i < length; i += 2, j += 4)
            {
                if (confidences[i + 1] > ConfidenceThreshold)
                {
                    boxes_picked.Add(
                        Imaging.ToBox(
                            Rectangle.FromLTRB
                            (
                                (int)(boxes[j + 0] * image.Width),
                                (int)(boxes[j + 1] * image.Height),
                                (int)(boxes[j + 2] * image.Width),
                                (int)(boxes[j + 3] * image.Height)
                            )));
                }
            }

            // non-max suppression
            length = boxes_picked.Count;

            for (int i = 0; i < length; i++)
            {
                var first = boxes_picked[i];

                for (int j = i + 1; j < length; j++)
                {
                    var second = boxes_picked[j];
                    var iou    = Imaging.IoU(first, second);

                    if (iou > NmsThreshold)
                    {
                        boxes_picked.RemoveAt(j);
                        length = boxes_picked.Count;
                        j--;
                    }
                }
            }

            // dispose
            foreach (var result in results)
            {
                result.Dispose();
            }

            return(boxes_picked.ToArray());
        }