예제 #1
0
        /// <summary>
        /// Runs the model on the given input tensor and returns the index of the
        /// most probable class.
        /// </summary>
        /// <param name="tensor">Input tensor bound to the model input named "data".</param>
        /// <returns>Zero-based index of the highest-scoring output element.</returns>
        private int PredictImage(Tensor <float> tensor)
        {
            var inputs = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor("data", tensor)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            var output = results.First().AsEnumerable <float>().ToArray();

            // Softmax is strictly monotonic, so the argmax of the raw logits equals
            // the argmax of the softmax probabilities. Skip the exp/normalize pass
            // and the O(n log n) sort; a single linear scan finds the argmax.
            int best = 0;
            for (int i = 1; i < output.Length; i++)
            {
                if (output[i] > output[best])
                {
                    best = i;
                }
            }
            return best;
        }
예제 #2
0
File: Program.cs  Project: alecona/onnx_clo
        /// <summary>
        /// Loads an ONNX model, binds one tensor per model input from the file at
        /// <paramref name="inputPath"/>, runs inference <paramref name="iteration"/>
        /// times, and records wall-clock timestamps at each phase boundary.
        /// </summary>
        /// <param name="timestamps">Must have exactly TimingPoint.TotalCount slots.</param>
        /// <exception cref="ArgumentException">Timestamps array has the wrong size.</exception>
        static void RunModelOnnxRuntime(string modelPath, string inputPath, int iteration, DateTime[] timestamps, bool parallelExecution, GraphOptimizationLevel optLevel)
        {
            if (timestamps.Length != (int)TimingPoint.TotalCount)
            {
                throw new ArgumentException("Timestamps array must have " + (int)TimingPoint.TotalCount + " size");
            }

            timestamps[(int)TimingPoint.Start] = DateTime.Now;

            // SessionOptions wraps native resources; the original never disposed it.
            using (SessionOptions options = new SessionOptions())
            {
                if (parallelExecution)
                {
                    options.ExecutionMode = ExecutionMode.ORT_PARALLEL;
                }
                options.GraphOptimizationLevel = optLevel;

                using (var session = new InferenceSession(modelPath, options))
                {
                    timestamps[(int)TimingPoint.ModelLoaded] = DateTime.Now;
                    var inputMeta = session.InputMetadata;

                    // NOTE(review): the same file is loaded for every input name —
                    // presumably the benchmark models have a single input; confirm.
                    var container = new List <NamedOnnxValue>();
                    foreach (var name in inputMeta.Keys)
                    {
                        float[] rawData = LoadTensorFromFile(inputPath);
                        var     tensor  = new DenseTensor <float>(rawData, inputMeta[name].Dimensions);
                        container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                    }

                    timestamps[(int)TimingPoint.InputLoaded] = DateTime.Now;

                    // Run the inference. Dispose each result collection so native
                    // output buffers are released every iteration instead of
                    // accumulating until GC (the original left them to finalizers).
                    for (int i = 0; i < iteration; i++)
                    {
                        using (var results = session.Run(container))
                        {
                            Debug.Assert(results != null);
                            Debug.Assert(results.Count == 1);
                        }
                    }

                    timestamps[(int)TimingPoint.RunComplete] = DateTime.Now;
                }
            }
        }
예제 #3
0
    /// <summary>
    /// Runs the ONNX session on a flat float buffer shaped to the model's first
    /// input and returns the raw output scores.
    /// </summary>
    /// <param name="input">Flat input data matching the model's input dimensions.</param>
    /// <returns>The first output flattened to a float array.</returns>
    private float[] InferenceOnnx(float[] input)
    {
        var firstInput = session.InputMetadata.First();
        var tensor     = new DenseTensor <float>(new System.Memory <float>(input), firstInput.Value.Dimensions);

        // Wrap the tensor in the NamedOnnxValue input format OnnxRuntime expects.
        var namedInputs = new List <NamedOnnxValue> {
            NamedOnnxValue.CreateFromTensor(firstInput.Key, tensor)
        };

        // Execute inference and flatten the first output.
        var outputs = session.Run(namedInputs);
        return outputs.First().AsTensor <float>().ToArray();
    }
        /// <summary>
        /// Returns face landmarks.
        /// </summary>
        /// <param name="image">Bitmap</param>
        /// <returns>Points</returns>
        /// <summary>
        /// Returns face landmarks for the given image.
        /// </summary>
        /// <param name="image">Bitmap; landmarks are scaled back to its original size.</param>
        /// <returns>Landmark points in original-image coordinates.</returns>
        public Point[] Forward(Bitmap image)
        {
            var size = new Size(112, 112);

            using var clone = Imaging.Resize(image, size);
            int width     = clone.Width;
            int height    = clone.Height;
            var inputMeta = _session.InputMetadata;
            var name      = inputMeta.Keys.ToArray()[0];

            // pre-processing: NCHW float tensor scaled to [0, 1]
            var dimentions = new int[] { 1, 3, height, width };
            var tensors    = clone.ToFloatTensor(true);

            tensors.Operator(255.0f, Vector.Div);

            // normalizers for mobilenet_se
            //tensors.Operator(new float[] { 0.485f, 0.456f, 0.406f }, Vector.Sub);
            //tensors.Operator(new float[] { 0.229f, 0.224f, 0.225f }, Vector.Div);
            var inputData = tensors.Merge(true);

            // session run
            var t      = new DenseTensor <float>(inputData, dimentions);
            var inputs = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor(name, t)
            };

            // Dispose the whole result collection; the original disposed only the
            // individual values and leaked the collection itself.
            using (var results = _session.Run(inputs))
            {
                var resultArray = results.ToArray();
                var confidences = resultArray[resultArray.Length - 1].AsTensor <float>().ToArray();
                var points      = new Point[confidences.Length / 2];

                // Outputs are packed as normalized (x, y) pairs; scale them back to
                // the ORIGINAL image size, not the 112x112 clone. The original's
                // `i < (length = confidences.Length)` reassignment is dropped —
                // it only obscured a plain bounds check.
                for (int i = 0, j = 0; i < confidences.Length; i += 2)
                {
                    points[j++] = new Point(
                        (int)(confidences[i + 0] * image.Width),
                        (int)(confidences[i + 1] * image.Height));
                }

                return points;
            }
        }
예제 #5
0
        /// <summary>
        /// Verifies that feeding only an input named "input" (instead of the model's
        /// required "data_0") fails with a missing-input error.
        /// </summary>
        private void TestGpu()
        {
            // TODO: execute based on test pool directly (cpu or gpu)
            // Pick the device from the TESTONGPU environment variable when present.
            var gpu     = Environment.GetEnvironmentVariable("TESTONGPU");
            var opened  = gpu == null ? OpenSessionSqueezeNet() : OpenSessionSqueezeNet(Int32.Parse(gpu));
            var session = opened.Item1;
            var tensor  = opened.Item3;

            var feed = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor <float>("input", tensor)
            };

            var ex = Assert.Throws <OnnxRuntimeException>(() => session.Run(feed));
            Assert.Equal("[ErrorCode:InvalidArgument] Missing required inputs: data_0", ex.Message);
            session.Dispose();
        }
예제 #6
0
        /// <summary>
        /// Verifies that feeding the same input name twice is rejected by Run.
        /// </summary>
        private void ThrowDuplicateInput()
        {
            var opened  = OpenSessionSqueezeNet();
            var session = opened.Item1;
            var value   = NamedOnnxValue.CreateFromTensor <float>("data_0", opened.Item3);

            // Add the identical named value twice to trigger the duplicate check.
            var feed = new List <NamedOnnxValue> { value, value };

            var ex = Assert.Throws <OnnxRuntimeException>(() => session.Run(feed));
            Assert.Equal("[ErrorCode:InvalidArgument] duplicated input name", ex.Message);
            session.Dispose();
        }
예제 #7
0
        /// <summary>
        /// Verifies that an int tensor fed to the model's float input is rejected.
        /// </summary>
        private void ThrowWrongInputType()
        {
            var opened    = OpenSessionSqueezeNet();
            var session   = opened.Item1;
            var inputMeta = session.InputMetadata;

            // Cast the float test data to int to provoke the type mismatch.
            int[] asInts    = opened.Item2.Select(v => (int)v).ToArray();
            var   badTensor = new DenseTensor <int>(asInts, inputMeta["data_0"].Dimensions);
            var   feed      = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor <int>("data_0", badTensor)
            };

            var ex = Assert.Throws <OnnxRuntimeException>(() => session.Run(feed));
            Assert.Equal("[ErrorCode:InvalidArgument] Unexpected input data type. Actual: (class onnxruntime::NonOnnxType<int>) , expected: (class onnxruntime::NonOnnxType<float>)", ex.Message);
            session.Dispose();
        }
예제 #8
0
        /// <summary>
        /// End-to-end smoke test: loads squeezenet, feeds the bench input to every
        /// model input, and compares the single softmax output against expectations.
        /// </summary>
        private void CanRunInferenceOnAModel()
        {
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet.onnx");

            using (var session = new InferenceSession(modelPath))
            {
                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();

                float[] inputData = LoadTensorFromFile(@"bench.in"); // this is the data for only one input tensor for this model

                foreach (var name in inputMeta.Keys)
                {
                    Assert.Equal(typeof(float), inputMeta[name].ElementType);
                    Assert.True(inputMeta[name].IsTensor);
                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                // Run the inference and dispose the results (the original leaked them).
                using (var results = session.Run(container))
                {
                    Assert.Equal(1, results.Count);

                    float[] expectedOutput = LoadTensorFromFile(@"bench.expected_out");
                    // validate the results
                    foreach (var r in results)
                    {
                        Assert.Equal("softmaxout_1", r.Name);

                        var   resultTensor       = r.AsTensor <float>();
                        int[] expectedDimensions = { 1, 1000, 1, 1 };  // hardcoded for now for the test data
                        Assert.Equal(expectedDimensions.Length, resultTensor.Rank);

                        var resultDimensions = resultTensor.Dimensions;
                        for (int i = 0; i < expectedDimensions.Length; i++)
                        {
                            Assert.Equal(expectedDimensions[i], resultDimensions[i]);
                        }

                        // Reuse resultTensor instead of calling AsTensor a second time.
                        var resultArray = resultTensor.ToArray();
                        Assert.Equal(expectedOutput.Length, resultArray.Length);
                        Assert.Equal(expectedOutput, resultArray, new floatComparer());
                    }
                }
            }
        }
예제 #9
0
        /// <summary>
        /// Classifies a pre-tokenized sentence with a BERT sentiment ONNX model and
        /// prints the raw output scores.
        /// </summary>
        static void Main(string[] args)
        {
            // data pre-processing
            // REMEMBER the tokenizer + vocab
            // see the project https://github.com/Microsoft/BlingFire
            // 128 token ids, already padded with zeros.
            var t = new long[] {
                102, 4714, 395, 1538, 2692, 103, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0
            };

            // Wrap the token buffer directly as a 1x128 tensor instead of copying
            // it element by element through the indexer.
            var input = new DenseTensor <long>(t, new[] { 1, 128 });

            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("input_ids", input)
            };

            // sentiments classification
            var modelFilePath = "c:\\temp\\BERTsentiment.onnx";

            using var session = new InferenceSession(modelFilePath);
            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // show result (first output is assumed to be a 1xN float tensor)
            var values = (DenseTensor <float>)results.First().Value;

            for (var i = 0; i < values.Length; i++)
            {
                Console.WriteLine(values[0, i]);
            }
            Console.ReadKey();
        }
예제 #10
0
        /// <summary>
        /// Writes the model to disk, builds random inputs matching its metadata,
        /// and returns the inference wall-clock time in milliseconds. The first
        /// call per model performs an untimed warm-up run.
        /// </summary>
        static float Run(ModelProto model)
        {
            model.WriteFile(model.Graph.Name);

            // SessionOptions is IDisposable; the original leaked it.
            using (SessionOptions options = new SessionOptions())
            {
                options.SetSessionGraphOptimizationLevel(2);

                using (var session = new InferenceSession(model.Graph.Name, options))
                {
                    var inputMeta = session.InputMetadata;
                    var container = new List <NamedOnnxValue>();
                    foreach (var key in inputMeta.Keys)
                    {
                        if (inputMeta[key].ElementType == typeof(Int64))
                        {
                            container.Add(NamedOnnxValue.CreateFromTensor <Int64>(key,
                                                                                  GetIntTensor(inputMeta[key].Dimensions, key)));
                        }
                        else if (inputMeta[key].ElementType == typeof(double))
                        {
                            container.Add(NamedOnnxValue.CreateFromTensor <double>(key,
                                                                                   GetRandomDoubleTensor(inputMeta[key].Dimensions)));
                        }
                        else
                        {
                            container.Add(NamedOnnxValue.CreateFromTensor <float>(key,
                                                                                  GetRandomTensor(inputMeta[key].Dimensions)));
                        }
                    }

                    // Warm up once per model; dispose the warm-up results
                    // (the original leaked them).
                    if (!warmed.Contains(model.Graph.Name))
                    {
                        session.Run(container).Dispose();
                        warmed.Add(model.Graph.Name);
                    }

                    stopWatch.Reset();
                    stopWatch.Start();
                    var results = session.Run(container);
                    stopWatch.Stop();
                    results.Dispose();
                    return (float)stopWatch.Elapsed.Ticks / TimeSpan.TicksPerMillisecond;
                }
            }
        }
예제 #11
0
        /// <summary>
        /// Runs the "cnn" MNIST model on raw grayscale pixel bytes (0-255) and returns
        /// the predicted digit together with pseudo-probabilities.
        /// </summary>
        /// <param name="imageBytes">Flat 28x28 pixel intensities.</param>
        public JsonResult predictCNN([FromBody] List <byte> imageBytes)
        {
            // Scale pixels to [0, 1]. (The original also built an unused 28x28
            // reshaped copy; removed.)
            float[]          floatArray       = imageBytes.Select(i => Convert.ToSingle(i / 255.0)).ToArray();
            InferenceSession inferenceSession = _inferenceSessions["cnn"];
            var              tensor           = new DenseTensor <float>(floatArray, inferenceSession.InputMetadata["Input3"].Dimensions);

            // Dispose the run results (the original leaked them).
            using var results = inferenceSession.Run(new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor("Input3", tensor)
            });

            var weights = results.First().AsTensor <float>().ToList();

            // Shift scores to be non-negative, then normalize ONCE. The original's
            // lazy LINQ re-evaluated Min()/Sum() for every element (O(n^2)).
            float   min     = weights.Min();
            float[] shifted = weights.Select(x => x + Math.Abs(min)).ToArray();
            float   total   = shifted.Sum();
            float[] probs   = shifted.Select(x => x / total).ToArray();

            // Argmax; >= matches the original tuple-Max tie-break (highest index wins).
            int pred = 0;
            for (int i = 1; i < probs.Length; i++)
            {
                if (probs[i] >= probs[pred])
                {
                    pred = i;
                }
            }

            return Json(new { prediction = pred, probabilities = probs });
        }
예제 #12
0
        /// <summary>
        /// Recomputes the style vector from the style image when the cached one is
        /// stale, and records the elapsed time.
        /// </summary>
        private void UpdateStyleVector()
        {
            Update?.Invoke(this, new EffectGraphEventArgs(GraphEvent.CalculateStyle));

            var timer = Stopwatch.StartNew();

            if (!effectParams.IsStyleVectorValid)
            {
                var feed = new NamedOnnxValue[]
                {
                    NamedOnnxValue.CreateFromTensor(style.InputImage, effectParams.Style)
                };

                using (var results = style.Session.Run(feed))
                {
                    // Clone so the vector outlives the disposed result collection.
                    effectParams.StyleVector = results.Single().AsTensor <float>().Clone();
                }
            }
            effectParams.StyleTime = timer.Elapsed;
        }
예제 #13
0
        /// <summary>
        /// Verifies the FLOAT16 echo model returns its 1x5 input unchanged.
        /// </summary>
        private void TestModelInputFLOAT16()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_FLOAT16.pb");

            using (var session = new InferenceSession(modelPath))
            {
                var data     = new float[] { 1.0f, 2.0f, -3.0f, float.MinValue, float.MaxValue };
                var tensorIn = new DenseTensor <float>(data, new int[] { 1, 5 });
                var feed     = new List <NamedOnnxValue>
                {
                    NamedOnnxValue.CreateFromTensor("input", tensorIn)
                };

                using (var res = session.Run(feed))
                {
                    // The model echoes its input, so output must equal input.
                    var tensorOut = res.First().AsTensor <float>();
                    Assert.True(tensorOut.SequenceEqual(tensorIn));
                }
            }
        }
예제 #14
0
        /// <summary>
        /// Verifies the STRING echo model returns its 1x5 string input unchanged.
        /// </summary>
        private void TestModelInputSTRING()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_STRING.onnx");

            using (var session = new InferenceSession(modelPath))
            {
                var data     = new string[] { "a", "c", "d", "z", "f" };
                var tensorIn = new DenseTensor <string>(data, new int[] { 1, 5 });
                var feed     = new List <NamedOnnxValue>
                {
                    NamedOnnxValue.CreateFromTensor("input", tensorIn)
                };

                using (var res = session.Run(feed))
                {
                    // The model echoes its input, so output must equal input.
                    var tensorOut = res.First().AsTensor <string>();
                    Assert.True(tensorOut.SequenceEqual(tensorIn));
                }
            }
        }
예제 #15
0
        /// <summary>
        /// Verifies that feeding an input name the model does not declare is rejected.
        /// </summary>
        private void ThrowExtraInputs()
        {
            var opened  = OpenSessionSqueezeNet();
            var session = opened.Item1;
            var tensor  = opened.Item3;

            // One valid input plus one the model doesn't know about.
            var feed = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor <float>("data_0", tensor),
                NamedOnnxValue.CreateFromTensor <float>("extra", tensor)
            };

            var ex = Assert.Throws <OnnxRuntimeException>(() => session.Run(feed));
            Assert.StartsWith("[ErrorCode:InvalidArgument] Invalid Feed Input Names: extra. Valid input names are: ", ex.Message);
            session.Dispose();
        }
예제 #16
0
        /// <summary>
        /// Converts a sentence into a 1x256x128 character-embedding tensor wrapped
        /// as a single-element NamedOnnxValue list ready to feed to an ONNX session.
        /// </summary>
        /// <param name="sentence">Input text; each character selects one embedding column.</param>
        /// <returns>Container holding the "input" tensor.</returns>
        public static List <NamedOnnxValue> StringToEmbeddedTensor(string sentence)
        {
            var     container = new List <NamedOnnxValue>();
            // 256-dim embedding per character position, 128 positions.
            // NOTE(review): a sentence longer than 128 chars would index past the
            // tensor width — confirm callers truncate first.
            NDArray features  = np.zeros((256, 128));

            for (int i = 0; i < sentence.Length; i++)
            {
                int idx = Char2Index(sentence[i]);
                // Column i receives the character's embedding, or random noise for
                // unknown characters. ("stardard_normal" is the library's spelling
                // of this method — do not "fix" it.)
                features[string.Format(":,{0}", i)] = idx != -1 ? char_embedding[string.Format("{0}", idx)] : np.random.stardard_normal(256);
            }
            // Flatten column-major layout into the flat buffer the tensor wraps.
            features = features.astype(np.float32);
            features = features.flatten();
            var tensor    = new DenseTensor <float>(features.ToArray <float>(), new int[] { 1, 256, 128 });
            var onnxvalue = NamedOnnxValue.CreateFromTensor <float>("input", tensor);

            container.Add(onnxvalue);
            return(container);
        }
예제 #17
0
        /// <summary>
        /// Runs the model on the image at <paramref name="img_path"/> and returns the
        /// best class label with its softmax confidence.
        /// </summary>
        public RecognitionInfo ProcessImage(string img_path)
        {
            var input  = PreprocessImage(img_path);
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor(Session.InputMetadata.Keys.First(), input)
            };

            // Dispose the run results (the original leaked them).
            using var results = Session.Run(inputs);

            // Softmax over the 10 raw outputs; materialize ONCE instead of
            // re-enumerating the lazy query for ToList/IndexOf/Max/Max.
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();

            float best = softmax.Max();
            int   idx  = softmax.IndexOf(best);

            return new RecognitionInfo(img_path, ClassLabels[idx], best);
        }
예제 #18
0
        /// <summary>
        /// Runs squeezenet on GPU device 0 and compares the output against the
        /// expected bench data.
        /// </summary>
        private void TestGpu()
        {
            var tuple = OpenSessionSqueezeNet(0); // run on deviceID 0

            float[] expectedOutput = TestDataLoader.LoadTensorFromFile(@"bench.expected_out");

            using (var session = tuple.Item1)
            {
                var tensor    = tuple.Item3;
                var container = new List <NamedOnnxValue>();
                container.Add(NamedOnnxValue.CreateFromTensor <float>("data_0", tensor));

                // Dispose the run results (the original leaked them).
                using (var res = session.Run(container))
                {
                    var resultArray = res.First().AsTensor <float>().ToArray();
                    Assert.Equal(expectedOutput, resultArray, new FloatComparer());
                }
            }
        }
예제 #19
0
        /// <summary>
        /// Recomputes the identity vector from the scaled content image when needed,
        /// and records the elapsed time.
        /// </summary>
        private void UpdateIdentityTransform()
        {
            Update?.Invoke(this, new EffectGraphEventArgs(GraphEvent.CalculateIdentity));

            var timer = Stopwatch.StartNew();

            if (effectParams.IsIdentityRequired)
            {
                var feed = new NamedOnnxValue[]
                {
                    NamedOnnxValue.CreateFromTensor(style.InputImage, effectParams.ScaledContent)
                };

                using (var results = style.Session.Run(feed))
                {
                    // Clone so the vector outlives the disposed result collection.
                    effectParams.IdentityVector = results.Single().AsTensor <float>().Clone();
                }
            }
            effectParams.IdentityTime = timer.Elapsed;
        }
예제 #20
0
        /// <summary>
        /// Runs the model on a single input tensor and decodes the raw outputs into
        /// a string result.
        /// </summary>
        /// <param name="session">Open inference session; not disposed here.</param>
        /// <param name="oneTensor">Tensor bound to the model input named "input".</param>
        public string Run(InferenceSession session, Tensor <float> oneTensor)
        {
            var feed = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("input", oneTensor)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(feed);

            // Postprocess to get predictions (Decode runs before disposal).
            return Decode(results.ToArray());
        }
예제 #21
0
        /// <summary>
        /// Feeds a constant 32x32 input to myModel.onnx via pre-bound outputs, prints
        /// the 4x4 result and the elapsed time.
        /// </summary>
        static void Main(string[] args)
        {
            string path = System.AppContext.BaseDirectory + "myModel.onnx";

            Console.WriteLine(path);
            Tensor <float> input  = new DenseTensor <float>(new[] { 32, 32 });
            Tensor <float> output = new DenseTensor <float>(new[] { 1, 4, 4 });

            // Fill the input with the constant test value e.
            for (int y = 0; y < 32; y++)
            {
                for (int x = 0; x < 32; x++)
                {
                    input[y, x] = (float)Math.E;
                }
            }

            //Console.WriteLine(input.GetArrayString());

            // Setup inputs
            List <NamedOnnxValue> inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("Input", input.Reshape(new [] { 1, 32, 32 }).ToDenseTensor()),
            };
            // Setup outputs (pre-allocated; Run writes into them)
            List <NamedOnnxValue> outputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("Output", output),
            };

            Stopwatch stopWatch = new Stopwatch();

            stopWatch.Start();

            // Run inference. InferenceSession holds native memory and is
            // IDisposable; the original never disposed it.
            using (InferenceSession session = new InferenceSession(path))
            {
                session.Run(inputs, outputs);
            }

            output = outputs[0].AsTensor <float>();
            Console.WriteLine(output.Reshape(new[] { 4, 4 }).ToDenseTensor().GetArrayString());

            stopWatch.Stop();

            Console.WriteLine(stopWatch.ElapsedMilliseconds.ToString());
        }
예제 #22
0
        /// <summary>
        /// Decodes a base64-encoded image, normalizes it ImageNet-style and returns
        /// the label of the highest-confidence class.
        /// </summary>
        /// <param name="path">Base64-encoded image bytes (despite the parameter name).</param>
        public string ProcessImage(string path)
        {
            const int TargetWidth  = 224;
            const int TargetHeight = 224;

            // Dispose GDI+ objects; the original leaked the stream, Image and the
            // resized Bitmap. (Image.FromStream needs the stream alive for the
            // image's lifetime; using-declarations dispose in reverse order.)
            // NOTE(review): assumes ResizeImage returns a caller-owned Bitmap — confirm.
            using var stream = new MemoryStream(Convert.FromBase64String(path));
            using var image  = Image.FromStream(stream);
            using var bitmap = ResizeImage(image, TargetWidth, TargetHeight);

            // Convert pixels to a 1x3xHxW tensor with per-channel normalization.
            var input  = new DenseTensor <float>(new[] { 1, 3, TargetHeight, TargetWidth });
            var mean   = new[] { 0.485f, 0.456f, 0.406f };
            var stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < TargetHeight; y++)
            {
                for (int x = 0; x < TargetWidth; x++)
                {
                    var color = bitmap.GetPixel(x, y);
                    input[0, 0, y, x] = ((color.R / 255f) - mean[0]) / stddev[0];
                    input[0, 1, y, x] = ((color.G / 255f) - mean[1]) / stddev[1];
                    input[0, 2, y, x] = ((color.B / 255f) - mean[2]) / stddev[2];
                }
            }

            // Model input name "data" is fixed by the model file.
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("data", input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Softmax over the 1000 class scores, then take the top label.
            // First() replaces FirstOrDefault(): the sequence is never empty here,
            // and the original FirstOrDefault().Label was a latent NRE.
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            return softmax
                   .Select((x, i) => new { Label = classLabels[i], Confidence = x })
                   .OrderByDescending(x => x.Confidence).First().Label;
        }
예제 #23
0
        /// <summary>
        /// Loads one image, center-crops it to 28x28 and classifies it, returning the
        /// predicted label and its softmax confidence.
        /// </summary>
        private static Prediction OneImgRecognition(string path)
        {
            using var image = Image.Load <Rgb24>(path);
            const int TargetWidth  = 28;
            const int TargetHeight = 28;

            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop,
                });
            });

            // Single-channel input: only the red channel and only mean[0]/stddev[0]
            // are used.
            var input  = new DenseTensor <float>(new[] { 1, 1, TargetHeight, TargetWidth });
            var mean   = new[] { 0.485f, 0.456f, 0.406f };
            var stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                }
            }

            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("Input3", input),
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            var output = results.First().AsEnumerable <float>().ToArray();
            var sum    = output.Sum(x => (float)Math.Exp(x));

            // Materialize the softmax ONCE; the original re-enumerated the lazy
            // query separately for Max() and ToList().IndexOf().
            var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();

            float confidence = softmax.Max();
            int   label      = softmax.IndexOf(confidence);

            return new Prediction(path, label, confidence);
        }
예제 #24
0
File: NNP.cs  Project: TimSyn/s02170686
        /// <summary>
        /// Crops the image to a 28x28 grayscale input, runs the model and returns
        /// the numeric label of the most confident class.
        /// </summary>
        /// <param name="image">Image to classify; mutated (resized/grayscaled) in place.</param>
        public int LoadAndPredict(Image <Rgb24> image)
        {
            // using var image =  Image.Load<Rgb24>(img_name);

            const int TargetWidth  = 28;
            const int TargetHeight = 28;

            image.Mutate(ctx =>
            {
                ctx.Resize(new ResizeOptions {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop
                }).Grayscale();
            });

            var input = new DenseTensor <float>(new[] { 1, 1, TargetHeight, TargetWidth });

            // Copy the red channel (grayscale after Mutate) scaled to [0, 1].
            for (int row = 0; row < TargetHeight; row++)
            {
                Span <Rgb24> pixels = image.GetPixelRowSpan(row);

                for (int col = 0; col < TargetWidth; col++)
                {
                    input[0, 0, row, col] = pixels[col].R / 255.0f;
                }
            }

            using var session = new InferenceSession(model_name);
            var feed = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor(session.InputMetadata.Keys.First(), input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(feed);

            // Softmax over the raw scores, then pick the most confident label.
            var scores = results.First().AsEnumerable <float>().ToArray();
            var expSum = scores.Sum(s => (float)Math.Exp(s));
            var ranked = scores
                         .Select((s, i) => new { Label = classLabels[i], Confidence = (float)Math.Exp(s) / expSum })
                         .OrderByDescending(p => p.Confidence);

            return Int32.Parse(ranked.First().Label);
        }
예제 #25
0
        /// <summary>
        /// Applies the transformer network to the content image with the given style
        /// vector and returns the stylised tensor, recording the elapsed time.
        /// </summary>
        private Tensor <float> TransformContent(Tensor <float> styleInput)
        {
            Update?.Invoke(this, new EffectGraphEventArgs(GraphEvent.TransformContent));
            var timer = Stopwatch.StartNew();

            Tensor <float> stylised;
            var feed = new NamedOnnxValue[]
            {
                NamedOnnxValue.CreateFromTensor(transformer.ContentImage, effectParams.Content),
                NamedOnnxValue.CreateFromTensor(transformer.StyleVector, styleInput)
            };

            using (var results = transformer.Session.Run(feed))
            {
                // Clone so the tensor survives disposal of the result collection.
                stylised = results.Single().AsTensor <float>().Clone();
            }

            effectParams.TransformTime = timer.Elapsed;
            return stylised;
        }
예제 #26
0
        /// <summary>
        /// Verifies that an int tensor fed to the model's float input is rejected.
        /// Only the message prefix is compared because the text differs per platform.
        /// </summary>
        private void ThrowWrongInputType()
        {
            var opened    = OpenSessionSqueezeNet();
            var session   = opened.Item1;
            var inputMeta = session.InputMetadata;

            // Cast the float test data to int to provoke the type mismatch.
            int[] asInts    = opened.Item2.Select(v => (int)v).ToArray();
            var   badTensor = new DenseTensor <int>(asInts, inputMeta["data_0"].Dimensions);
            var   feed      = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor <int>("data_0", badTensor)
            };

            var ex = Assert.Throws <OnnxRuntimeException>(() => session.Run(feed));

            // TODO: message is diff in LInux. Use substring match
            var msg = ex.ToString().Substring(0, 101);
            Assert.Equal("Microsoft.ML.OnnxRuntime.OnnxRuntimeException: [ErrorCode:InvalidArgument] Unexpected input data type", msg);
            session.Dispose();
        }
예제 #27
0
        /// <summary>
        /// Verifies that a 1x3 tensor fed to squeezenet's image input fails with a
        /// dimension-mismatch error.
        /// </summary>
        private void ThrowWrongDimensions()
        {
            var opened  = OpenSessionSqueezeNet();
            var session = opened.Item1;

            // Deliberately wrong shape: the model expects a 4-D image tensor.
            var badTensor = new DenseTensor <float>(new float[] { 0.1f, 0.2f, 0.3f }, new int[] { 1, 3 });
            var feed      = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor <float>("data_0", badTensor)
            };

            var ex = Assert.Throws <OnnxRuntimeException>(() => session.Run(feed));

            Assert.True(
                !string.IsNullOrEmpty(ex.Message) &&
                ex.Message.StartsWith("[ErrorCode:Fail]") &&
                ex.Message.Contains("X num_dims does not match W num_dims. X: {1,3} W: {64,3,3,3}")
                );
            session.Dispose();
        }
예제 #28
0
        /// <summary>
        /// HTTP-triggered function: scores a wine review against three ONNX models
        /// (points, price, variety) and returns each model's top-3 predictions.
        /// </summary>
        public static async Task <IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
            ILogger log, ExecutionContext context)
        {
            log.LogInformation("C# HTTP trigger function processed a request.");

            string review = req.Query["review"];

            string  requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            dynamic data        = JsonConvert.DeserializeObject(requestBody);

            // Query string wins; fall back to the JSON body.
            review = review ?? data?.review;

            var models = new Dictionary <string, string>();

            models.Add("points", GetFileAndPathFromStorage(context, log, "model327", "pipeline_points_range.onnx"));
            models.Add("price", GetFileAndPathFromStorage(context, log, "model327", "pipeline_price_range.onnx"));
            models.Add("variety", GetFileAndPathFromStorage(context, log, "model327", "pipeline_variety.onnx"));

            var inputTensor = new DenseTensor <string>(new string[] { review }, new int[] { 1, 1 });
            //create input data for session.
            var input = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor <string>("input", inputTensor)
            };

            //create now object points: result
            var inferenceResults = new Dictionary <string, IDictionary <string, float> >();

            foreach (var model in models)
            {
                log.LogInformation($"Start inference session for {model.Key}");

                // InferenceSession holds native resources; dispose per model
                // (the original leaked one session per model per request).
                using (var session = new InferenceSession(model.Value))
                {
                    var output          = session.Run(input).ToList().Last().AsEnumerable <NamedOnnxValue>();
                    var inferenceResult = output.First().AsDictionary <string, float>();
                    var topThreeResult  = inferenceResult.OrderByDescending(dict => dict.Value).Take(3)
                                          .ToDictionary(pair => pair.Key, pair => pair.Value);
                    // Log text fixed: three results are kept, not five.
                    log.LogInformation($"Top three results for {model.Key} {topThreeResult}");
                    inferenceResults.Add(model.Key, topThreeResult);
                    Console.Write(inferenceResult);
                }
            }

            return new JsonResult(inferenceResults);
        }
예제 #29
0
        /// <summary>
        /// Classifies an image (from bytes when provided, otherwise from the path)
        /// and returns the best label with its softmax confidence.
        /// </summary>
        public PredictionResult Predict(string ImgPath, byte[] img = null)
        {
            var input = (img == null) ? ProcessImage(ImgPath) : ProcessImage(img);

            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor(Session.InputMetadata.Keys.First(), input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = Session.Run(inputs);

            // Softmax over the 1000 raw outputs; materialize ONCE instead of
            // re-enumerating the lazy query for ToList/IndexOf/Max/Max.
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();

            float best = softmax.Max();
            int   idx  = softmax.IndexOf(best);

            return new PredictionResult(classLabels[idx], ImgPath, best);
        }
예제 #30
0
File: Model.cs  Project: rodion-s/s02170224
        /// <summary>
        /// Runs the model on a preprocessed tensor and returns the best class with
        /// its softmax confidence for the given image path.
        /// </summary>
        private PredictionResult Predict(DenseTensor <float> input, string single_image_path)
        {
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("data", input)
            };

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Softmax over the 1000 raw outputs; materialize ONCE instead of
            // re-enumerating the lazy query for Max() and ToList().IndexOf().
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();

            var confidence = softmax.Max();
            var class_idx  = softmax.IndexOf(confidence);

            return new PredictionResult(single_image_path, LabelMap.classLabels[class_idx], confidence);
        }