Example No. 1
        internal static NamedOnnxValue CreateNamedOnnxValueFromRawData <T>(string name, byte[] rawData, int elemWidth, int[] dimensions)
        {
            T[] typedArr = new T[rawData.Length / elemWidth];
            var typeOf   = typeof(T);

            if (typeOf == typeof(Float16) || typeOf == typeof(BFloat16))
            {
                using (var memSrcHandle = new Memory <byte>(rawData).Pin())
                    using (var memDstHandle = new Memory <T>(typedArr).Pin())
                    {
                        unsafe
                        {
                            Buffer.MemoryCopy(memSrcHandle.Pointer, memDstHandle.Pointer, typedArr.Length * elemWidth, rawData.Length);
                        }
                    }
            }
            else
            {
                Buffer.BlockCopy(rawData, 0, typedArr, 0, rawData.Length);
            }
            var dt = new DenseTensor <T>(typedArr, dimensions);

            return(NamedOnnxValue.CreateFromTensor <T>(name, dt));
        }
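
A minimal usage sketch for the helper above; the values, the element width, and the "data" input name are made up for illustration:

            // Pack six float32 values (4 bytes each) into raw bytes, then wrap them as a 2x3 tensor.
            float[] values  = { 1f, 2f, 3f, 4f, 5f, 6f };
            byte[]  rawData = new byte[values.Length * sizeof(float)];
            Buffer.BlockCopy(values, 0, rawData, 0, rawData.Length);

            NamedOnnxValue input = CreateNamedOnnxValueFromRawData <float>("data", rawData, sizeof(float), new int[] { 2, 3 });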
Example No. 2
        static void UseApi()
        {
            string basepath  = "..\\..\\..\\testdata\\";
            string modelPath = basepath + "squeezenet.onnx";

            Debug.Assert(File.Exists(modelPath));
            // Optional : Create session options and set the graph optimization level for the session
            SessionOptions options = new SessionOptions();

            options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_EXTENDED;

            using (var session = new InferenceSession(modelPath, options))
            {
                var inputMeta = session.InputMetadata;
                var container = new List <NamedOnnxValue>();

                float[] inputData = LoadTensorFromFile(basepath + "bench.in"); // this is the data for only one input tensor for this model

                foreach (var name in inputMeta.Keys)
                {
                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                // Run the inference
                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // dump the results
                    foreach (var r in results)
                    {
                        Console.WriteLine("Output for {0}", r.Name);
                        Console.WriteLine(r.AsTensor <float>().GetArrayString());
                    }
                }
            }
        }
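
LoadTensorFromFile is referenced above but not shown; a minimal sketch, assuming the test-data file holds a flat list of float values separated by commas, brackets, or whitespace (the actual format of bench.in may differ):

        static float[] LoadTensorFromFile(string filename)
        {
            var tensorData = new List <float>();

            // Assumption: every non-empty token in the file is a float value.
            string   content = File.ReadAllText(filename);
            string[] tokens  = content.Split(new char[] { ',', '[', ']', ' ', '\t', '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);

            foreach (var token in tokens)
            {
                tensorData.Add(float.Parse(token, System.Globalization.CultureInfo.InvariantCulture));
            }
            return tensorData.ToArray();
        }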
Example No. 3
        public static void Main(string[] args)
        {
            // Read paths
            string modelFilePath    = args[0];
            string imageFilePath    = args[1];
            string outImageFilePath = args[2];

            // Read image
            using Image <Rgb24> image = Image.Load <Rgb24>(imageFilePath);

            // Resize image
            float ratio = 800f / Math.Min(image.Width, image.Height);

            image.Mutate(x => x.Resize((int)(ratio * image.Width), (int)(ratio * image.Height)));

            // Preprocess image
            var            paddedHeight = (int)(Math.Ceiling(image.Height / 32f) * 32f);
            var            paddedWidth  = (int)(Math.Ceiling(image.Width / 32f) * 32f);
            Tensor <float> input        = new DenseTensor <float>(new[] { 3, paddedHeight, paddedWidth });
            var            mean         = new[] { 102.9801f, 115.9465f, 122.7717f };

            image.ProcessPixelRows(accessor =>
            {
                for (int y = paddedHeight - accessor.Height; y < accessor.Height; y++)
                {
                    Span <Rgb24> pixelSpan = accessor.GetRowSpan(y);
                    for (int x = paddedWidth - accessor.Width; x < accessor.Width; x++)
                    {
                        input[0, y, x] = pixelSpan[x].B - mean[0];
                        input[1, y, x] = pixelSpan[x].G - mean[1];
                        input[2, y, x] = pixelSpan[x].R - mean[2];
                    }
                }
            });

            // Setup inputs and outputs
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("image", input)
            };

            // Run inference
            using var session = new InferenceSession(modelFilePath);
            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Postprocess to get predictions
            var resultsArray = results.ToArray();

            float[] boxes         = resultsArray[0].AsEnumerable <float>().ToArray();
            long[]  labels        = resultsArray[1].AsEnumerable <long>().ToArray();
            float[] confidences   = resultsArray[2].AsEnumerable <float>().ToArray();
            var     predictions   = new List <Prediction>();
            var     minConfidence = 0.7f;

            for (int i = 0; i < boxes.Length; i += 4)
            {
                var index = i / 4;
                if (confidences[index] >= minConfidence)
                {
                    predictions.Add(new Prediction
                    {
                        Box        = new Box(boxes[i], boxes[i + 1], boxes[i + 2], boxes[i + 3]),
                        Label      = LabelMap.Labels[labels[index]],
                        Confidence = confidences[index]
                    });
                }
            }

            // Put boxes, labels and confidence on image and save for viewing
            using var outputImage = File.OpenWrite(outImageFilePath);
            Font font = SystemFonts.CreateFont("Arial", 16);

            foreach (var p in predictions)
            {
                image.Mutate(x =>
                {
                    x.DrawLines(Color.Red, 2f, new PointF[] {
                        new PointF(p.Box.Xmin, p.Box.Ymin),
                        new PointF(p.Box.Xmax, p.Box.Ymin),

                        new PointF(p.Box.Xmax, p.Box.Ymin),
                        new PointF(p.Box.Xmax, p.Box.Ymax),

                        new PointF(p.Box.Xmax, p.Box.Ymax),
                        new PointF(p.Box.Xmin, p.Box.Ymax),

                        new PointF(p.Box.Xmin, p.Box.Ymax),
                        new PointF(p.Box.Xmin, p.Box.Ymin)
                    });
                    x.DrawText($"{p.Label}, {p.Confidence:0.00}", font, Color.White, new PointF(p.Box.Xmin, p.Box.Ymin));
                });
            }
            image.SaveAsJpeg(outputImage);
        }
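
Prediction and Box are sample-level types rather than part of ONNX Runtime; a minimal sketch of the shape this example assumes (later examples add properties such as Path, Class, or Score):

        public class Prediction
        {
            public Box    Box        { get; set; }
            public string Label      { get; set; }
            public float  Confidence { get; set; }
        }

        public class Box
        {
            public Box(float xmin, float ymin, float xmax, float ymax)
            {
                Xmin = xmin;
                Ymin = ymin;
                Xmax = xmax;
                Ymax = ymax;
            }

            public float Xmin { get; set; }
            public float Ymin { get; set; }
            public float Xmax { get; set; }
            public float Ymax { get; set; }
        }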
Example No. 4
        /// <summary>
        /// Returns face detection results.
        /// </summary>
        /// <param name="image">Bitmap</param>
        /// <returns>Rectangles</returns>
        public Rectangle[] Forward(Bitmap image)
        {
            var size = new Size(320, 240);

            using var clone = Imaging.Resize(image, size);
            int width     = clone.Width;
            int height    = clone.Height;
            var inputMeta = _session.InputMetadata;
            var name      = inputMeta.Keys.ToArray()[0];

            // pre-processing
            var dimensions = new int[] { 1, 3, height, width };
            var tensors    = clone.ToFloatTensor(true);

            tensors.Operator(new float[] { 127.0f, 127.0f, 127.0f }, Vector.Sub);
            tensors.Operator(128, Vector.Div);
            var inputData = tensors.Merge(true);

            // session run
            var t      = new DenseTensor <float>(inputData, dimensions);
            var inputs = new List <NamedOnnxValue> {
                NamedOnnxValue.CreateFromTensor(name, t)
            };
            var results     = _session.Run(inputs).ToArray();
            var confidences = results[0].AsTensor <float>().ToArray();
            var boxes       = results[1].AsTensor <float>().ToArray();
            var length      = confidences.Length;

            // post-processing
            var boxes_picked = new List <Rectangle>();

            for (int i = 0, j = 0; i < length; i += 2, j += 4)
            {
                if (confidences[i + 1] > ConfidenceThreshold)
                {
                    boxes_picked.Add(
                        Imaging.ToBox(
                            Rectangle.FromLTRB
                            (
                                (int)(boxes[j + 0] * image.Width),
                                (int)(boxes[j + 1] * image.Height),
                                (int)(boxes[j + 2] * image.Width),
                                (int)(boxes[j + 3] * image.Height)
                            )));
                }
            }

            // non-max suppression
            length = boxes_picked.Count;

            for (int i = 0; i < length; i++)
            {
                var first = boxes_picked[i];

                for (int j = i + 1; j < length; j++)
                {
                    var second = boxes_picked[j];
                    var iou    = Imaging.IoU(first, second);

                    if (iou > NmsThreshold)
                    {
                        boxes_picked.RemoveAt(j);
                        length = boxes_picked.Count;
                        j--;
                    }
                }
            }

            // dispose
            foreach (var result in results)
            {
                result.Dispose();
            }

            return(boxes_picked.ToArray());
        }
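
A hypothetical caller for the Forward method above; detector stands for an instance of the class that defines it (its construction and the file name are not taken from the sample):

            // Detect faces in a bitmap and print the resulting rectangles.
            using (var bitmap = new Bitmap("face.jpg"))
            {
                Rectangle[] faces = detector.Forward(bitmap);

                foreach (var face in faces)
                {
                    Console.WriteLine($"Face at ({face.X}, {face.Y}), size {face.Width}x{face.Height}");
                }
            }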
Example No. 5
//===========================================================================================//

        public static async Task RecognitionAsync(IEnumerable <string> imagePaths)
        {
            var images = imagePaths.ToArray();

            tasks = new Task[images.Length];
            try
            {
                for (int i = 0; i < images.Length; i++)
                {
                    tasks[i] = Task.Factory.StartNew((imagePath) =>
                    {
                        Image <Rgb24> image = Image.Load <Rgb24>((string)imagePath, out IImageFormat format);

                        Stream imageStream = new MemoryStream();
                        image.Mutate(x =>
                        {
                            x.Resize(new ResizeOptions
                            {
                                Size = new Size(224, 224),
                                Mode = ResizeMode.Crop
                            });
                        });
                        image.Save(imageStream, format);

                        Tensor <float> input = new DenseTensor <float>(new[] { 1, 3, 224, 224 });
                        var mean             = new[] { 0.485f, 0.456f, 0.406f };
                        var stddev           = new[] { 0.229f, 0.224f, 0.225f };
                        for (int y = 0; y < image.Height; y++)
                        {
                            Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                            for (int x = 0; x < image.Width; x++)
                            {
                                input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                                input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                                input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                            }
                        }

                        List <NamedOnnxValue> inputs = new List <NamedOnnxValue>
                        {
                            NamedOnnxValue.CreateFromTensor("data", input)
                        };

                        var session = new InferenceSession(onnxModelPath);
                        IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

                        if (token.IsCancellationRequested)
                        {
                            return;
                        }

                        IEnumerable <float> output = results.First().AsEnumerable <float>();
                        float sum = output.Sum(x => (float)Math.Exp(x));
                        IEnumerable <float> softmax = output.Select(x => (float)Math.Exp(x) / sum);

                        IEnumerable <Prediction> top1 = softmax.Select((x, idx) => new Prediction {
                            Label = LabelMap.Labels[idx], Confidence = x
                        })
                                                        .OrderByDescending(x => x.Confidence)
                                                        .Take(1);
                        Prediction prediction = top1.First();
                        prediction.Path       = (string)imagePath;

                        if (token.IsCancellationRequested)
                        {
                            return;
                        }

                        Result?.Invoke(prediction);

                        session.Dispose();
                        image.Dispose();
                        imageStream.Dispose();
                    }, images[i], token);
                }
                await Task.WhenAll(tasks);
            }
            catch (OperationCanceledException e)
            {
                Trace.WriteLine($"{nameof(OperationCanceledException)} thrown with message: {e.Message}");
            }
        }
Example No. 6
        public static void Main(string[] args)
        {
            // Read paths
            string modelFilePath = args[0];
            string imageFilePath = args[1];

            // Read image
            using Image <Rgb24> image = Image.Load <Rgb24>(imageFilePath);

            // Resize image
            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(224, 224),
                    Mode = ResizeMode.Crop
                });
            });

            // Preprocess image
            Tensor <float> input  = new DenseTensor <float>(new[] { 1, 3, 224, 224 });
            var            mean   = new[] { 0.485f, 0.456f, 0.406f };
            var            stddev = new[] { 0.229f, 0.224f, 0.225f };

            image.ProcessPixelRows(accessor =>
            {
                for (int y = 0; y < accessor.Height; y++)
                {
                    Span <Rgb24> pixelSpan = accessor.GetRowSpan(y);
                    for (int x = 0; x < accessor.Width; x++)
                    {
                        input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                        input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                        input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                    }
                }
            });

            // Setup inputs
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("data", input)
            };

            // Run inference
            using var session = new InferenceSession(modelFilePath);
            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Postprocess to get softmax vector
            IEnumerable <float> output = results.First().AsEnumerable <float>();
            float sum = output.Sum(x => (float)Math.Exp(x));
            IEnumerable <float> softmax = output.Select(x => (float)Math.Exp(x) / sum);

            // Extract top 10 predicted classes
            IEnumerable <Prediction> top10 = softmax.Select((x, i) => new Prediction {
                Label = LabelMap.Labels[i], Confidence = x
            })
                                             .OrderByDescending(x => x.Confidence)
                                             .Take(10);

            // Print results to console
            Console.WriteLine("Top 10 predictions for ResNet50 v2...");
            Console.WriteLine("--------------------------------------------------------------");
            foreach (var t in top10)
            {
                Console.WriteLine($"Label: {t.Label}, Confidence: {t.Confidence}");
            }
        }
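
LabelMap.Labels is another sample-level helper, not an ONNX Runtime type; a minimal sketch, assuming a flat array of class names indexed by class id (ImageNet labels for the classification samples; the detection samples would use their own label sets such as COCO):

        public static class LabelMap
        {
            // Only the first few ImageNet class names are listed here for illustration.
            public static readonly string[] Labels = new string[]
            {
                "tench", "goldfish", "great white shark", "tiger shark", "hammerhead"
                // ... remaining class names ...
            };
        }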
Example No. 7
        private void button1_Click(object sender, EventArgs e)
        {
            label1.Text       = string.Empty;
            pictureBox1.Image = null;
            pictureBox1.Refresh();
            bool isSuccess = false;

            try
            {
                pictureBox1.Load(textBox1.Text);
                isSuccess = true;
            }
            catch (Exception ex)
            {
                MessageBox.Show($"An error occurred while reading the image: {ex.Message}");
                throw;
            }
            if (isSuccess)
            {
                // After the image loads successfully, take a 224*224 bitmap from the picture box control
                Bitmap bitmap = new Bitmap(pictureBox1.Image, imageSize, imageSize);

                float[] imageArray = new float[imageSize * imageSize * 3];

                // Read each pixel of the image, row by row
                for (int y = 0; y < imageSize; y++)
                {
                    for (int x = 0; x < imageSize; x++)
                    {
                        var color = bitmap.GetPixel(x, y);

                        // Inspecting the model input with Netron shows that
                        // the 224*224 blue, green, and red components must be placed in that order
                        imageArray[y * imageSize + x] = color.B;
                        imageArray[y * imageSize + x + 1 * imageSize * imageSize] = color.G;
                        imageArray[y * imageSize + x + 2 * imageSize * imageSize] = color.R;
                    }
                }

                string modelPath = AppDomain.CurrentDomain.BaseDirectory + "BearModel.onnx";

                using (var session = new InferenceSession(modelPath))
                {
                    var container = new List <NamedOnnxValue>();

                    // Netron shows the required input type is float32[None, 3, 224, 224]
                    // The first dimension, None, means multiple images can be passed for inference
                    // Only one image is used here, so the input shape is [1, 3, 224, 224]
                    var shape  = new int[] { 1, 3, imageSize, imageSize };
                    var tensor = new DenseTensor <float>(imageArray, shape);

                    // Multiple inputs are supported; this model needs only one, named data
                    container.Add(NamedOnnxValue.CreateFromTensor <float>("data", tensor));

                    // Run inference
                    var results = session.Run(container);

                    // There are two outputs, classLabel and loss; only classLabel matters here
                    var label = results.FirstOrDefault(item => item.Name == "classLabel")? // take the output named classLabel
                                .AsTensor <string>()?
                                .FirstOrDefault();                                         // multiple images can be inferred at once; only one here, so take the first result

                    // Show the result in the control
                    label3.Text = label;
                }
            }
        }
Example No. 8
        private void Form1_Load(object sender, EventArgs e)
        {
            // params
            var threshold = 0.0f;
            var c         = Color.Yellow;
            var font      = new Font("Arial", 22);

            // inference session
            Console.WriteLine("Starting inference session...");
            var tic       = Environment.TickCount;
            var session   = new InferenceSession(model);
            var inputMeta = session.InputMetadata;
            var name      = inputMeta.Keys.ToArray()[0];
            var labels    = File.ReadAllLines(prototxt);

            Console.WriteLine("Session started in " + (Environment.TickCount - tic) + " ms.");

            // image
            Console.WriteLine("Creating image tensor...");
            tic = Environment.TickCount;
            var image      = new Bitmap(file, false);
            var width      = image.Width;
            var height     = image.Height;
            var dimensions = new int[] { 1, height, width, 3 };
            var inputData  = Onnx.ToTensor(image);

            Console.WriteLine("Tensor was created in " + (Environment.TickCount - tic) + " ms.");

            // prediction
            Console.WriteLine("Detecting objects...");
            tic = Environment.TickCount;
            var t1     = new DenseTensor <byte>(inputData, dimensions);
            var inputs = new List <NamedOnnxValue>()
            {
                NamedOnnxValue.CreateFromTensor(name, t1)
            };
            var results = session.Run(inputs).ToArray();

            // dump the results
            foreach (var r in results)
            {
                Console.WriteLine(r.Name + "\n");
                Console.WriteLine(r.AsTensor <float>().GetArrayString());
            }
            Console.WriteLine("Detection finished in " + (Environment.TickCount - tic) + " ms.");

            // drawing results
            Console.WriteLine("Drawing inference results...");
            tic = Environment.TickCount;
            var detection_boxes   = results[0].AsTensor <float>();
            var detection_classes = results[1].AsTensor <float>();
            var detection_scores  = results[2].AsTensor <float>();
            var num_detections    = results[3].AsTensor <float>()[0];

            using (var g = Graphics.FromImage(image))
            {
                for (int i = 0; i < num_detections; i++)
                {
                    var score = detection_scores[0, i];

                    if (score > threshold)
                    {
                        var label = labels[(int)detection_classes[0, i] - 1];

                        var x = (int)(detection_boxes[0, i, 0] * height);
                        var y = (int)(detection_boxes[0, i, 1] * width);
                        var w = (int)(detection_boxes[0, i, 2] * height);
                        var h = (int)(detection_boxes[0, i, 3] * width);

                        // python rectangle
                        var rectangle = Rectangle.FromLTRB(y, x, h, w);
                        g.DrawString(label, font, new SolidBrush(c), y, x);
                        g.DrawRectangle(new Pen(c)
                        {
                            Width = 3
                        }, rectangle);
                    }
                }
            }

            BackgroundImage = image;
            Console.WriteLine("Drawing finished in " + (Environment.TickCount - tic) + " ms.");
        }
Example No. 9
        static void Main(string[] args)
        {
            // check that enough command-line arguments were supplied
            if (args == null || args.Length < 3)
            {
                Console.WriteLine("Usage information: dotnet run model.onnx input.jpg output.jpg");
                return;
            }
            else
            {
                if (!(File.Exists(args[0])))
                {
                    Console.WriteLine("Model Path does not exist");
                    return;
                }
                if (!(File.Exists(args[1])))
                {
                    Console.WriteLine("Input Path does not exist");
                    return;
                }
            }

            // Read paths
            string modelFilePath    = args[0];
            string imageFilePath    = args[1];
            string outImageFilePath = args[2];

            using Image imageOrg = Image.Load(imageFilePath, out IImageFormat format);

            //Letterbox image
            var iw = imageOrg.Width;
            var ih = imageOrg.Height;
            var w  = 416;
            var h  = 416;

            if ((iw == 0) || (ih == 0))
            {
                Console.WriteLine("Math error: Attempted to divide by Zero");
                return;
            }

            float width  = (float)w / iw;
            float height = (float)h / ih;

            float scale = Math.Min(width, height);

            var nw = (int)(iw * scale);
            var nh = (int)(ih * scale);

            var pad_dims_w = (w - nw) / 2;
            var pad_dims_h = (h - nh) / 2;

            // Resize image using default bicubic sampler
            var image = imageOrg.Clone(x => x.Resize((nw), (nh)));

            var clone = new Image <Rgb24>(w, h);

            clone.Mutate(i => i.Fill(Color.Gray));
            clone.Mutate(o => o.DrawImage(image, new Point(pad_dims_w, pad_dims_h), 1f)); // draw the first one top left

            //Preprocessing image
            Tensor <float> input = new DenseTensor <float>(new[] { 1, 3, h, w });

            for (int y = 0; y < clone.Height; y++)
            {
                Span <Rgb24> pixelSpan = clone.GetPixelRowSpan(y);
                for (int x = 0; x < clone.Width; x++)
                {
                    input[0, 0, y, x] = pixelSpan[x].B / 255f;
                    input[0, 1, y, x] = pixelSpan[x].G / 255f;
                    input[0, 2, y, x] = pixelSpan[x].R / 255f;
                }
            }

            //Get the Image Shape
            var image_shape = new DenseTensor <float>(new[] { 1, 2 });

            image_shape[0, 0] = ih;
            image_shape[0, 1] = iw;

            // Setup inputs and outputs
            var container = new List <NamedOnnxValue>();

            container.Add(NamedOnnxValue.CreateFromTensor("input_1", input));
            container.Add(NamedOnnxValue.CreateFromTensor("image_shape", image_shape));

            // Session Options
            SessionOptions options = new SessionOptions();

            options.LogSeverityLevel = OrtLoggingLevel.ORT_LOGGING_LEVEL_INFO;
            options.AppendExecutionProvider_OpenVINO(@"MYRIAD_FP16");
            options.AppendExecutionProvider_CPU(1);

            // Run inference
            using var session = new InferenceSession(modelFilePath, options);

            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(container);

            Console.WriteLine("Inference done");

            //Post Processing Steps
            var            resultsArray = results.ToArray();
            Tensor <float> boxes        = resultsArray[0].AsTensor <float>();
            Tensor <float> scores       = resultsArray[1].AsTensor <float>();

            int[] indices = resultsArray[2].AsTensor <int>().ToArray();

            var len         = indices.Length / 3;
            var out_classes = new int[len];

            float[] out_scores = new float[len];

            var predictions = new List <Prediction>();
            var count       = 0;

            for (int i = 0; i < indices.Length; i = i + 3)
            {
                out_classes[count] = indices[i + 1];
                out_scores[count]  = scores[indices[i], indices[i + 1], indices[i + 2]];
                predictions.Add(new Prediction
                {
                    Box = new Box(boxes[indices[i], indices[i + 2], 1],
                                  boxes[indices[i], indices[i + 2], 0],
                                  boxes[indices[i], indices[i + 2], 3],
                                  boxes[indices[i], indices[i + 2], 2]),
                    Class = LabelMap.Labels[out_classes[count]],
                    Score = out_scores[count]
                });
                count++;
            }

            // Put boxes, labels and confidence on image and save for viewing
            using var outputImage = File.OpenWrite(outImageFilePath);
            Font font = SystemFonts.CreateFont("Arial", 16);

            foreach (var p in predictions)
            {
                imageOrg.Mutate(x =>
                {
                    x.DrawLines(Color.Red, 2f, new PointF[] {
                        new PointF(p.Box.Xmin, p.Box.Ymin),
                        new PointF(p.Box.Xmax, p.Box.Ymin),

                        new PointF(p.Box.Xmax, p.Box.Ymin),
                        new PointF(p.Box.Xmax, p.Box.Ymax),

                        new PointF(p.Box.Xmax, p.Box.Ymax),
                        new PointF(p.Box.Xmin, p.Box.Ymax),

                        new PointF(p.Box.Xmin, p.Box.Ymax),
                        new PointF(p.Box.Xmin, p.Box.Ymin)
                    });
                    x.DrawText($"{p.Class}, {p.Score:0.00}", font, Color.White, new PointF(p.Box.Xmin, p.Box.Ymin));
                });
            }
            imageOrg.Save(outputImage, format);
        }
Example No. 10
        static void Main(string[] args)
        {
            if (args.Length < 2)
            {
                System.Console.WriteLine("Not enough arguments given; usage: <input image> <output image>");
                return;
            }
            // when using the CPU / MKLDNN provider, uncomment the next line instead
            // var options = new SessionOptions();
            // when using the CUDA/GPU provider, use the following line; otherwise comment it out
            var options = SessionOptions.MakeSessionOptionWithCudaProvider();

            options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_ALL;
            options.InterOpNumThreads      = 8;
            options.IntraOpNumThreads      = 8;
            String           onnxfile = "YOUR_MUT1NY_DETECTOR_MODEL.onnx";
            InferenceSession session  = null;

            scales[0]             = 32.0f;
            scales[1]             = 16.0f;
            scales[2]             = 8.0f;
            anchorLevels[0, 0, 0] = 116.0f;
            anchorLevels[0, 0, 1] = 90.0f;
            anchorLevels[0, 1, 0] = 156.0f;
            anchorLevels[0, 1, 1] = 198.0f;
            anchorLevels[0, 2, 0] = 373.0f;
            anchorLevels[0, 2, 1] = 326.0f;

            anchorLevels[1, 0, 0] = 30.0f;
            anchorLevels[1, 0, 1] = 61.0f;
            anchorLevels[1, 1, 0] = 62.0f;
            anchorLevels[1, 1, 1] = 45.0f;
            anchorLevels[1, 2, 0] = 59.0f;
            anchorLevels[1, 2, 1] = 119.0f;

            anchorLevels[2, 0, 0] = 10.0f;
            anchorLevels[2, 0, 1] = 13.0f;
            anchorLevels[2, 1, 0] = 16.0f;
            anchorLevels[2, 1, 1] = 30.0f;
            anchorLevels[2, 2, 0] = 33.0f;
            anchorLevels[2, 2, 1] = 23.0f;
            String inputFilename  = args[0];
            String outputFilename = args[1];

            try
            {
                session = new InferenceSession(onnxfile, options);
                var   inputMeta = session.InputMetadata;
                int[] inputDim  = new int[4];
                float scaleFactor;
                int   offsetX, offsetY;
                foreach (var name in inputMeta.Keys)
                {
                    var dim = inputMeta[name].Dimensions;
                    for (int n = 0; n < dim.Length; n++)
                    {
                        inputDim[n] = dim[n];
                    }
                }
                Stopwatch totalSw;
                Stopwatch processInSw;
                Stopwatch processOutSw;
                Stopwatch executeSw;
                long      totalTime       = 0;
                long      totalProcessIn  = 0;
                long      totalProcessOut = 0;
                long      totalExecute    = 0;
                for (int runs = 0; runs < (BENCHMARKMODE ? NUMRUNS : 1); runs++)
                {
                    totalSw     = Stopwatch.StartNew();
                    processInSw = Stopwatch.StartNew();
                    var testData = CreateInputTensorFromImage(inputFilename, inputDim, out scaleFactor, out offsetX, out offsetY);
                    processInSw.Stop();
                    var container = new List <NamedOnnxValue>();

                    foreach (var name in inputMeta.Keys)
                    {
                        var tensor = new DenseTensor <float>(testData, inputMeta[name].Dimensions);
                        container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                    }
                    executeSw = Stopwatch.StartNew();
                    using (var results = session.Run(container))
                    {
                        executeSw.Stop();
                        int       numResults = results.Count;
                        int       levelNr    = 0;
                        ArrayList dets       = new ArrayList();
                        processOutSw = Stopwatch.StartNew();
                        foreach (var r in results)
                        {
                            var resultTensor    = r.AsTensor <float>();
                            var resultDimension = resultTensor.Dimensions;
                            var resultArray     = resultTensor.ToArray();
                            ProcessOutput(levelNr, resultDimension, resultArray, dets);
                            levelNr++;
                        }
                        System.Console.WriteLine("# Dets = " + dets.Count);
                        processOutSw.Stop();
                        dets.Sort();
                        ArrayList finalRects = BuildFinalOutput(dets, 1.0f / scaleFactor, offsetX, offsetY);
                        System.Console.WriteLine("Final # detected Rects = " + finalRects.Count);
                        totalSw.Stop();
                        Console.WriteLine("Preprocessing took " + processInSw.ElapsedMilliseconds);
                        Console.WriteLine("Execution of DNN took " + executeSw.ElapsedMilliseconds);
                        Console.WriteLine("Postprocessing took " + processOutSw.ElapsedMilliseconds);
                        Console.WriteLine("Total processing took " + totalSw.ElapsedMilliseconds);
                        if (runs >= WARMUPRUNS)
                        {
                            totalTime       += totalSw.ElapsedMilliseconds;
                            totalExecute    += executeSw.ElapsedMilliseconds;
                            totalProcessIn  += processInSw.ElapsedMilliseconds;
                            totalProcessOut += processOutSw.ElapsedMilliseconds;
                        }
                        if (!BENCHMARKMODE)
                        {
                            WriteOutputDet(inputFilename, outputFilename, finalRects);
                        }
                        results.Dispose();
                        container.Clear();
                    }
                }
                float avgTotalTime      = (float)totalTime / (float)(NUMRUNS - WARMUPRUNS);
                float avgExecuteTime    = (float)totalExecute / (float)(NUMRUNS - WARMUPRUNS);
                float avgProcessInTime  = (float)totalProcessIn / (float)(NUMRUNS - WARMUPRUNS);
                float avgProcessOutTime = (float)totalProcessOut / (float)(NUMRUNS - WARMUPRUNS);
                Console.WriteLine("Avg time of preprocess: " + avgProcessInTime);
                Console.WriteLine("Avg time of execution of DNN: " + avgExecuteTime);
                Console.WriteLine("Avg time of postprocess: " + avgProcessOutTime);
                Console.WriteLine("Avg time of total: " + avgTotalTime);
            }
            catch (Exception e)
            {
                System.Console.WriteLine("Could not load ONNX model, because " + e.ToString());
                return;
            }
            System.Console.WriteLine("Done");
        }
Example No. 11
        public void RecognizeImagesInQueue(string modelPath, CancellationToken token)
        {
            string imagePath;

            while (filesQueue.TryDequeue(out imagePath))
            {
                Tensor <float> tensor = ImageReader.GetTensorFromImageFile(imagePath);

                lock (lockObject)
                {
                    if (cancelAllThreads)
                    {
                        return;
                    }
                }
                if (token.IsCancellationRequested)
                {
                    return;
                }

                using (var session = new InferenceSession(modelPath))
                {
                    var inputMeta = session.InputMetadata;
                    var inputs    = new List <NamedOnnxValue>();

                    lock (lockObject)
                    {
                        if (cancelAllThreads)
                        {
                            return;
                        }
                    }
                    if (token.IsCancellationRequested)
                    {
                        return;
                    }

                    foreach (var name in inputMeta.Keys)
                    {
                        inputs.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                    }

                    lock (lockObject)
                    {
                        if (cancelAllThreads)
                        {
                            return;
                        }
                    }
                    if (token.IsCancellationRequested)
                    {
                        return;
                    }

                    using (var results = session.Run(inputs))
                    {
                        foreach (var result in results)
                        {
                            var    resultTensor    = result.AsTensor <float>();
                            int    recognizedDigit = TensorArgMax(resultTensor);
                            string resultString    = imagePath + '\n' + recognizedDigit.ToString() + '\n';
                            //resultsQueue.Enqueue(resultString);
                            resultsCollection.TryAdd(resultString);
                        }
                    }
                }
            }
        }
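
TensorArgMax is referenced above but not shown; a minimal sketch that returns the index of the largest value, assuming the output tensor is a flat vector of class scores (Tensor<T> from System.Numerics.Tensors enumerates its values in row-major order):

        private static int TensorArgMax(Tensor <float> tensor)
        {
            int   maxIndex = 0;
            float maxValue = float.MinValue;
            int   index    = 0;

            // Scan all values and remember the position of the largest one.
            foreach (float value in tensor)
            {
                if (value > maxValue)
                {
                    maxValue = value;
                    maxIndex = index;
                }
                index++;
            }
            return maxIndex;
        }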
Example No. 12
        private void pictureBox1_MouseUp(object sender, MouseEventArgs e)
        {
            // When the left mouse button is released,
            // start processing the image for inference
            if (e.Button == MouseButtons.Left)
            {
                Bitmap digitTmp = (Bitmap)digitImage.Clone(); // copy digitImage

                // Resize the image to the 28x28 size the MNIST model accepts
                using (Graphics g = Graphics.FromImage(digitTmp))
                {
                    g.InterpolationMode = InterpolationMode.HighQualityBicubic;
                    g.DrawImage(digitTmp, 0, 0, MnistImageSize, MnistImageSize);
                }

                // Convert the image to grayscale and store its pixel values in an array
                float[] imageArray = new float[MnistImageSize * MnistImageSize];

                for (int y = 0; y < MnistImageSize; y++)
                {
                    for (int x = 0; x < MnistImageSize; x++)
                    {
                        var color = digitTmp.GetPixel(x, y);
                        var a = (float)(0.5 - (color.R + color.G + color.B) / (3.0 * 255));

                        imageArray[y * MnistImageSize + x] = a;
                    }
                }

                // Set the path of the model to load; change it to your model name as needed
                string modelPath = AppDomain.CurrentDomain.BaseDirectory + "mnist.onnx";

                using (var session = new InferenceSession(modelPath))
                {
                    var inputMeta = session.InputMetadata;
                    var container = new List <NamedOnnxValue>();

                    // Netron shows the required input type is float32[1, 1, 28, 28]
                    // The first dimension is the batch size; only one image is used here,
                    // so the input shape is [1, 1, 28, 28]
                    var shape = new int[] { 1, 1, MnistImageSize, MnistImageSize };
                    var tensor = new DenseTensor <float>(imageArray, shape);

                    // Multiple inputs are supported; the MNIST model needs only one, named Input3
                    container.Add(NamedOnnxValue.CreateFromTensor <float>("Input3", tensor));

                    // Run inference
                    var results = session.Run(container);

                    // The output of interest is Plus214_Output_0
                    IList <float> imageList = results.FirstOrDefault(item => item.Name == "Plus214_Output_0").AsTensor <float>().ToList();

                    // Query to check for highest probability digit
                    var maxIndex = imageList.IndexOf(imageList.Max());

                    // Display the results
                    label1.Text = maxIndex.ToString();
                }
            }
        }
Example No. 13
        public Mat prepareData(List <NamedOnnxValue> container, InferenceSession session1)
        {
            var inputMeta = session1.InputMetadata;


            Mat mat2 = null;

            foreach (var name in inputMeta.Keys)
            {
                var data = InputDatas[name];
                if (data.Data is InternalArray intar)
                {
                    for (int i = 0; i < inputMeta[name].Dimensions.Length; i++)
                    {
                        if (inputMeta[name].Dimensions[i] == -1)
                        {
                            inputMeta[name].Dimensions[i] = intar.Shape[i];
                        }
                    }



                    inputData = intar.Data.Select(z => (float)z).ToArray();
                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);

                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }
                if (data.Data is Mat matOrig)
                {
                    var mat = matOrig.Clone();
                    lastReadedMat = mat.Clone();

                    if (inputMeta[name].Dimensions[2] == -1 && inputMeta[name].Dimensions[3] == -1)
                    {
                        inputMeta[name].Dimensions[2] = mat.Height;
                        inputMeta[name].Dimensions[3] = mat.Width;
                    }



                    mat2 = mat.Clone();
                    mat.ConvertTo(mat, MatType.CV_32F);
                    object param = mat;
                    foreach (var pitem in data.Preprocessors)
                    {
                        param = pitem.Process(param);
                        if (pitem is ZeroImagePreprocessor && param is Mat mt2)
                        {
                            inputMeta[name].Dimensions[3] = mt2.Width;
                            inputMeta[name].Dimensions[2] = mt2.Height;
                            mt2.ConvertTo(mt2, MatType.CV_32F);
                        }
                        if (pitem is AspectResizePreprocessor asp && param is Mat mt)
                        {
                            if (asp.ForceH)
                            {
                                inputMeta[name].Dimensions[3] = mt.Width;
                                inputMeta[name].Dimensions[2] = mt.Height;
                            }
                            if (inputMeta[name].Dimensions[2] != -1 && inputMeta[name].Dimensions[3] == -1) // keep aspect ratio required
                            {
                                inputMeta[name].Dimensions[3] = mt.Width;
                            }
                        }
                    }

                    inputData = param as float[];
                    var tensor = new DenseTensor <float>(param as float[], inputMeta[name].Dimensions);

                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }
                if (data.Data is VideoCapture cap)
                {
                    Mat  mat = new Mat();
                    bool w   = false;
                    try
                    {
                        if (FetchNextFrame)
                        {
                            cap.Read(mat);
                            lastReadedMat = mat.Clone();
                        }
                        else
                        {
                            mat = lastReadedMat.Clone();
                        }
                        w = true;
                        if (inputMeta[name].Dimensions[2] == -1)
                        {
                            inputMeta[name].Dimensions[2] = mat.Height;
                            inputMeta[name].Dimensions[3] = mat.Width;
                        }
                        //  pictureBox1.Image = BitmapConverter.ToBitmap(mat);
                        mat2 = mat.Clone();
                        mat.ConvertTo(mat, MatType.CV_32F);
                        object param = mat;
                        foreach (var pitem in data.Preprocessors)
                        {
                            param = pitem.Process(param);
                        }

                        inputData = param as float[];
                        var tensor = new DenseTensor <float>(param as float[], inputMeta[name].Dimensions);

                        container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                    }
                    catch (Exception ex)
                    {
                        throw new PrepareDataException()
                              {
                                  IsVideo = true, SourceMat = w ? lastReadedMat : null
                              };
                    }
                }
                if (data.Data is float[] fl)
                {
                    var tensor = new DenseTensor <float>(fl, inputMeta[name].Dimensions);

                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }
            }
            return(mat2);
        }
Example No. 14
        private void run()
        {
            Stopwatch sw = Stopwatch.StartNew();


            var inputMeta = session1.InputMetadata;
            var container = new List <NamedOnnxValue>();

            Mat mat2 = null;

            foreach (var name in inputMeta.Keys)
            {
                var data = InputDatas[name];
                if (data.Data is Mat matOrig)
                {
                    //var mat = matOrig.Clone(new Rect(shiftX, shiftY, 576, 288));
                    var mat = matOrig.Clone();
                    //mat = mat.Resize(new OpenCvSharp.Size(576, 288));
                    if (inputMeta[name].Dimensions[2] == -1)
                    {
                        inputMeta[name].Dimensions[2] = mat.Height;
                        inputMeta[name].Dimensions[3] = mat.Width;
                    }

                    mat2 = mat.Clone();
                    mat.ConvertTo(mat, MatType.CV_32F);
                    object param = mat;
                    foreach (var pitem in data.Preprocessors)
                    {
                        param = pitem.Process(param);
                    }

                    inputData = param as float[];
                    var tensor = new DenseTensor <float>(param as float[], inputMeta[name].Dimensions);

                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }
            }
            OutputDatas.Clear();
            using (var results = session1.Run(container))
            {
                // Get the results
                foreach (var result in results)
                {
                    var data = result.AsTensor <float>();
                    //var dims = data.Dimensions;
                    var rets = data.ToArray();
                    OutputDatas.Add(result.Name, rets);
                }
            }

            //if (checkBox1.Checked)
            {
                Stopwatch sw2 = Stopwatch.StartNew();
                var       ret = boxesDecode(mat2);
                lock (lock1)
                {
                    last = ret;
                }
                sw2.Stop();

                if (ret != null)
                {
                    //var mm = drawBoxes(mat2, ret.Item1, ret.Item2, visTresh, ret.Item3);

                    /*pictureBox1.Image = BitmapConverter.ToBitmap(mm);
                     * mat2 = mm;
                     * pictureBox1.Invoke((Action)(() =>
                     * {
                     *  if (pictureBox1.Image != null)
                     *  {
                     *      pictureBox1.Image.Dispose();
                     *  }
                     *  pictureBox1.Image = BitmapConverter.ToBitmap(mm);
                     * }));*/
                }
            }

            sw.Stop();
            lastms = sw.ElapsedMilliseconds;
        }
Example No. 15
        public List <string> DoOcr(string imageFilePath)
        {
            Mat rgbMat = new Mat();

            int            new_h, new_w;
            Tensor <float> input = GetInputs(imageFilePath, rgbMat, out new_h, out new_w);

            // Setup inputs and outputs
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("input0", input)
            };

            try
            {
                LineBoxDecode decode = new LineBoxDecode(rgbMat.Width, rgbMat.Height);
                // Run inference modelFilePath
                using (var session = new InferenceSession(modelFilePath))
                {
                    using (IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs))
                    {
                        var resultsArray = results.ToArray();
                        decode.Do(resultsArray, new_w, new_h);
                    }
                }

                CnLinePrepare          clp     = new CnLinePrepare(rgbMat, decode.Boxes);
                List <Tensor <float> > tensors = clp.GetTensors();

                List <string> fresults = new List <string>();
                using (var crnn = new CRNNHandle())
                {
                    //crNNModelFilePath
                    using (var session = new InferenceSession(crNNModelFilePath))
                    {
                        foreach (Tensor <float> oneTensor in tensors)
                        {
                            fresults.Add(crnn.Run(session, oneTensor));
                        }
                    }
                }

                return(fresults);

                /*
                 *  The following code draws a box around each detected line of Chinese text:
                 * foreach (List<Point> boxes in decode.Boxes.Values)
                 * {
                 *  DrawBox(rgbMat, boxes);
                 * }
                 *
                 * CvInvoke.Imwrite(outImageFilePath, rgbMat);
                 */
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message + ex.StackTrace);
            }

            return(null);
        }
Example No. 16
        private void TestRegisterCustomOpLibrary()
        {
            using (var option = new SessionOptions())
            {
                string libName   = "custom_op_library.dll";
                string modelPath = "custom_op_test.onnx";
                if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
                {
                    libName = "custom_op_library.dll";
                }
                else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
                {
                    libName = "libcustom_op_library.so";
                }
                else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
                {
                    libName = "libcustom_op_library.dylib";
                }

                string libFullPath = Path.Combine(Directory.GetCurrentDirectory(), libName);
                Assert.True(File.Exists(libFullPath), $"Expected lib {libFullPath} does not exist.");

                var      ortEnvInstance = OrtEnv.Instance();
                string[] providers      = ortEnvInstance.GetAvailableProviders();
                if (Array.Exists(providers, provider => provider == "CUDAExecutionProvider"))
                {
                    option.AppendExecutionProvider_CUDA(0);
                }

                IntPtr libraryHandle = IntPtr.Zero;
                try
                {
                    option.RegisterCustomOpLibraryV2(libFullPath, out libraryHandle);
                }
                catch (Exception ex)
                {
                    var msg = $"Failed to load custom op library {libFullPath}, error = {ex.Message}";
                    throw new Exception(msg + "\n" + ex.StackTrace);
                }


                using (var session = new InferenceSession(modelPath, option))
                {
                    var inputContainer = new List <NamedOnnxValue>();
                    inputContainer.Add(NamedOnnxValue.CreateFromTensor <float>("input_1",
                                                                               new DenseTensor <float>(
                                                                                   new float[]
                    {
                        1.1f, 2.2f, 3.3f, 4.4f, 5.5f,
                        6.6f, 7.7f, 8.8f, 9.9f, 10.0f,
                        11.1f, 12.2f, 13.3f, 14.4f, 15.5f
                    },
                                                                                   new int[] { 3, 5 }
                                                                                   )));

                    inputContainer.Add(NamedOnnxValue.CreateFromTensor <float>("input_2",
                                                                               new DenseTensor <float>(
                                                                                   new float[]
                    {
                        15.5f, 14.4f, 13.3f, 12.2f, 11.1f,
                        10.0f, 9.9f, 8.8f, 7.7f, 6.6f,
                        5.5f, 4.4f, 3.3f, 2.2f, 1.1f
                    },
                                                                                   new int[] { 3, 5 }
                                                                                   )));

                    using (var result = session.Run(inputContainer))
                    {
                        Assert.Equal("output", result.First().Name);
                        var tensorOut = result.First().AsTensor <int>();

                        var expectedOut = new DenseTensor <int>(
                            new int[]
                        {
                            17, 17, 17, 17, 17,
                            17, 18, 18, 18, 17,
                            17, 17, 17, 17, 17
                        },
                            new int[] { 3, 5 }
                            );
                        Assert.True(tensorOut.SequenceEqual(expectedOut));
                    }
                }

                // Safe to unload the custom op shared library now
                UnloadLibrary(libraryHandle);
            }
        }
Example No. 17
        public static string CaptionImageOnnx(ImageFile imageFile)
        {
            var image = Image.Load <Rgb24>(imageFile.FullPath, out IImageFormat format);

            bool modifyImageFile = false;

            if (!modifyImageFile)
            {
                image.Mutate(x =>
                {
                    x.Resize(new ResizeOptions
                    {
                        Size = new Size(299, 299),
                        Mode = ResizeMode.Crop
                    });
                });
            }
            else
            {
                using (Stream imageStream = new MemoryStream())
                {
                    image.Mutate(x =>
                    {
                        x.Resize(new ResizeOptions
                        {
                            Size = new Size(299, 299),
                            Mode = ResizeMode.Crop
                        });
                    });
                    image.Save(imageStream, format);
                };
            }

            Tensor <float> input  = new DenseTensor <float>(new[] { 1, 3, 299, 299 });
            var            mean   = new[] { 0.485f, 0.456f, 0.406f };
            var            stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < image.Height; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                for (int x = 0; x < image.Width; x++)
                {
                    input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                    input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                    input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                }
            }

            var inputs1 = new List <NamedOnnxValue>()
            {
                NamedOnnxValue.CreateFromTensor <float>("image", input)
            };

            var imgFtEx_session = new InferenceSession("C:\\Users\\jsell\\source\\repos\\FileSystemHelper\\ImageCaptionerPlugin\\Resources\\model\\ifem.onnx");
            var encoder_session = new InferenceSession("C:\\Users\\jsell\\source\\repos\\FileSystemHelper\\ImageCaptionerPlugin\\Resources\\model\\encoder.onnx");
            var decoder_session = new InferenceSession("C:\\Users\\jsell\\source\\repos\\FileSystemHelper\\ImageCaptionerPlugin\\Resources\\model\\decoder.onnx");

            // image feature extraction layer
            using (var outputs1 = imgFtEx_session.Run(inputs1))
            {
                var input2 = outputs1.First();
                input2.Name = "imageFeatures";
                var inputs2 = new List <NamedOnnxValue>()
                {
                    input2
                };
                Console.WriteLine(outputs1.Count);

                // encoder
                using (var outputs2 = encoder_session.Run(inputs2))
                {
                    // TODO: feed the encoder output into decoder_session to generate the caption tokens
                }
            }

            return("Caption");
        }
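The decoder stage above is left as a TODO. Purely as an illustration, a greedy decoding loop might be structured as in the sketch below; the input name "tokens", the token ids, the maximum length and the meaning of the decoder output are assumptions, not values taken from the actual decoder.onnx.

        // Illustrative only: a greedy decoding loop for the decoder stage. The input names
        // ("tokens", the encoder output value), the token ids and the maximum length are
        // assumptions about decoder.onnx, not values read from the real model.
        private static List <long> GreedyDecode(InferenceSession decoderSession, NamedOnnxValue encoderOutput,
                                                long startTokenId = 1, long endTokenId = 2, int maxLength = 30)
        {
            var tokens = new List <long> {
                startTokenId
            };

            for (int step = 0; step < maxLength; step++)
            {
                var decoderInputs = new List <NamedOnnxValue>()
                {
                    encoderOutput,
                    NamedOnnxValue.CreateFromTensor("tokens",
                                                    new DenseTensor <long>(tokens.ToArray(), new int[] { 1, tokens.Count }))
                };

                using (var decoderOutputs = decoderSession.Run(decoderInputs))
                {
                    // Assume the first output holds the vocabulary logits for the next token
                    var  logits = decoderOutputs.First().AsEnumerable <float>().ToArray();
                    long next   = Array.IndexOf(logits, logits.Max());

                    if (next == endTokenId)
                    {
                        break;
                    }
                    tokens.Add(next);
                }
            }
            return(tokens);
        }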
Exemplo n.º 18
0
        static void Main(string[] args)
        {
            SessionOptions options = new SessionOptions();

            options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_EXTENDED;

            using (var session = new InferenceSession(@"D:\Ubuntu\onnx_c#\project\TinyYolov3\TinyYolov3\Assets\yolov3-tiny.onnx", options))
            {
                var input     = session.InputMetadata;
                var output    = session.OutputMetadata;
                var container = new List <NamedOnnxValue>();
                float[] inputData0 = LoadTensorFromFile(@"D:\Ubuntu\onnx_c#\project\TinyYolov3\TinyYolov3\Assets\test_data_set_0\input_0.txt"); //image data
                float[] inputData1 = LoadTensorFromFile(@"D:\Ubuntu\onnx_c#\project\TinyYolov3\TinyYolov3\Assets\test_data_set_0\input_1.txt"); //image shape

                foreach (var name in input.Keys)
                {
                    Console.WriteLine(name);
                }
                foreach (var name in output.Keys)
                {
                    Console.WriteLine(name);
                }
                Console.WriteLine(input.Count);
                Console.WriteLine(output.Count);

                ///// Create Inputs
                var tensor0 = new DenseTensor <float>(inputData0, new int[] { 1, 3, 416, 416 });
                //var tensor1 = new DenseTensor<float>(new float[] { 375, 500 }, new int[] { 1, 2 });
                var tensor1 = new DenseTensor <float>(inputData1, new int[] { 1, 2 });

                container.Add(NamedOnnxValue.CreateFromTensor <float>("input_1", tensor0));
                container.Add(NamedOnnxValue.CreateFromTensor <float>("image_shape", tensor1));

                // Run the inference
                using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                {
                    // dump the results
                    foreach (var r in results)
                    {
                        //Console.WriteLine(r.Name);
                        //Console.WriteLine(r.AsTensor<int>().GetArrayString());
                        if (r.Name == "yolonms_layer_1")
                        {
                            File.WriteAllText(@"D:\Ubuntu\onnx_c#\project\TinyYolov3\TinyYolov3\Assets\test_data_set_0\out_0.txt", r.AsTensor <float>().GetArrayString());
                        }
                        if (r.Name == "yolonms_layer_1:1")
                        {
                            File.WriteAllText(@"D:\Ubuntu\onnx_c#\project\TinyYolov3\TinyYolov3\Assets\test_data_set_0\out_1.txt", r.AsTensor <float>().GetArrayString());
                        }
                        if (r.Name == "yolonms_layer_1:2")
                        {
                            File.WriteAllText(@"D:\Ubuntu\onnx_c#\project\TinyYolov3\TinyYolov3\Assets\test_data_set_0\out_2.txt", r.AsTensor <int>().GetArrayString());
                        }
                    }
                }
            }
            // Keep the console window open without spinning the CPU
            Console.ReadLine();
        }
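For reference, the three NMS outputs written to disk above are commonly combined into per-detection records. The sketch below assumes the ONNX model zoo YOLOv3 layout (boxes [1, N, 4], scores [1, classes, N], indices rows of [batch, class, box]); that layout is an assumption about this particular export, so verify it before relying on it. It would be called from inside the using (var results = ...) block once the three tensors have been pulled out of the results collection.

        // Sketch only: combining the Tiny YOLOv3 NMS outputs into per-detection records.
        // The layout assumed here (boxes [1, N, 4], scores [1, classes, N], indices rows of
        // [batch, class, box]) follows the ONNX model zoo YOLOv3 convention and is an
        // assumption about this particular export.
        static void PrintDetections(Tensor <float> boxes, Tensor <float> scores, Tensor <int> indices)
        {
            int selected = indices.Dimensions[0];

            for (int i = 0; i < selected; i++)
            {
                int   classId = indices[i, 1];
                int   boxId   = indices[i, 2];
                float score   = scores[0, classId, boxId];

                Console.WriteLine("class {0}, score {1}, box [{2}, {3}, {4}, {5}]",
                                  classId, score,
                                  boxes[0, boxId, 0], boxes[0, boxId, 1], boxes[0, boxId, 2], boxes[0, boxId, 3]);
            }
        }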
Exemplo n.º 19
0
        public ImageRepresentation ProcessFile(ImageRepresentation Img)
        {
            byte[] ByteImage = Convert.FromBase64String(Img.Base64Image);

            string hash = Hash.GetHash(ByteImage);

            Img.ImageHash = hash;

            using (var db = new LibraryContext())
            {
                bool found = false;
                foreach (var img in db.Images)
                {
                    if (Hash.VerifyHash(hash, img.ImageHash))
                    {
                        db.Entry(img).Reference(a => a.ByteImage).Load();
                        if (Hash.ByteArrayCompare(ByteImage, img.ByteImage.Img))
                        {
                            img.NumOfRequests += 1;
                            Img.ClassName      = img.ClassName;
                            Img.Prob           = img.Prob;
                            Img.ImageId        = img.ImageInfoId;
                            Img.NumOfRequests  = img.NumOfRequests;

                            found = true;
                            break;
                        }
                    }
                }


                if (found)
                {
                    SaveDataBaseConcurrent(db, null);
                    return(Img);
                }
            }
            using Image <Rgb24> image = Image.Load <Rgb24>(ByteImage);

            const int TargetWidth  = 28;
            const int TargetHeight = 28;

            // Resize image to 28 x 28
            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop
                });
                x.Grayscale();
            });

            // Create tensor of shape (batch-size, channels, height, width) and normalize the image
            var input = new DenseTensor <float>(new[] { 1, 1, TargetHeight, TargetWidth });

            for (int y = 0; y < TargetHeight; y++)
            {
                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = image[x, y].R / 255f;
                }
            }

            // Create the inputs to the model
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor(inputnodename, input)
            };

            // Run NNet
            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Softmax calculation
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            float  maxValue = softmax.Max();
            string maxClass = softmax.ToList().IndexOf(maxValue).ToString();

            using (var db = new LibraryContext())
            {
                var NewImage = new ImageInfo()
                {
                    ClassName     = maxClass,
                    Prob          = maxValue,
                    ImageName     = Img.ImageName,
                    NumOfRequests = 0,
                    ImageHash     = hash,
                    ByteImage     = new ImageFile {
                        Img = ByteImage
                    }
                };
                NewImage.ImageClasses = new List <ImageClass>();

                var ClassNum = db.ImageClasses.Where(a => a.ClassName == maxClass).FirstOrDefault();

                if (ClassNum != null)
                {
                    NewImage.ImageClasses.Add(ClassNum);
                    ClassNum.Images = new List <ImageInfo>();
                    ClassNum.Images.Add(NewImage);

                    db.Add(NewImage);
                }
                else
                {
                    var NewClass = new ImageClass()
                    {
                        ClassName = maxClass
                    };

                    NewImage.ImageClasses.Add(NewClass);

                    NewClass.Images = new List <ImageInfo>();
                    NewClass.Images.Add(NewImage);

                    db.Add(NewClass);
                    db.Add(NewImage);
                }
                db.SaveChanges();
            }

            Img.NumOfRequests = 0;
            Img.ClassName     = maxClass;
            Img.Prob          = maxValue;

            return(Img);
        }
Exemplo n.º 20
0
        void Add_File(List <NamedOnnxValue> inputs, int[] dimensions, string input_meta_key)
        {
            Tensor <float> t = LoadTensorFromFileBytes(b_data);

            inputs.Add(NamedOnnxValue.CreateFromTensor <float>(input_meta_key, t));
        }
Exemplo n.º 21
0
        static void Main()
        {
            var session = new InferenceSession(@"../../../Models/cntk-mlp15.onnx");

            var session1 = new InferenceSession(@"../../../Models/tf-mlp20.onnx");

            var inputMeta = session.InputMetadata;
            //var outputMeta = session.OutputMetadata;

            var input1Meta = session1.InputMetadata;
            //var output1Meta = session1.OutputMetadata;

            List <NamedOnnxValue> container = new List <NamedOnnxValue>();

            int[] dm = new int[] { 1, 1000 };

            //Previous runs with small amounts of data
            //var Data = LoadTENSORSFromFiles(@"../../../Data/Test_Bitstring/");
            //var Data = LoadTENSORSFromFiles(@"../../../Data/Test_Bitstring2/");

            var Data = LoadTENSORSFromFiles(@"../../../Data/Evolved_Bitstring/Bitstrings/");

            int x = 0;

            foreach (var fltArrList in Data.Tensors)
            {
                foreach (var fltArr in fltArrList.Value)
                {
                    float[] sourceData = fltArr;
                    string  fileName   = fltArrList.Key;

                    Utilities.DatabaseUtils insertData = new Utilities.DatabaseUtils();
                    int tinyIntID = insertData.InsertFloatsIntoEvolvedTinyInts(fileName, sourceData);

                    foreach (var name in inputMeta.Keys)
                    {
                        container.Clear();
                        var tensor = new Microsoft.ML.OnnxRuntime.Tensors.DenseTensor <float>(fltArr, dm);
                        container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));

                        using (var results = session.Run(container))  // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
                        {
                            foreach (var r in results)
                            {
                                {
                                    //Console.WriteLine("Output for {0}", r.Name);
                                    // Single-element output: strip the braces from GetArrayString() and parse the value
                                    var str  = r.AsTensor <float>().GetArrayString();
                                    var str1 = str.Replace('{', ' ').Replace('}', ' ').Trim();
                                    var flt  = float.Parse(str1);

                                    //if (flt*1000 > 4.0)
                                    {
                                        Console.WriteLine(x);
                                        Console.WriteLine(Path.GetFileName(fileName));
                                        Console.WriteLine(flt * 10000);

                                        insertData.InsertOrUpdateEvolvedFilesMetaCNTK(tinyIntID, Path.GetFileName(fileName), flt * 10000);
                                    }
                                    x++;
                                }
                            }
                        }

                        foreach (var name1 in input1Meta.Keys)
                        {
                            container.Clear();
                            var tensor1 = new Microsoft.ML.OnnxRuntime.Tensors.DenseTensor <float>(fltArr, dm);
                            container.Add(NamedOnnxValue.CreateFromTensor <float>(name1, tensor1));

                            using var results = session1.Run(container);
                            foreach (var r in results)
                            {
                                {
                                    //Console.WriteLine("Output for {0}", r.Name);
                                    // Single-element output: strip the braces from GetArrayString() and parse the value
                                    var str  = r.AsTensor <float>().GetArrayString();
                                    var str1 = str.Replace('{', ' ').Replace('}', ' ').Trim();
                                    var flt  = float.Parse(str1);

                                    //if (flt*1000 > 4.0)
                                    {
                                        Console.WriteLine(x);
                                        Console.WriteLine(Path.GetFileName(fileName));
                                        Console.WriteLine(flt * 100);

                                        insertData.InsertOrUpdateEvolvedFilesMetaTF(tinyIntID, Path.GetFileName(fileName), flt * 100);
                                    }
                                    x++;
                                }
                            }
                        }
                    }
                }
            }
        }
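A note on the output handling in this example: round-tripping through GetArrayString() and float.Parse only works for a single-element tensor. Assuming the models really do emit one float per run, the value can be read directly:

            // Assumes a single float output per run, as the string parsing above implies
            var flt = r.AsEnumerable <float>().First();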
Exemplo n.º 22
0
        static void Main(string[] args)
        {
            if (args.Length < 2)
            {
                System.Console.WriteLine("Not enough arguments given use FaceSegmentationCMD inputimage outputimage");
            }
            else
            {
                byte[] REDLABEL   = { 0, 0, 0, 255, 0, 255, 255, 255, 128, 255, 0 };
                byte[] GREENLABEL = { 0, 255, 0, 0, 255, 255, 0, 255, 128, 192, 128 };
                byte[] BLUELABEL  = { 0, 0, 255, 0, 255, 0, 255, 255, 128, 192, 128 };
                var    options    = new SessionOptions();
                options.SetSessionGraphOptimizationLevel(2);
                var    path     = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location);
                String onnxfile = path + "\\facesegmentation_full_344.onnx";


                InferenceSession session = null;
                try
                {
                    session = new InferenceSession(onnxfile, options);
                } catch (Exception e)
                {
                    System.Console.WriteLine("Could not load ONNX model, because " + e.ToString());
                    return;
                }
                try
                {
                    var    bitmap       = new BitmapImage(new Uri(args[0]));
                    var    bitmapWidth  = bitmap.Width;
                    var    bitmapHeight = bitmap.Height;
                    var    inputMeta    = session.InputMetadata;
                    var    container    = new List <NamedOnnxValue>();
                    int[]  inputDim     = new int[4];
                    double scaleFactor  = 1.0;
                    foreach (var name in inputMeta.Keys)
                    {
                        var dim = inputMeta[name].Dimensions;
                        for (int n = 0; n < dim.Length; n++)
                        {
                            inputDim[n] = dim[n];
                        }
                    }
                    if (bitmapWidth > bitmapHeight)
                    {
                        scaleFactor = (double)inputDim[3] / bitmapWidth;
                    }
                    else
                    {
                        scaleFactor = (double)inputDim[2] / bitmapHeight;
                    }
                    TransformedBitmap tb  = new TransformedBitmap(bitmap, new System.Windows.Media.ScaleTransform(scaleFactor, scaleFactor));
                    int    newWidth       = tb.PixelWidth;
                    int    newHeight      = tb.PixelHeight;
                    int    channels       = tb.Format.BitsPerPixel / 8;
                    int    stride         = channels * newWidth;
                    byte[] rawData        = new byte[stride * newHeight];
                    byte[] rawLabelOutput = new byte[inputDim[2] * inputDim[3]];
                    byte[] rawOutput      = new byte[stride * newHeight];
                    tb.CopyPixels(rawData, stride, 0);
                    int     paddingX = inputDim[3] - newWidth;
                    int     paddingY = inputDim[2] - newHeight;
                    float[] testData = new float[inputDim[2] * inputDim[3] * inputDim[1]];
                    // initialize the whole tensor data to the background value so we do not have to deal with padding later
                    for (int n = 0; n < inputDim[2] * inputDim[3] * inputDim[1]; n++)
                    {
                        testData[n] = -1.0f;
                    }
                    var offsetX = paddingX / 2;
                    var offsetY = paddingY / 2;
                    // fill up tensor with image data
                    for (int y = 0; y < newHeight; y++)
                    {
                        int y1 = y;
                        for (int x = 0; x < newWidth; x++)
                        {
                            testData[(x + offsetX) + (y + offsetY) * inputDim[3] + inputDim[2] * inputDim[3] * 2] = rawData[(x + y1 * newWidth) * channels] / 127.5f;
                            testData[(x + offsetX) + (y + offsetY) * inputDim[3] + inputDim[2] * inputDim[3]]     = rawData[(x + y1 * newWidth) * channels + 1] / 127.5f;
                            testData[(x + offsetX) + (y + offsetY) * inputDim[3]] = rawData[(x + y1 * newWidth) * channels + 2] / 127.5f;
                            testData[(x + offsetX) + (y + offsetY) * inputDim[3] + inputDim[2] * inputDim[3] * 2] -= 1.0f;
                            testData[(x + offsetX) + (y + offsetY) * inputDim[3] + inputDim[2] * inputDim[3]]     -= 1.0f;
                            testData[(x + offsetX) + (y + offsetY) * inputDim[3]] -= 1.0f;
                        }
                    }
                    foreach (var name in inputMeta.Keys)
                    {
                        var tensor = new DenseTensor <float>(testData, inputMeta[name].Dimensions);
                        container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                    }
                    using (var results = session.Run(container))
                    {
                        int numResults = results.Count;
                        foreach (var r in results)
                        {
                            System.Console.WriteLine(r.Name);
                            var resultTensor    = r.AsTensor <float>();
                            var resultDimension = resultTensor.Dimensions;
                            System.Console.WriteLine(resultDimension.Length);
                            var     resultArray = resultTensor.ToArray();
                            float[] pointVal    = new float[resultDimension[1]];
                            for (var y = 0; y < resultDimension[2]; y++)
                            {
                                for (var x = 0; x < resultDimension[3]; x++)
                                {
                                    for (var n = 0; n < resultDimension[1]; n++)
                                    {
                                        pointVal[n] = resultArray[x + y * resultDimension[3] + n * resultDimension[2] * resultDimension[3]];
                                    }
                                    Softmax(pointVal);
                                    byte labelVal = (byte)MaxIndex(pointVal);
                                    rawLabelOutput[x + y * resultDimension[3]] = labelVal;
                                }
                            }
                            if (resultDimension[1] < 64)
                            {
                                for (int y = 0; y < newHeight; y++)
                                {
                                    for (int x = 0; x < newWidth; x++)
                                    {
                                        int n = rawLabelOutput[(x + offsetX) + (y + offsetY) * resultDimension[3]];
                                        rawOutput[(x + y * newWidth) * channels + 3] = 255;
                                        rawOutput[(x + y * newWidth) * channels + 2] = REDLABEL[n];
                                        rawOutput[(x + y * newWidth) * channels + 1] = GREENLABEL[n];
                                        rawOutput[(x + y * newWidth) * channels]     = BLUELABEL[n];
                                    }
                                }
                            }
                            else
                            {
                                for (int y = 0; y < newHeight; y++)
                                {
                                    for (int x = 0; x < newWidth; x++)
                                    {
                                        int n = rawLabelOutput[(x + offsetX) + (y + offsetY) * resultDimension[3]];
                                        rawOutput[(x + y * newWidth) * channels + 3] = 255;
                                        rawOutput[(x + y * newWidth) * channels + 2] = (byte)n;
                                        rawOutput[(x + y * newWidth) * channels + 1] = (byte)n;
                                        rawOutput[(x + y * newWidth) * channels]     = (byte)n;
                                    }
                                }
                            }
                            var outputImage = ImageFromRawBgraArray(rawOutput, newWidth, newHeight, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                            outputImage = ResizeImage(outputImage, (int)bitmapHeight, (int)bitmapWidth);
                            outputImage.Save(args[1]);
                        }
                    }
                }
                catch (Exception e)
                {
                    System.Console.WriteLine("Could not load image because of " + e.ToString());
                }
            }
        }
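The Softmax and MaxIndex helpers called in the per-pixel loop above are not part of this snippet. A minimal sketch consistent with how they are used (in-place softmax over a float[] of class scores, then arg-max) could look like this:

        // Sketch only: in-place softmax over the per-pixel class scores
        private static void Softmax(float[] values)
        {
            float max = values[0];
            for (int i = 1; i < values.Length; i++)
            {
                if (values[i] > max) { max = values[i]; }
            }
            float sum = 0f;
            for (int i = 0; i < values.Length; i++)
            {
                values[i] = (float)Math.Exp(values[i] - max);   // shift by the max for numerical stability
                sum      += values[i];
            }
            for (int i = 0; i < values.Length; i++)
            {
                values[i] /= sum;
            }
        }

        // Sketch only: index of the largest probability, i.e. the predicted label
        private static int MaxIndex(float[] values)
        {
            int best = 0;
            for (int i = 1; i < values.Length; i++)
            {
                if (values[i] > values[best]) { best = i; }
            }
            return(best);
        }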
Exemplo n.º 23
0
        private void Correct(InferenceSession session, string path)
        {
            var rawImage = new BitmapImage();

            while (true)
            {
                try
                {
                    using (var stream = File.OpenRead(path))
                    {
                        rawImage.BeginInit();
                        rawImage.CacheOption  = BitmapCacheOption.OnLoad;
                        rawImage.StreamSource = stream;
                        rawImage.EndInit();
                    }
                }
                catch (System.IO.IOException e)
                {
                    var ret = WinForms.MessageBox.Show(
                        e.Message, "VRCPhotoRotationCorrector",
                        WinForms.MessageBoxButtons.RetryCancel);
                    if (ret == WinForms.DialogResult.Retry)
                    {
                        continue;
                    }
                    else
                    {
                        return;
                    }
                }
                break;
            }
            var scale       = INPUT_SIZE / (double)Math.Max(rawImage.PixelWidth, rawImage.PixelHeight);
            var scaledImage = new TransformedBitmap(rawImage, new ScaleTransform(scale, scale));
            var data        = new byte[INPUT_SIZE * INPUT_SIZE * 4];
            var stride      = INPUT_SIZE * 4;
            var offsetX     = (INPUT_SIZE - scaledImage.PixelWidth) / 2;
            var offsetY     = (INPUT_SIZE - scaledImage.PixelHeight) / 2;

            scaledImage.CopyPixels(data, stride, offsetX * 4 + offsetY * stride);
            var source = TransposeAndCast(data);
            var dims   = new int[] { 1, 3, INPUT_SIZE, INPUT_SIZE };
            var probs  = new float[4];

            for (int rot = 0; rot < 4; ++rot)
            {
                if (rot != 0)
                {
                    Rotate90(source);
                }
                var tensor = new DenseTensor <float>(source, dims);
                var inputs = new List <NamedOnnxValue>()
                {
                    NamedOnnxValue.CreateFromTensor <float>("input.1", tensor)
                };
                using (var results = session.Run(inputs))
                {
                    foreach (var r in results)
                    {
                        var values = r.AsTensor <float>().ToArray();
                        var e0     = (float)Math.Exp(values[0]);
                        var e1     = (float)Math.Exp(values[1]);
                        probs[rot] = e0 / (e0 + e1);
                    }
                }
            }
            var maxval = probs.Max();

            if (probs[0] != maxval)
            {
                var angle = 0.0;
                for (int a = 0; a < 4; ++a)
                {
                    if (probs[a] == maxval)
                    {
                        angle = a * 90.0;
                    }
                }
                var rotatedImage = new TransformedBitmap(rawImage, new RotateTransform(angle));
                var encoder      = new PngBitmapEncoder();
                encoder.Frames.Add(BitmapFrame.Create(rotatedImage));
                var tmp = path + ".tmp.png";
                using (var stream = File.OpenWrite(tmp))
                {
                    encoder.Save(stream);
                }
                File.Delete(path);
                File.Move(tmp, path);
            }
            // MessageBox.Show(path + ": " + probs[0].ToString() + ", " + probs[1].ToString() + ", " + probs[2].ToString() + ", " + probs[3].ToString());
        }
Exemplo n.º 24
0
        public ImageInfo RecognizeImage(byte[] mas, string fileName)
        {
            var image = Image.Load <Rgb24>(mas);

            const int TargetWidth  = 224;
            const int TargetHeight = 224;

            // Resize the image to 224 x 224
            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop // Keep the aspect ratio, cropping the excess
                });
            });

            // Convert the pixels to a tensor and normalize
            var input  = new DenseTensor <float>(new[] { 1, 3, TargetHeight, TargetWidth });
            var mean   = new[] { 0.485f, 0.456f, 0.406f };
            var stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                    input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                    input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                }
            }

            // Prepare the network inputs. The input name is defined in the model file
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("data", input)
            };

            // Run the network to get a prediction

            ImageInfo tmp     = new ImageInfo();
            var       session = new InferenceSession(@"C:\Users\Маша\Documents\Visual Studio 2017\Projects\RecognizerLib\resnet18-v2-7.onnx");

            //res = "Predicting contents of image...\n";
            //Console.WriteLine("Predicting contents of image...");
            IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Take the 1000 outputs and compute their softmax
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            // Collect the 10 most likely results


            foreach (var p in softmax
                     .Select((x, g) => new { Label = classLabels[g], Confidence = x })
                     .OrderByDescending(x => x.Confidence)
                     .Take(10))
            {
                tmp.AddInfo(fileName, p.Label, p.Confidence);
            }

            return(tmp);
        }
Exemplo n.º 25
0
        public void GetResults()
        {
            int numThreads = Environment.ProcessorCount;
            int chunk      = fullNames.Count() / numThreads + 1;

            Thread[] threads = new Thread[numThreads];
            for (int i = 0; i < numThreads; i++)
            {
                int chunkStart = i * chunk;

                int chunkEnd = chunkStart + chunk < fullNames.Count() ? chunkStart + chunk : fullNames.Count();

                threads[i] = new Thread(() =>
                {
                    for (int j = chunkStart; j < chunkEnd; j++)
                    {
                        if (token.IsCancellationRequested)
                        {
                            //Console.WriteLine("Termination...");
                            cancelTokenSource.Dispose();
                            return;
                            //System.Environment.Exit(0);
                        }

                        var image = Image.Load <Rgb24>(fullNames[j]);

                        const int TargetWidth  = 224;
                        const int TargetHeight = 224;

                        // Resize the image to 224 x 224
                        image.Mutate(x =>
                        {
                            x.Resize(new ResizeOptions
                            {
                                Size = new Size(TargetWidth, TargetHeight),
                                Mode = ResizeMode.Crop // Keep the aspect ratio, cropping the excess
                            });
                        });

                        // Convert the pixels to a tensor and normalize
                        var input  = new DenseTensor <float>(new[] { 1, 3, TargetHeight, TargetWidth });
                        var mean   = new[] { 0.485f, 0.456f, 0.406f };
                        var stddev = new[] { 0.229f, 0.224f, 0.225f };
                        for (int y = 0; y < TargetHeight; y++)
                        {
                            Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                            for (int x = 0; x < TargetWidth; x++)
                            {
                                input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                                input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                                input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                            }
                        }

                        // Prepare the network inputs. The input name is defined in the model file
                        var inputs = new List <NamedOnnxValue>
                        {
                            NamedOnnxValue.CreateFromTensor("data", input)
                        };

                        // Run the network to get a prediction
                        waitHandler.WaitOne();
                        ImageInfo tmp = new ImageInfo();
                        var session   = new InferenceSession(@"C:\Users\Маша\Documents\Visual Studio 2017\Projects\RecognizerLib\resnet18-v2-7.onnx");

                        //res = "Predicting contents of image...\n";
                        //Console.WriteLine("Predicting contents of image...");
                        IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

                        // Take the 1000 outputs and compute their softmax
                        var output  = results.First().AsEnumerable <float>().ToArray();
                        var sum     = output.Sum(x => (float)Math.Exp(x));
                        var softmax = output.Select(x => (float)Math.Exp(x) / sum);

                        // Collect the 10 most likely results


                        foreach (var p in softmax
                                 .Select((x, g) => new { Label = classLabels[g], Confidence = x })
                                 .OrderByDescending(x => x.Confidence)
                                 .Take(10))
                        {
                            tmp.AddInfo(fullNames[j], p.Label, p.Confidence);
                        }
                        //res += p.Label + " with confidence " + p.Confidence + "\n";
                        //Console.WriteLine($"{p.Label} with confidence {p.Confidence}");


                        //Console.WriteLine("pic: " + j);
                        // Console.WriteLine(tmp);
                        //res += "\n";
                        //result.Add(tmp);
                        //if (iConsoleInterface != null)
                        //    iConsoleInterface.ReturnRes(tmp);

                        ires.ReturnRes(tmp);
                        //Thread.Sleep(1000);
                        waitHandler.Set();
                    }
                });
                threads[i].Start();
            }
            foreach (Thread thread in threads)
            {
                thread.Join();
            }
        }
Exemplo n.º 26
0
        private void TestTensorRTProviderOptions()
        {
            string modelPath            = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet.onnx");
            string calTablePath         = "squeezenet_calibration.flatbuffers";
            string enginePath           = "./";
            string engineDecryptLibPath = "engine_decryp";

            using (var cleanUp = new DisposableListTest <IDisposable>())
            {
                var trtProviderOptions = new OrtTensorRTProviderOptions();
                cleanUp.Add(trtProviderOptions);

                var providerOptionsDict = new Dictionary <string, string>();
                providerOptionsDict["device_id"]       = "0";
                providerOptionsDict["trt_fp16_enable"] = "1";
                providerOptionsDict["trt_int8_enable"] = "1";
                providerOptionsDict["trt_int8_calibration_table_name"] = calTablePath;
                providerOptionsDict["trt_engine_cache_enable"]         = "1";
                providerOptionsDict["trt_engine_cache_path"]           = enginePath;
                providerOptionsDict["trt_engine_decryption_enable"]    = "0";
                providerOptionsDict["trt_engine_decryption_lib_path"]  = engineDecrptLibPath;
                trtProviderOptions.UpdateOptions(providerOptionsDict);

                var resultProviderOptionsDict = new Dictionary <string, string>();
                ProviderOptionsValueHelper.StringToDict(trtProviderOptions.GetOptions(), resultProviderOptionsDict);

                // test provider options configuration
                string value;
                value = resultProviderOptionsDict["device_id"];
                Assert.Equal("0", value);
                value = resultProviderOptionsDict["trt_fp16_enable"];
                Assert.Equal("1", value);
                value = resultProviderOptionsDict["trt_int8_enable"];
                Assert.Equal("1", value);
                value = resultProviderOptionsDict["trt_int8_calibration_table_name"];
                Assert.Equal(calTablePath, value);
                value = resultProviderOptionsDict["trt_engine_cache_enable"];
                Assert.Equal("1", value);
                value = resultProviderOptionsDict["trt_engine_cache_path"];
                Assert.Equal(enginePath, value);
                value = resultProviderOptionsDict["trt_engine_decryption_enable"];
                Assert.Equal("0", value);
                value = resultProviderOptionsDict["trt_engine_decryption_lib_path"];
                Assert.Equal(engineDecryptLibPath, value);

                // test correctness of provider options
                SessionOptions options = SessionOptions.MakeSessionOptionWithTensorrtProvider(trtProviderOptions);
                cleanUp.Add(options);

                var session = new InferenceSession(modelPath, options);
                cleanUp.Add(session);

                var     inputMeta = session.InputMetadata;
                var     container = new List <NamedOnnxValue>();
                float[] inputData = TestDataLoader.LoadTensorFromFile(@"bench.in"); // this is the data for only one input tensor for this model
                foreach (var name in inputMeta.Keys)
                {
                    Assert.Equal(typeof(float), inputMeta[name].ElementType);
                    Assert.True(inputMeta[name].IsTensor);
                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);
                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                }

                session.Run(container);
            }
        }
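The same dictionary-driven pattern applies to other execution providers. Purely as a sketch, and assuming a recent ONNX Runtime release that exposes OrtCUDAProviderOptions and the matching SessionOptions.MakeSessionOptionWithCudaProvider overload, a CUDA variant could look like this (the option values are illustrative):

        // Sketch only: configuring the CUDA execution provider with the same
        // provider-options dictionary pattern used for TensorRT above.
        private void TestCUDAProviderOptionsSketch()
        {
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet.onnx");

            using (var cudaProviderOptions = new OrtCUDAProviderOptions())
            {
                var providerOptionsDict = new Dictionary <string, string>();
                providerOptionsDict["device_id"]     = "0";
                providerOptionsDict["gpu_mem_limit"] = "2147483648"; // 2 GB, illustrative value
                cudaProviderOptions.UpdateOptions(providerOptionsDict);

                using (var options = SessionOptions.MakeSessionOptionWithCudaProvider(cudaProviderOptions))
                    using (var session = new InferenceSession(modelPath, options))
                    {
                        // build the input container and call session.Run(...) exactly as above
                    }
            }
        }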
Exemplo n.º 27
0
        static void Main(string[] args)
        {
            using var image = Image.Load <Rgb24>(args.FirstOrDefault() ?? "image.jpg");

            const int TargetWidth  = 224;
            const int TargetHeight = 224;

            // Resize the image to 224 x 224
            image.Mutate(x =>
            {
                x.Resize(new ResizeOptions
                {
                    Size = new Size(TargetWidth, TargetHeight),
                    Mode = ResizeMode.Crop // Keep the aspect ratio, cropping the excess
                });
            });

            // Convert the pixels to a tensor and normalize
            var input  = new DenseTensor <float>(new[] { 1, 3, TargetHeight, TargetWidth });
            var mean   = new[] { 0.485f, 0.456f, 0.406f };
            var stddev = new[] { 0.229f, 0.224f, 0.225f };

            for (int y = 0; y < TargetHeight; y++)
            {
                Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                for (int x = 0; x < TargetWidth; x++)
                {
                    input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                    input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                    input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                }
            }

            // Prepare the network inputs. The input name is defined in the model file
            var inputs = new List <NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("input", input)
            };

            // Load the model from an embedded resource.
            // See <EmbeddedResource> in the project file
            using var modelStream  = typeof(Program).Assembly.GetManifestResourceStream("OnnxSample.shufflenet-v2-10.onnx");
            using var memoryStream = new MemoryStream();
            modelStream.CopyTo(memoryStream);
            using var session = new InferenceSession(memoryStream.ToArray());

            // Run the network to get a prediction
            Console.WriteLine("Predicting contents of image...");
            using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

            // Take the 1000 outputs and compute their softmax
            var output  = results.First().AsEnumerable <float>().ToArray();
            var sum     = output.Sum(x => (float)Math.Exp(x));
            var softmax = output.Select(x => (float)Math.Exp(x) / sum);

            // Print the 10 most likely results to the console
            foreach (var p in softmax
                     .Select((x, i) => new { Label = classLabels[i], Confidence = x })
                     .OrderByDescending(x => x.Confidence)
                     .Take(10))
            {
                Console.WriteLine($"{p.Label} with confidence {p.Confidence}");
            }
        }
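The softmax plus top-10 post-processing above also appears in examples n.º 19, 24 and 25, so it could be factored into a small shared helper. A sketch (illustrative name, assumes System and System.Linq are imported as in the examples):

        // Illustrative helper: numerically stable softmax over the raw model outputs
        static float[] Softmax(float[] logits)
        {
            var max  = logits.Max();                                       // shift by the max for numerical stability
            var exps = logits.Select(v => (float)Math.Exp(v - max)).ToArray();
            var sum  = exps.Sum();
            return(exps.Select(v => v / sum).ToArray());
        }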
Exemplo n.º 28
0
        private void TestPreTrainedModels(string opset, string modelName)
        {
            var    modelsDir         = GetTestModelsDir();
            string onnxModelFileName = null;

            var modelDir = new DirectoryInfo(Path.Combine(modelsDir, opset, modelName));

            try
            {
                var  onnxModelNames  = modelDir.GetFiles("*.onnx");
                bool validModelFound = false;
                if (onnxModelNames.Length > 0)
                {
                    // TODO remove file "._resnet34v2.onnx" from test set
                    for (int i = 0; i < onnxModelNames.Length; i++)
                    {
                        if (onnxModelNames[i].Name != "._resnet34v2.onnx")
                        {
                            onnxModelNames[0] = onnxModelNames[i];
                            validModelFound   = true;
                        }
                    }
                }

                if (validModelFound)
                {
                    onnxModelFileName = Path.Combine(modelDir.FullName, onnxModelNames[0].Name);
                }
                else
                {
                    var modelNamesList = string.Join(",", onnxModelNames.Select(x => x.ToString()));
                    throw new Exception($"Opset {opset} Model {modelName}. Can't determine model file name. Found these :{modelNamesList}");
                }

                using (var session = new InferenceSession(onnxModelFileName))
                {
                    var    inMeta = session.InputMetadata;
                    string testDataDirNamePattern = "test_data*";
                    if (opset == "opset9" && modelName == "LSTM_Seq_lens_unpacked")
                    {
                        testDataDirNamePattern = "seq_lens*"; // discrepancy in data directory
                    }
                    foreach (var testDataDir in modelDir.EnumerateDirectories(testDataDirNamePattern))
                    {
                        var inputContainer  = new List <NamedOnnxValue>();
                        var outputContainer = new List <NamedOnnxValue>();
                        foreach (var f in testDataDir.EnumerateFiles("input_*.pb"))
                        {
                            inputContainer.Add(TestDataLoader.LoadTensorFromFilePb(f.FullName, inMeta));
                        }
                        foreach (var f in testDataDir.EnumerateFiles("output_*.pb"))
                        {
                            outputContainer.Add(TestDataLoader.LoadTensorFromFilePb(f.FullName, session.OutputMetadata));
                        }

                        using (var resultCollection = session.Run(inputContainer))
                        {
                            foreach (var result in resultCollection)
                            {
                                Assert.True(session.OutputMetadata.ContainsKey(result.Name));
                                var            outputMeta  = session.OutputMetadata[result.Name];
                                NamedOnnxValue outputValue = null;
                                foreach (var o in outputContainer)
                                {
                                    if (o.Name == result.Name)
                                    {
                                        outputValue = o;
                                        break;
                                    }
                                }
                                if (outputValue == null)
                                {
                                    outputValue = outputContainer.First(); // in case the output data file does not contain the name
                                }
                                if (outputMeta.IsTensor)
                                {
                                    if (outputMeta.ElementType == typeof(float))
                                    {
                                        Assert.Equal(result.AsTensor <float>(), outputValue.AsTensor <float>(), new FloatComparer());
                                    }
                                    else if (outputMeta.ElementType == typeof(double))
                                    {
                                        Assert.Equal(result.AsTensor <double>(), outputValue.AsTensor <double>(), new DoubleComparer());
                                    }
                                    else if (outputMeta.ElementType == typeof(int))
                                    {
                                        Assert.Equal(result.AsTensor <int>(), outputValue.AsTensor <int>(), new ExactComparer <int>());
                                    }
                                    else if (outputMeta.ElementType == typeof(uint))
                                    {
                                        Assert.Equal(result.AsTensor <uint>(), outputValue.AsTensor <uint>(), new ExactComparer <uint>());
                                    }
                                    else if (outputMeta.ElementType == typeof(short))
                                    {
                                        Assert.Equal(result.AsTensor <short>(), outputValue.AsTensor <short>(), new ExactComparer <short>());
                                    }
                                    else if (outputMeta.ElementType == typeof(ushort))
                                    {
                                        Assert.Equal(result.AsTensor <ushort>(), outputValue.AsTensor <ushort>(), new ExactComparer <ushort>());
                                    }
                                    else if (outputMeta.ElementType == typeof(long))
                                    {
                                        Assert.Equal(result.AsTensor <long>(), outputValue.AsTensor <long>(), new ExactComparer <long>());
                                    }
                                    else if (outputMeta.ElementType == typeof(ulong))
                                    {
                                        Assert.Equal(result.AsTensor <ulong>(), outputValue.AsTensor <ulong>(), new ExactComparer <ulong>());
                                    }
                                    else if (outputMeta.ElementType == typeof(byte))
                                    {
                                        Assert.Equal(result.AsTensor <byte>(), outputValue.AsTensor <byte>(), new ExactComparer <byte>());
                                    }
                                    else if (outputMeta.ElementType == typeof(bool))
                                    {
                                        Assert.Equal(result.AsTensor <bool>(), outputValue.AsTensor <bool>(), new ExactComparer <bool>());
                                    }
                                    else if (outputMeta.ElementType == typeof(Float16))
                                    {
                                        Assert.Equal(result.AsTensor <Float16>(), outputValue.AsTensor <Float16>(), new Float16Comparer {
                                            tolerance = 2
                                        });
                                    }
                                    else if (outputMeta.ElementType == typeof(BFloat16))
                                    {
                                        Assert.Equal(result.AsTensor <BFloat16>(), outputValue.AsTensor <BFloat16>(), new BFloat16Comparer {
                                            tolerance = 2
                                        });
                                    }
                                    else
                                    {
                                        Assert.True(false, $"{nameof(TestPreTrainedModels)} does not yet support output of type {outputMeta.ElementType}");
                                    }
                                }
                                else
                                {
                                    Assert.True(false, $"{nameof(TestPreTrainedModels)} cannot handle non-tensor outputs yet");
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                var msg = $"Opset {opset}, Model {modelName}: ModelFile = {onnxModelFileName} error = {ex.Message}";
                if (ex.Message.Contains("ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions"))
                {
                    // If the exception is thrown because the opset version of the test model is
                    // not supported by ONNXRuntime yet, then ignore the test and proceed.
                    // ORT allows commits from ONNX master and in such cases we do come across new opsets which are
                    // not supported in ORT yet. In order to force these tests to run set env var ALLOW_RELEASED_ONNX_OPSET_ONLY=0
                    output.WriteLine("Skipping the model test as the latest ONNX opset is not supported yet. Error Message: " + msg);
                }
                else
                {
                    throw new Exception(msg + "\n" + ex.StackTrace);
                }
            }
        }
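The integral-type branches above all follow the same pattern. Purely as a sketch (ExactComparer<T>, NamedOnnxValue and Assert come from the surrounding test project), they could be collapsed with a small generic helper, while float, double, Float16 and BFloat16 keep their tolerance-based comparers:

        // Sketch only: one generic assertion for the element types compared with ExactComparer<T> above
        private static void AssertTensorEqualExact <T>(NamedOnnxValue result, NamedOnnxValue expected)
        {
            Assert.Equal(result.AsTensor <T>(), expected.AsTensor <T>(), new ExactComparer <T>());
        }

Each exact-comparison branch would then reduce to a single call such as AssertTensorEqualExact<int>(result, outputValue).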
Exemplo n.º 29
0
        private void TestPreTrainedModelsOpset7And8()
        {
            // 16-bit float not supported type in C#.
            var skipModels = new[] {
                "fp16_inception_v1",
                "fp16_shufflenet",
                "fp16_tiny_yolov2"
            };

            var opsets    = new[] { "opset7", "opset8" };
            var modelsDir = GetTestModelsDir();

            foreach (var opset in opsets)
            {
                var modelRoot = new DirectoryInfo(Path.Combine(modelsDir, opset));
                //var cwd = Directory.GetCurrentDirectory();
                foreach (var modelDir in modelRoot.EnumerateDirectories())
                {
                    String onnxModelFileName = null;

                    if (skipModels.Contains(modelDir.Name))
                    {
                        continue;
                    }

                    try
                    {
                        var onnxModelNames = modelDir.GetFiles("*.onnx");
                        if (onnxModelNames.Length > 1)
                        {
                            // TODO remove file "._resnet34v2.onnx" from test set
                            bool validModelFound = false;
                            for (int i = 0; i < onnxModelNames.Length; i++)
                            {
                                if (onnxModelNames[i].Name != "._resnet34v2.onnx")
                                {
                                    onnxModelNames[0] = onnxModelNames[i];
                                    validModelFound   = true;
                                }
                            }

                            if (!validModelFound)
                            {
                                var modelNamesList = string.Join(",", onnxModelNames.Select(x => x.ToString()));
                                throw new Exception($"Opset {opset}: Model {modelDir}. Can't determine model file name. Found these :{modelNamesList}");
                            }
                        }

                        onnxModelFileName = Path.Combine(modelsDir, opset, modelDir.Name, onnxModelNames[0].Name);
                        using (var session = new InferenceSession(onnxModelFileName))
                        {
                            var inMeta     = session.InputMetadata;
                            var innodepair = inMeta.First();
                            var innodename = innodepair.Key;
                            var innodedims = innodepair.Value.Dimensions;
                            for (int i = 0; i < innodedims.Length; i++)
                            {
                                if (innodedims[i] < 0)
                                {
                                    innodedims[i] = -1 * innodedims[i];
                                }
                            }

                            var testRoot = new DirectoryInfo(Path.Combine(modelsDir, opset, modelDir.Name));
                            var testData = testRoot.EnumerateDirectories("test_data*").First();
                            var dataIn   = LoadTensorFromFilePb(Path.Combine(modelsDir, opset, modelDir.Name, testData.ToString(), "input_0.pb"));
                            var dataOut  = LoadTensorFromFilePb(Path.Combine(modelsDir, opset, modelDir.Name, testData.ToString(), "output_0.pb"));
                            var tensorIn = new DenseTensor <float>(dataIn, innodedims);
                            var nov      = new List <NamedOnnxValue>();
                            nov.Add(NamedOnnxValue.CreateFromTensor <float>(innodename, tensorIn));
                            using (var resnov = session.Run(nov))
                            {
                                var res = resnov.ToArray()[0].AsTensor <float>().ToArray <float>();
                                Assert.Equal(res, dataOut, new floatComparer());
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        var msg = $"Opset {opset}: Model {modelDir}: ModelFile = {onnxModelFileName} error = {ex.Message}";
                        throw new Exception(msg);
                    }
                } //model
            }     //opset
        }
Exemplo n.º 30
0
        public ConcurrentQueue <Output> processDirAsync(string dir_path)
        {
            if (exception != null)
            {
                exception = new Exception("Model was incorrect");
                return(null);
            }
            else
            {
                Debug.WriteLine("REC");
                ConcurrentQueue <string> requests = new ConcurrentQueue <string>(); // note: check whether a concurrent collection is the better choice so explicit locking can be avoided
                cts = new CancellationTokenSource();
                ConcurrentQueue <Output> returns = new ConcurrentQueue <Output>();
                int proc = System.Environment.ProcessorCount;
                try
                {
                    DirectoryInfo dir = new DirectoryInfo(dir_path);

                    var files = dir.GetFiles();

                    Thread[] threads     = new Thread[proc];
                    Thread   wait_thread = new Thread(() =>
                    {
                        for (int i = 0; i < proc; i++)
                        {
                            threads[i].Join();
                        }
                        returns.Enqueue(null);
                    });
                    wait_thread.IsBackground = true;
                    foreach (var file in files)
                    {
                        requests.Enqueue(file.FullName);
                    }
                    for (int i = 0; i < proc; i++)
                    {
                        threads[i] = new Thread(() =>
                                                #region scary lambda
                        {
                            string image_path;
                            byte[] blob = null;
                            while (requests.TryDequeue(out image_path))
                            {
                                try
                                {
                                    blob = File.ReadAllBytes(image_path);
                                }
                                catch
                                {
                                    Debug.WriteLine("Cannot read file " + image_path);
                                    continue;
                                }
                                if (DbProvider.isExist(image_path) && DbProvider.compareBlob(image_path, blob))
                                {
                                    Debug.WriteLine("ALREADY EXIST " + image_path);
                                    var img = DbProvider.getImage(image_path);
                                    returns.Enqueue(new Output(image_path, new KeyValuePair <string, float>(img.Class, img.Probability)));
                                    continue;
                                }
                                if (cts.Token.IsCancellationRequested)
                                {
                                    return;
                                }

                                var inputMeta = session.InputMetadata;

                                var container = new List <NamedOnnxValue>();
                                float[] inputData;
                                //inputdata!=blob
                                if ((inputData = CustomImage.parseImage(image_path, blob)) == null)
                                {
                                    return;
                                }

                                if (cts.Token.IsCancellationRequested)
                                {
                                    return;
                                }

                                foreach (var name in inputMeta.Keys)
                                {
                                    var tensor = new DenseTensor <float>(inputData, inputMeta[name].Dimensions);
                                    //Microsoft.ML.OnnxRuntime.Tensors.Tensor
                                    container.Add(NamedOnnxValue.CreateFromTensor <float>(name, tensor));
                                }

                                if (cts.Token.IsCancellationRequested)
                                {
                                    return;
                                }

                                using (var results = session.Run(container))
                                {
                                    foreach (var r in results)
                                    {
                                        var list = new List <KeyValuePair <string, float> >();
                                        int iter = 0;
                                        foreach (var a in r.AsEnumerable <float>())
                                        {
                                            list.Add(new KeyValuePair <string, float>(answers[iter].ToString(), a));
                                            iter++;
                                        }

                                        var smthg = (from pair in list
                                                     orderby pair.Value descending
                                                     select pair).Take(3);
                                        var buf = new Output(image_path, smthg.ToArray());
                                        returns.Enqueue(buf);
                                        DbProvider.add(image_path, smthg.First().Key, smthg.First().Value, blob);
                                    }
                                }
                            }
                        }
                                                #endregion
                                                );
                        threads[i].Start();
                    }
                    wait_thread.Start();
                }
                catch (Exception ex)
                {
                    exception = ex;
                }
                return(returns);
            }
        }