Example #1
 public OnnxImageClassifier()
 {
     RegisterForDisposal(() =>
     {
         _session?.Dispose();
     });
 }
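
For contrast, a minimal hedged sketch (the model path is a placeholder) that scopes the session with a using statement instead of registering a disposal callback:

 // Hedged alternative: a using block releases the session deterministically
 // without the host framework's RegisterForDisposal helper.
 using (var session = new InferenceSession("model.onnx"))
 {
     // ... session.Run(...) ...
 }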
Example #2
        private void TestPreTrainedModelsOpset7And8()
        {
            var opsets = new[] { "opset7", "opset8" };

            foreach (var opset in opsets)
            {
                var modelRoot = new DirectoryInfo(opset);
                var cwd       = Directory.GetCurrentDirectory();
                foreach (var modelDir in modelRoot.EnumerateDirectories())
                {
                    string onnxModelFileName = null;
                    try
                    {
                        var onnxModelNames = modelDir.GetFiles("*.onnx");
                        if (onnxModelNames.Length != 1)
                        {
                            // TODO remove file "._resnet34v2.onnx" from test set
                            if (onnxModelNames[0].Name == "._resnet34v2.onnx")
                            {
                                onnxModelNames[0] = onnxModelNames[1];
                            }
                            else
                            {
                                var modelNamesList = string.Join(",", onnxModelNames.Select(x => x.ToString()));
                                throw new Exception($"Opset {opset}: Model {modelDir}. Can't determine model file name. Found these :{modelNamesList}");
                            }
                        }

                        onnxModelFileName = Path.Combine(cwd, opset, modelDir.Name, onnxModelNames[0].Name);
                        var session    = new InferenceSession(onnxModelFileName);
                        var inMeta     = session.InputMetadata;
                        var innodepair = inMeta.First();
                        var innodename = innodepair.Key;
                        var innodedims = innodepair.Value.Dimensions;
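                        // Symbolic (free) dimensions are reported as negative values
                        // in InputMetadata; flip them so the tensor shape is concrete.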
                        for (int i = 0; i < innodedims.Length; i++)
                        {
                            if (innodedims[i] < 0)
                            {
                                innodedims[i] = -1 * innodedims[i];
                            }
                        }
                        var dataIn   = LoadTensorFromFilePb(Path.Combine(cwd, opset, modelDir.Name, "test_data_set_0", "input_0.pb"));
                        var dataOut  = LoadTensorFromFilePb(Path.Combine(cwd, opset, modelDir.Name, "test_data_set_0", "output_0.pb"));
                        var tensorIn = new DenseTensor <float>(dataIn, innodedims);
                        var nov      = new List <NamedOnnxValue>();
                        nov.Add(NamedOnnxValue.CreateFromTensor <float>(innodename, tensorIn));
                        var resnov = session.Run(nov);
                        var res    = resnov.ToArray()[0].AsTensor <float>().ToArray <float>();
                        Assert.Equal(res, dataOut, new floatComparer());
                        session.Dispose();
                    }
                    catch (Exception ex)
                    {
                        var msg = $"Opset {opset}: Model {modelDir}: ModelFile = {onnxModelFileName} error = {ex.Message}";
                        throw new Exception(msg, ex);
                    }
                } //model
            }     //opset
        }
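
The loop above only flips negative dimensions in place; a hedged standalone sketch of the same metadata inspection (model path is a placeholder):

        // Print each input's shape, substituting 1 for any symbolic axis
        // before an input tensor would be allocated.
        using (var session = new InferenceSession("model.onnx"))
        {
            foreach (var pair in session.InputMetadata)
            {
                var dims = pair.Value.Dimensions.Select(d => d < 0 ? 1 : d).ToArray();
                Console.WriteLine($"{pair.Key}: [{string.Join(",", dims)}]");
            }
        }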
Example #3
 /// <summary>
 /// Releases the wrapped inference session exactly once.
 /// </summary>
 public void Dispose()
 {
     if (!_disposed)
     {
         _session.Dispose();
         _disposed = true;
     }
 }
Example #4
 public void Dispose()
 {
     if (!disposed)
     {
         Session.Dispose();
         disposed = true;
     }
 }
Example #5
 public void Dispose()
 {
     if (session != null)
     {
         session.Dispose();
         session = null;
     }
     //if (dbprovider != null)
     // dbprovider.Dispose();
 }
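
A hedged sketch of the fuller pattern these Dispose variants approximate (class name hypothetical; standard .NET dispose idiom):

 public sealed class OnnxSessionHolder : IDisposable
 {
     private InferenceSession _session; // wraps native ONNX Runtime resources

     public void Dispose()
     {
         _session?.Dispose();       // null-conditional tolerates repeated calls
         _session = null;
         GC.SuppressFinalize(this); // matters only if a finalizer exists
     }
 }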
Example #6
        private void TestPreTrainedModelsOpset7And8()
        {
            var opsets = new[] { "opset7", "opset8" };

            foreach (var opset in opsets)
            {
                var modelRoot = new DirectoryInfo(opset);
                foreach (var model in modelRoot.EnumerateDirectories())
                {
                    // TODO: dims contains 'None'. Session throws error.
                    if (model.ToString() == "test_tiny_yolov2")
                    {
                        continue;
                    }
                    try
                    {
                        var modelNames = model.GetFiles("*.onnx");
                        if (modelNames.Length != 1)
                        {
                            // TODO remove file "._resnet34v2.onnx" from test set
                            if (modelNames[0].Name == "._resnet34v2.onnx")
                            {
                                modelNames[0] = modelNames[1];
                            }
                            else
                            {
                                var modelNamesList = string.Join(",", modelNames.Select(x => x.ToString()));
                                throw new Exception($"Opset {opset}: Model {model}. Can't determine model file name. Found these :{modelNamesList}");
                            }
                        }

                        var session    = new InferenceSession($"{opset}\\{model}\\{modelNames[0].ToString()}");
                        var inMeta     = session.InputMetadata;
                        var innodepair = inMeta.First();
                        var innodename = innodepair.Key;
                        var innodedims = innodepair.Value.Dimensions;
                        var dataIn     = LoadTensorFromFilePb($"{opset}\\{model}\\test_data_set_0\\input_0.pb");
                        var dataOut    = LoadTensorFromFilePb($"{opset}\\{model}\\test_data_set_0\\output_0.pb");
                        var tensorIn   = new DenseTensor <float>(dataIn, innodedims);
                        var nov        = new List <NamedOnnxValue>();
                        nov.Add(NamedOnnxValue.CreateFromTensor <float>(innodename, tensorIn));
                        var resnov = session.Run(nov);
                        var res    = resnov.ToArray()[0].AsTensor <float>().ToArray <float>();
                        Assert.Equal(res, dataOut, new floatComparer());
                        session.Dispose();
                    }
                    catch (Exception ex)
                    {
                        var msg = $"Opset {opset}: Model {model}: error = {ex.Message}";
                        throw new Exception(msg, ex);
                    }
                } //model
            }     //opset
        }
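
This variant hard-codes Windows path separators; a hedged cross-platform tweak would build the session path the way Example #2 does:

                        // Assumption: Path.Combine keeps the test portable across OSes.
                        var session = new InferenceSession(Path.Combine(opset, model.Name, modelNames[0].Name));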
Example #7
 protected virtual void Dispose(bool disposing)
 {
     if (!disposedValue)
     {
         if (disposing)
         {
             session.Dispose();
             stopper.Dispose();
             outputMutex.Dispose();
         }
         disposedValue = true;
     }
 }
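
The protected Dispose(bool) above is normally paired with a public entry point; a hedged sketch of that standard companion (not shown in the source):

 public void Dispose()
 {
     Dispose(disposing: true);
     GC.SuppressFinalize(this);
 }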
Example #8
        private void TestModelInputFLOAT16()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Directory.GetCurrentDirectory() + @"\test_types_FLOAT16.pb";
            var    session   = new InferenceSession(modelPath);
            var    container = new List <NamedOnnxValue>();
            var    tensorIn  = new DenseTensor <float>(new float[] { 1.0f, 2.0f, -3.0f, float.MinValue, float.MaxValue }, new int[] { 1, 5 });
            var    nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);

            container.Add(nov);
            var res       = session.Run(container);
            var tensorOut = res.First().AsTensor <float>();

            Assert.True(tensorOut.SequenceEqual(tensorIn));
            session.Dispose();
        }
Example #9
        private void TestModelInputSTRING()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Directory.GetCurrentDirectory() + @"\test_types_STRING.onnx";
            var    session   = new InferenceSession(modelPath);
            var    container = new List <NamedOnnxValue>();
            var    tensorIn  = new DenseTensor <string>(new string[] { "a", "c", "d", "z", "f" }, new int[] { 1, 5 });
            var    nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);

            container.Add(nov);
            var res       = session.Run(container);
            var tensorOut = res.First().AsTensor <string>();

            Assert.True(tensorOut.SequenceEqual(tensorIn));
            session.Dispose();
        }
Example #10
        private void TestModelInputUINT64()
        {
            // model takes 1x5 input of fixed type, echoes back
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_UINT64.pb");
            var    session   = new InferenceSession(modelPath);
            var    container = new List <NamedOnnxValue>();
            var    tensorIn  = new DenseTensor <UInt64>(new UInt64[] { 1, 2, 3, UInt64.MinValue, UInt64.MaxValue }, new int[] { 1, 5 });
            var    nov       = NamedOnnxValue.CreateFromTensor("input", tensorIn);

            container.Add(nov);
            var res       = session.Run(container);
            var tensorOut = res.First().AsTensor <UInt64>();

            Assert.True(tensorOut.SequenceEqual(tensorIn));
            session.Dispose();
        }
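
The three echo tests above differ only in element type and model file; a hedged generic helper (name hypothetical) that factors out the shared shape:

        // Hypothetical xUnit helper: load an echo model, feed a 1x5 tensor,
        // and assert the output equals the input. Run results are disposable too.
        private static void AssertEchoes <T>(string modelFile, T[] values)
        {
            string modelPath = Path.Combine(Directory.GetCurrentDirectory(), modelFile);
            using (var session = new InferenceSession(modelPath))
            {
                var tensorIn  = new DenseTensor <T>(values, new int[] { 1, 5 });
                var container = new List <NamedOnnxValue> { NamedOnnxValue.CreateFromTensor("input", tensorIn) };
                using (var res = session.Run(container))
                {
                    Assert.True(res.First().AsTensor <T>().SequenceEqual(tensorIn));
                }
            }
        }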
Example #11
 /// <summary>
 /// There are two unmanaged resources we can dispose, <see cref="_session"/> and <see cref="ModelFile"/>
 /// if <see cref="_ownModelFile"/> is <see langword="true"/>.
 /// </summary>
 /// <param name="disposing"></param>
 private void Dispose(bool disposing)
 {
     if (!_disposed)
     {
         // There are two things to be disposed.
         if (disposing)
         {
             // First, release the native resources held by ONNXRuntime.
             _session.Dispose();
             // Second, delete the model file if it was not supplied by the user.
             if (_ownModelFile && File.Exists(ModelFile))
             {
                 File.Delete(ModelFile);
             }
         }
         _disposed = true;
     }
 }
Example #12
        private void TestPreTrainedModelsOpset7And8()
        {
            var opsets = new[] { "opset7", "opset8" };

            foreach (var opset in opsets)
            {
                var modelRoot = new DirectoryInfo(opset);
                foreach (var model in modelRoot.EnumerateDirectories())
                {
                    // TODO: dims contains 'None'. Session throws error.
                    if (model.ToString() == "test_tiny_yolov2")
                    {
                        continue;
                    }
                    try
                    {
                        //TODO: sometimes, the file name is not 'model.onnx'
                        var session    = new InferenceSession($"{opset}\\{model}\\model.onnx");
                        var inMeta     = session.InputMetadata;
                        var innodepair = inMeta.First();
                        var innodename = innodepair.Key;
                        var innodedims = innodepair.Value.Dimensions;
                        var dataIn     = LoadTensorFromFilePb($"{opset}\\{model}\\test_data_set_0\\input_0.pb");
                        var dataOut    = LoadTensorFromFilePb($"{opset}\\{model}\\test_data_set_0\\output_0.pb");
                        var tensorIn   = new DenseTensor <float>(dataIn, innodedims);
                        var nov        = new List <NamedOnnxValue>();
                        nov.Add(NamedOnnxValue.CreateFromTensor <float>(innodename, tensorIn));
                        var resnov = session.Run(nov);
                        var res    = resnov.ToArray()[0].AsTensor <float>().ToArray <float>();
                        Assert.Equal(res, dataOut, new floatComparer());
                        session.Dispose();
                    }
                    catch (Exception ex)
                    {
                        var msg = $"Opset {opset}: Model {model}: error = {ex.Message}";
                        Console.WriteLine(msg); // log and skip for now
                        continue; //TODO: fix the failing models, then rethrow
                        //throw new Exception(msg, ex);
                    }
                } //model
            }     //opset
        }
Example #13
        /// <summary>
        /// Constructs OnnxModel object from file.
        /// </summary>
        /// <param name="modelFile">Model file path.</param>
        /// <param name="gpuDeviceId">GPU device ID to execute on. Null for CPU.</param>
        /// <param name="fallbackToCpu">If true, resumes CPU execution quietly upon GPU error.</param>
        /// <param name="ownModelFile">If true, the <paramref name="modelFile"/> will be deleted when <see cref="OnnxModel"/> is
        /// no longer needed.</param>
        /// <param name="shapeDictionary"></param>
        public OnnxModel(string modelFile, int? gpuDeviceId = null, bool fallbackToCpu          = false,
                         bool ownModelFile = false, IDictionary <string, int[]> shapeDictionary = null)
        {
            // If we don't own the model file, _disposed should be false to prevent deleting user's file.
            _disposed = false;

            if (gpuDeviceId != null)
            {
                try
                {
                    _session = new InferenceSession(modelFile,
                                                    SessionOptions.MakeSessionOptionWithCudaProvider(gpuDeviceId.Value));
                }
                catch (OnnxRuntimeException)
                {
                    if (fallbackToCpu)
                    {
                        _session = new InferenceSession(modelFile);
                    }
                    else
                    {
                        // If called from OnnxTransform, is caught and rethrown
                        throw;
                    }
                }
            }
            else
            {
                _session = new InferenceSession(modelFile);
            }

            try
            {
                // Load ONNX model file and parse its input and output schema. The reason of doing so is that ONNXRuntime
                // doesn't expose full type information via its C# APIs.
                var model = new OnnxCSharpToProtoWrapper.ModelProto();
                // If we own the model file set the DeleteOnClose flag so it is always deleted.
                if (ownModelFile)
                {
                    ModelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.DeleteOnClose);
                }
                else
                {
                    ModelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read);
                }

                // CodedInputStream closes the stream it wraps, and our main stream must stay open, so we create a separate one here.
                using (var modelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read, FileShare.Delete | FileShare.Read))
                    using (var codedStream = Google.Protobuf.CodedInputStream.CreateWithLimits(modelStream, Int32.MaxValue, 100))
                        model = OnnxCSharpToProtoWrapper.ModelProto.Parser.ParseFrom(codedStream);

                // Parse actual input and output types stored in the loaded ONNX model to get their DataViewType's.
                var inputTypePool = new Dictionary <string, DataViewType>();
                foreach (var valueInfo in model.Graph.Input)
                {
                    inputTypePool[valueInfo.Name] = OnnxTypeParser.GetDataViewType(valueInfo.Type);
                }

                var initializerTypePool = new Dictionary <string, DataViewType>();
                foreach (var valueInfo in model.Graph.Initializer)
                {
                    initializerTypePool[valueInfo.Name] = OnnxTypeParser.GetScalarDataViewType(valueInfo.DataType);
                }

                var outputTypePool = new Dictionary <string, DataViewType>();
                // Build casters which maps NamedOnnxValue to .NET objects.
                var casterPool = new Dictionary <string, Func <NamedOnnxValue, object> >();
                foreach (var valueInfo in model.Graph.Output)
                {
                    outputTypePool[valueInfo.Name] = OnnxTypeParser.GetDataViewType(valueInfo.Type);
                    casterPool[valueInfo.Name]     = OnnxTypeParser.GetDataViewValueCasterAndResultedType(valueInfo.Type, out Type actualType);
                }

                var inputInfos  = GetOnnxVariablesFromMetadata(_session.InputMetadata, shapeDictionary, inputTypePool, null);
                var outputInfos = GetOnnxVariablesFromMetadata(_session.OutputMetadata, shapeDictionary, outputTypePool, casterPool);
                var overrideableInitializers = GetOnnxVariablesFromMetadata(_session.OverridableInitializerMetadata, shapeDictionary, inputTypePool, null);

                // Create a view to the used ONNX model from ONNXRuntime's perspective.
                ModelInfo = new OnnxModelInfo(inputInfos, outputInfos, overrideableInitializers);

                Graph = model.Graph;
            }
            catch
            {
                _session.Dispose();
                _session = null;
                throw;
            }
        }
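
A hedged call-site sketch (assuming the signature above): construct on CUDA device 0 and fall back to CPU on failure:

            var model = new OnnxModel("model.onnx", gpuDeviceId: 0, fallbackToCpu: true);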
Example #14
//===========================================================================================//

        public static async Task RecognitionAsync(IEnumerable <string> imagePaths)
        {
            var images = imagePaths.ToArray();

            tasks = new Task[images.Length];
            try
            {
                for (int i = 0; i < images.Length; i++)
                {
                    tasks[i] = Task.Factory.StartNew((imagePath) =>
                    {
                        using Image <Rgb24> image = Image.Load <Rgb24>((string)imagePath, out IImageFormat format);

                        using Stream imageStream = new MemoryStream();
                        image.Mutate(x =>
                        {
                            x.Resize(new ResizeOptions
                            {
                                Size = new Size(224, 224),
                                Mode = ResizeMode.Crop
                            });
                        });
                        image.Save(imageStream, format);

                        Tensor <float> input = new DenseTensor <float>(new[] { 1, 3, 224, 224 });
                        var mean             = new[] { 0.485f, 0.456f, 0.406f };
                        var stddev           = new[] { 0.229f, 0.224f, 0.225f };
                        for (int y = 0; y < image.Height; y++)
                        {
                            Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
                            for (int x = 0; x < image.Width; x++)
                            {
                                input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                                input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                                input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                            }
                        }

                        List <NamedOnnxValue> inputs = new List <NamedOnnxValue>
                        {
                            NamedOnnxValue.CreateFromTensor("data", input)
                        };

                        using var session = new InferenceSession(onnxModelPath);
                        using IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = session.Run(inputs);

                        if (token.IsCancellationRequested)
                        {
                            return;
                        }

                        IEnumerable <float> output = results.First().AsEnumerable <float>();
                        float sum = output.Sum(x => (float)Math.Exp(x));
                        IEnumerable <float> softmax = output.Select(x => (float)Math.Exp(x) / sum);

                        IEnumerable <Prediction> top1 = softmax.Select((x, i) => new Prediction {
                            Label = LabelMap.Labels[i], Confidence = x
                        })
                                                        .OrderByDescending(x => x.Confidence)
                                                        .Take(1);
                        Prediction prediction = top1.First();
                        prediction.Path       = (string)imagePath;

                        if (token.IsCancellationRequested)
                        {
                            return;
                        }

                        Result?.Invoke(prediction);

                        // The using declarations above release the session, image,
                        // and stream even when a cancellation check returns early.
                    }, images[i], token);
                }
                await Task.WhenAll(tasks);
            }
            catch (OperationCanceledException e)
            {
                Trace.WriteLine($"{nameof(OperationCanceledException)} thrown with message: {e.Message}");
            }
        }
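
InferenceSession.Run is documented as safe for concurrent calls, so a hedged variant could share one session across all tasks instead of creating one per image:

            // Hedged sketch: a single shared session reused by every task.
            using var sharedSession = new InferenceSession(onnxModelPath);
            // ... each task builds its own inputs and calls sharedSession.Run(inputs) ...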
Example #15
 /// <summary>
 /// Disposes the YoloScorer instance.
 /// </summary>
 public void Dispose()
 {
     _inferenceSession.Dispose();
 }
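
A hedged usage sketch (type and file names assumed from the Yolov5-style scorer this Dispose belongs to):

 // Hypothetical call site: leaving the using block invokes Dispose() above,
 // which releases the wrapped InferenceSession.
 using (var scorer = new YoloScorer <YoloCocoP5Model>("yolov5s.onnx"))
 {
     // ... scorer.Predict(image) ...
 }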