Code example #1
        private void LoadModel()
        {
            // Build the full path to the Tiny YOLOv2 ONNX file bundled with the app.
            var onnxModel      = "TinyYolo2_model.onnx";
            var modelDirectory = Path.Combine(Environment.CurrentDirectory, @"ML\OnnxModel");
            var onnxPath       = Path.Combine(modelDirectory, onnxModel);

            // Let the configurator wrap the ONNX file and hand back an ML.NET prediction engine.
            var onnxModelConfigurator = new OnnxModelConfigurator(onnxPath);

            predictionEngine = onnxModelConfigurator.GetMlNetPredictionEngine();
        }
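The OnnxModelConfigurator itself is not shown in this section. As a rough idea of what such a class might look like, here is a minimal sketch that builds an ML.NET pipeline (ResizeImages, ExtractPixels, ApplyOnnxModel from the Microsoft.ML.ImageAnalytics and Microsoft.ML.OnnxTransformer packages) and turns it into a prediction engine. The ImageInputData and TinyYoloPrediction types, the "image"/"grid" tensor names and the 416x416 input size are assumptions based on the standard Tiny YOLOv2 model, not code from this article.

    using System.Collections.Generic;
    using System.Drawing;
    using Microsoft.ML;
    using Microsoft.ML.Data;
    using Microsoft.ML.Transforms.Image;

    // Assumed input type: a single Bitmap property fed into the image pipeline.
    public class ImageInputData
    {
        [ImageType(416, 416)]
        public Bitmap Image { get; set; }
    }

    // Assumed output type: the raw Tiny YOLOv2 output tensor ("grid") as a float vector.
    public class TinyYoloPrediction
    {
        [ColumnName("grid")]
        public float[] PredictedLabels { get; set; }
    }

    public class OnnxModelConfigurator
    {
        private readonly MLContext mlContext = new MLContext();
        private readonly ITransformer mlNetModel;

        public OnnxModelConfigurator(string onnxModelPath)
        {
            // Fitting on an empty list is enough here: the pipeline only needs the input schema.
            var emptyData = mlContext.Data.LoadFromEnumerable(new List<ImageInputData>());

            var pipeline = mlContext.Transforms
                .ResizeImages("image", 416, 416, nameof(ImageInputData.Image))
                .Append(mlContext.Transforms.ExtractPixels("image"))
                .Append(mlContext.Transforms.ApplyOnnxModel(
                    outputColumnName: "grid",   // assumed Tiny YOLOv2 output tensor name
                    inputColumnName: "image",   // assumed Tiny YOLOv2 input tensor name
                    modelFile: onnxModelPath));

            mlNetModel = pipeline.Fit(emptyData);
        }

        public PredictionEngine<ImageInputData, TinyYoloPrediction> GetMlNetPredictionEngine()
            => mlContext.Model.CreatePredictionEngine<ImageInputData, TinyYoloPrediction>(mlNetModel);
    }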
Code example #2
        private void LoadModel()
        {
            // Check for an Onnx model exported from Custom Vision
            var customVisionExport = Directory.GetFiles(modelsDirectory, "*.zip").FirstOrDefault();

            // If there is one, use it.
            if (customVisionExport != null)
            {
                var customVisionModel = new CustomVisionModel(customVisionExport);
                var modelConfigurator = new OnnxModelConfigurator(customVisionModel);

                outputParser = new OnnxOutputParser(customVisionModel);
                customVisionPredictionEngine = modelConfigurator.GetMlNetPredictionEngine<CustomVisionPrediction>();
            }
            else // Otherwise default to Tiny Yolo Onnx model
            {
                var tinyYoloModel     = new TinyYoloModel(Path.Combine(modelsDirectory, "TinyYolo2_model.onnx"));
                var modelConfigurator = new OnnxModelConfigurator(tinyYoloModel);

                outputParser             = new OnnxOutputParser(tinyYoloModel);
                tinyYoloPredictionEngine = modelConfigurator.GetMlNetPredictionEngine<TinyYoloPrediction>();
            }
        }
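Neither snippet shows how the loaded engines are consumed. Below is a minimal usage sketch: PredictionEngine<TSrc, TDst>.Predict is the standard ML.NET scoring call, but the PredictedLabels property, the ParseOutputs/FilterBoundingBoxes methods on OnnxOutputParser and the BoundingBox type are assumptions about the surrounding sample, not guaranteed by this article.

        // Hypothetical caller: score one frame with whichever engine was loaded.
        private IList<BoundingBox> DetectObjects(ImageInputData frame)
        {
            // Predict() runs the frame through the ML.NET/ONNX pipeline synchronously.
            var labels = customVisionPredictionEngine?.Predict(frame).PredictedLabels
                         ?? tinyYoloPredictionEngine.Predict(frame).PredictedLabels;

            // Turn the raw output tensor into bounding boxes, then keep at most
            // 5 boxes above 50% confidence (assumed parser signature).
            var boundingBoxes = outputParser.ParseOutputs(labels);
            return outputParser.FilterBoundingBoxes(boundingBoxes, 5, 0.5F);
        }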