/// <summary>
/// Initializes a new instance of the <see cref="TinyYoloV2OnnxModelRunner"/> class.
/// </summary>
/// <param name="pipeline">The pipeline to add the component to.</param>
/// <param name="modelFileName">The name of the model.</param>
/// <param name="gpuDeviceId">The GPU device ID to run execution on, or null to run on CPU.</param>
/// <remarks>
/// To run on a GPU, use the Microsoft.Psi.Onnx.ModelRunners.Gpu library instead of Microsoft.Psi.Onnx.ModelRunners.Cpu, and set
/// the value of the <paramref name="gpuDeviceId"/> parameter to a valid non-negative integer. Typical device ID values are 0 or 1.
/// </remarks>
public TinyYoloV2OnnxModelRunner(Pipeline pipeline, string modelFileName, int? gpuDeviceId = null)
    : base(pipeline)
{
    // create an ONNX model, with a configuration that matches the structure
    // of the Tiny Yolo V2 model: a 3 x 416 x 416 input vector named "image"
    // and an output vector named "grid"
    this.onnxModel = new OnnxModel(new OnnxModelConfiguration()
    {
        ModelFileName = modelFileName,
        InputVectorName = "image",
        InputVectorSize = 3 * 416 * 416,
        OutputVectorName = "grid",
        GpuDeviceId = gpuDeviceId,
    });
}
/// <summary>
/// Initializes a new instance of the <see cref="ImageNetModelRunner"/> class.
/// </summary>
/// <param name="pipeline">The pipeline to add the component to.</param>
/// <param name="configuration">The configuration for the component.</param>
/// <remarks>
/// To run on a GPU, use the Microsoft.Psi.Onnx.ModelRunners.Gpu library instead of Microsoft.Psi.Onnx.ModelRunners.Cpu, and set
/// the GpuDeviceId value of the <paramref name="configuration"/> parameter to a valid non-negative integer. Typical device ID
/// values are 0 or 1.
/// </remarks>
public ImageNetModelRunner(Pipeline pipeline, ImageNetModelRunnerConfiguration configuration)
    : base(pipeline)
{
    // create an ONNX model based on the supplied ImageNet model runner configuration;
    // the 3 x 224 x 224 input vector size matches the standard ImageNet input shape
    this.onnxModel = new OnnxModel(new OnnxModelConfiguration()
    {
        ModelFileName = configuration.ModelFilePath,
        InputVectorName = configuration.InputVectorName,
        InputVectorSize = 3 * 224 * 224,
        OutputVectorName = configuration.OutputVectorName,
        GpuDeviceId = configuration.GpuDeviceId,
    });

    // the parser converts the raw model output vector into labeled predictions
    this.outputParser = new ImageNetModelOutputParser(
        configuration.ImageClassesFilePath,
        configuration.NumberOfPredictions,
        configuration.ApplySoftmaxToModelOutput);
}
/// <summary>
/// Initializes a new instance of the <see cref="OnnxModelRunner"/> class, based on a given configuration.
/// </summary>
/// <param name="pipeline">The pipeline to add the component to.</param>
/// <param name="configuration">The component configuration.</param>
/// <param name="name">An optional name for the component.</param>
/// <remarks>The configuration parameter specifies the model filename, the
/// name of the input and output vectors in that ONNX model, as well as
/// the input vector size.</remarks>
public OnnxModelRunner(Pipeline pipeline, OnnxModelConfiguration configuration, string name = nameof(OnnxModelRunner))
    : base(pipeline, name)
{
    // construct the underlying ONNX model from the supplied configuration,
    // and capture the expected input vector size for use when processing inputs
    this.onnxModel = new OnnxModel(configuration);
    this.inputVectorSize = configuration.InputVectorSize;
}
/// <summary>
/// Initializes a new instance of the <see cref="OnnxModelRunner"/> class, based on a given configuration.
/// </summary>
/// <param name="pipeline">The pipeline to add the component to.</param>
/// <param name="configuration">The component configuration.</param>
/// <remarks>The configuration parameter specifies the model filename, the
/// name of the input and output vectors in that ONNX model, as well as
/// the input vector size.</remarks>
public OnnxModelRunner(Pipeline pipeline, OnnxModelConfiguration configuration)
    : base(pipeline)
{
    // construct the underlying ONNX model from the supplied configuration,
    // and capture the expected input vector size for use when processing inputs
    this.onnxModel = new OnnxModel(configuration);
    this.inputVectorSize = configuration.InputVectorSize;
}