public static void Main()
{
    try
    {
        // Requires SixLabors.ImageSharp (Image, Rgb24) and System.Runtime.InteropServices (MemoryMarshal).
        //using var bitmap = (System.Drawing.Bitmap)System.Drawing.Image.FromFile(ImageName);
        using var image = Image.Load<Rgb24>(ImageName);
        Console.WriteLine($"image: {image}");

        // TryGetSinglePixelSpan only succeeds when the pixel buffer is contiguous;
        // nothing runs if it fails.
        if (image.TryGetSinglePixelSpan(out var pixelSpan))
        {
            DumpVersion();

            using var core = new InferenceEngineCore();
            DumpCoreInformation(core);

            using var network = new InferenceEngineNetwork(core, NetworkName);
            DumpNetwork(network);

            var mainInputName = network.GetInputName(0);
            var mainOutputName = network.GetOutputName(0);

            // Let the engine resize the U8 image and convert it to the
            // network's expected NCHW layout.
            network.SetInputResizeAlgorithm(mainInputName, resize_alg_e.RESIZE_BILINEAR);
            network.SetInputLayout(mainInputName, layout_e.NCHW);
            network.SetInputPrecision(mainInputName, precision_e.U8);

            Console.WriteLine("Create executable network");
            using var executableNetwork = new InferenceEngineExecutableNetwork(network, "GPU");

            Console.WriteLine("Create request");
            using var request = new InferenceEngineRequest(executableNetwork);

            // Describe the input blob: one NHWC U8 image of the loaded size.
            var imageDimensions = new dimensions_t(1, 3, image.Height, image.Width);
            var tensorDescription = new tensor_desc_t(layout_e.NHWC, imageDimensions, precision_e.U8);

            Console.WriteLine("Create blob");
            using var inputBlob = new Blob(tensorDescription, MemoryMarshal.Cast<Rgb24, byte>(pixelSpan));
            request.SetBlob(mainInputName, inputBlob);

            // Run synchronous inference a few times.
            for (var i = 0; i < 10; i++)
            {
                Console.WriteLine($"Infer {i}");
                request.Infer();
                Console.WriteLine($"Infer {i} done");
            }

            using var outputBlob = request.GetBlob(mainOutputName);
            Console.WriteLine($"Output blob. Sizes = {outputBlob.Size} {outputBlob.ByteSize}. [{outputBlob.Layout} {outputBlob.Precision}] {outputBlob.Dimensions}");
        }
    }
    catch (Exception ex)
    {
        Console.Error.WriteLine($"Error: {ex.Message}");
    }
}
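The sample above only prints the output blob's metadata. For a classification network, the scores would typically be read back as floats and the index of the largest value reported as the predicted class. The following is a minimal sketch of that step, not part of the sample: the AsSpan&lt;float&gt;() accessor on Blob is a hypothetical name standing in for whatever data-access member the wrapper actually exposes.

// Minimal sketch: pick the top class from a classification output.
// Blob.AsSpan<float>() below is a HYPOTHETICAL accessor; substitute the
// wrapper's real data-access member.
private static int ArgMax(ReadOnlySpan<float> scores)
{
    var best = 0;
    for (var i = 1; i < scores.Length; i++)
    {
        if (scores[i] > scores[best])
        {
            best = i;
        }
    }
    return best;
}

// Usage, after request.Infer() has completed:
//   var scores = outputBlob.AsSpan<float>();   // hypothetical accessor
//   Console.WriteLine($"Top class index: {ArgMax(scores)}");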
private void StartRequest(State state)
{
    var executableNetwork = _executableNetwork ?? throw new InvalidOperationException("Detector needs to be initialised");
    if (state.ActiveRequest != null)
    {
        throw new InvalidOperationException("State already has a request");
    }

    var network = _network;
    var mainInputName = network.GetInputName(0);

    // The request is deliberately not wrapped in a 'using': it outlives this
    // method and is owned by the state until the result has been collected.
    var request = new InferenceEngineRequest(executableNetwork);
    using var blob = ConvertAndInitialise(state.Buffer, state);
    request.SetBlob(mainInputName, blob);

    // Remember the in-flight request, then kick off asynchronous inference.
    state.ActiveRequest = request;
    request.StartInfer();
}
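StartRequest has a natural counterpart that waits for the asynchronous inference and collects the output. The method below is only a sketch of that completion step, not the detector's actual implementation: it assumes the request wrapper exposes a Wait() method (the underlying OpenVINO C API has ie_infer_request_wait), and that name is an assumption rather than a confirmed member.

private void CompleteRequest(State state)
{
    var request = state.ActiveRequest ?? throw new InvalidOperationException("State has no active request");
    var mainOutputName = _network.GetOutputName(0);

    // ASSUMED API: block until the inference started by StartInfer() has finished.
    request.Wait();

    using var outputBlob = request.GetBlob(mainOutputName);
    Console.WriteLine($"Output blob. Sizes = {outputBlob.Size} {outputBlob.ByteSize}. [{outputBlob.Layout} {outputBlob.Precision}] {outputBlob.Dimensions}");

    // Release the request and clear the state so a new request can be started.
    request.Dispose();
    state.ActiveRequest = null;
}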