/// <summary>
/// Loads the configured model and label catalog, then walks every configured image
/// directory, batching *.jpg files and running inference on each full batch.
/// Stops the application when all images have been processed.
/// </summary>
/// <remarks>
/// Cancellation is driven by <c>Lifetime.ApplicationStopped</c>; an
/// <see cref="OperationCanceledException"/> propagates to the host on shutdown.
/// </remarks>
private async Task RunAsync()
{
    var modelConfig = ModelConfig.Value;
    var cancellationToken = Lifetime.ApplicationStopped;
    var loading = new GraphLoading(modelConfig.ModelToUse);
    var catalog = new Catalog(modelConfig.LabelsToUse);
    await catalog.LoadAsync(cancellationToken);
    using (var resultBuilder = new ResultBuilder(modelConfig.OutputFileName, catalog))
    using (var graph = await loading.LoadGraphAsync(cancellationToken))
    using (var session = new TFSession(graph))
    {
        var batch = new List<string>(modelConfig.BatchSize);
        foreach (var imageDirectory in modelConfig.ImageDirectories)
        {
            foreach (var inputFile in Directory.EnumerateFiles(imageDirectory, "*.jpg", SearchOption.AllDirectories))
            {
                cancellationToken.ThrowIfCancellationRequested();
                // EnumerateFiles already yields paths rooted at imageDirectory;
                // re-combining would duplicate the directory prefix.
                batch.Add(inputFile);
                if (batch.Count == batch.Capacity)
                {
                    await ProcessBatch(resultBuilder, graph, session, batch, cancellationToken);
                    batch.Clear();
                }
            }
        }

        // Flush the trailing partial batch; without this, up to BatchSize - 1
        // images at the end of the run would be silently skipped.
        if (batch.Count > 0)
        {
            await ProcessBatch(resultBuilder, graph, session, batch, cancellationToken);
        }
    }
    Logger.LogInformation("All images processed");
    Lifetime.StopApplication();
}
/// <summary>
/// Loads the frozen inference graph and label catalog, then runs batched
/// inference over every *.jpg found (recursively) under each directory in
/// <paramref name="args"/>. A trailing partial batch per directory is flushed.
/// </summary>
/// <param name="args">Directories to scan for *.jpg images.</param>
/// <param name="ct">Token used to cancel loading and per-file processing.</param>
private static async Task RunAsync(string[] args, CancellationToken ct)
{
    var loading = new GraphLoading(Path.Combine(ModelDirectory, "frozen_inference_graph.pb"));
    var catalog = new Catalog(CatalogFile);
    await catalog.LoadAsync(ct);
    // Graph is disposable and must be owned here — previously it leaked.
    using (var graph = await loading.LoadGraphAsync(ct))
    using (var session = new TFSession(graph))
    using (var input = new InputData(ImageSize, BatchSize))
    {
        foreach (var directoryName in args)
        {
            var imageFiles = Directory.EnumerateFiles(directoryName, "*.jpg", SearchOption.AllDirectories);
            var batch = new List<string>(BatchSize);
            foreach (var imageFile in imageFiles)
            {
                // Honor cancellation between files, matching the instance overload.
                ct.ThrowIfCancellationRequested();
                // EnumerateFiles already yields paths rooted at directoryName;
                // re-combining would duplicate the directory prefix.
                batch.Add(imageFile);
                if (batch.Count == BatchSize)
                {
                    await TryProcessImage(catalog, graph, session, input, batch, ct);
                    batch.Clear();
                }
            }

            // Flush the remainder for this directory (fewer than BatchSize files).
            if (batch.Count > 0)
            {
                await TryProcessImage(catalog, graph, session, input, batch, ct);
            }
        }
    }
}