/// <summary>
/// Materializes the rows of an open <see cref="IDataReader"/> into an <see cref="IDataTable"/>.
/// </summary>
/// <param name="reader">The open data reader to consume. Must not be null or closed.</param>
/// <param name="skip">Number of leading rows to skip. Assumed non-negative.</param>
/// <param name="take">Maximum number of rows to read after skipping.</param>
/// <returns>A <see cref="LightweightDataTable"/> containing the column metadata and the selected rows.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="reader"/> is closed.</exception>
public static IDataTable FromDataReader(IDataReader reader, int skip = 0, int take = int.MaxValue)
{
    Guard.NotNull(reader, nameof(reader));

    if (reader.IsClosed)
    {
        throw new ArgumentException("This operation is invalid when the reader is closed.", nameof(reader));
    }

    var columns = new List<IDataColumn>(reader.FieldCount);
    var data = new List<object[]>();

    // IDataReader.GetSchemaTable() is documented to return null when the provider
    // exposes no column schema. Fall back to the reader's field metadata in that case
    // instead of throwing a NullReferenceException.
    var schema = reader.GetSchemaTable();
    if (schema != null)
    {
        var nameCol = schema.Columns[SchemaTableColumn.ColumnName];
        var typeCol = schema.Columns[SchemaTableColumn.DataType];

        foreach (DataRow schemaRow in schema.Rows)
        {
            columns.Add(new LightweightDataColumn((string)schemaRow[nameCol], (Type)schemaRow[typeCol]));
        }
    }
    else
    {
        for (var col = 0; col < reader.FieldCount; col++)
        {
            columns.Add(new LightweightDataColumn(reader.GetName(col), reader.GetFieldType(col)));
        }
    }

    var fieldCount = reader.FieldCount;
    var i = -1;

    // Clamp 'take' so that 'skip + take' below cannot overflow int.MaxValue.
    take = Math.Min(take, int.MaxValue - skip);

    while (reader.Read())
    {
        i++;

        if (skip > i)
        {
            continue;
        }

        if (i >= skip + take)
        {
            break;
        }

        var values = new object[fieldCount];
        reader.GetValues(values);
        data.Add(values);
    }

    var table = new LightweightDataTable(columns, data);
    return table;
}
/// <summary>
/// Executes a data import for the profile referenced by <paramref name="request"/>.
/// Silently returns when the profile does not exist or is disabled. All progress and
/// errors are written to the profile's trace log; only a requested cancellation is
/// surfaced to the caller as an <see cref="OperationCanceledException"/>.
/// </summary>
/// <param name="request">The import request carrying the profile id and permission info.</param>
/// <param name="cancelToken">Token to observe for cancellation.</param>
/// <exception cref="SmartException">Thrown (and logged, not propagated) when permission is missing or an import file does not exist.</exception>
public async Task ImportAsync(DataImportRequest request, CancellationToken cancelToken = default)
{
    Guard.NotNull(request, nameof(request));
    // NOTE: no Guard.NotNull on cancelToken — CancellationToken is a non-nullable
    // value type, so such a check can never fail and is dead code.

    var profile = await _services.DbContext.ImportProfiles.FindByIdAsync(request.ProfileId, false, cancelToken);
    if (!(profile?.Enabled ?? false))
    {
        // Nothing to do for a missing or disabled profile.
        return;
    }

    var (ctx, logFile) = await CreateImporterContext(request, profile, cancelToken);
    using var logger = new TraceLogger(logFile, false);
    ctx.Log = ctx.ExecuteContext.Log = logger;

    try
    {
        if (!request.HasPermission && !await HasPermission())
        {
            throw new SmartException("You do not have permission to perform the selected import.");
        }

        var context = ctx.ExecuteContext;
        var files = await _importProfileService.GetImportFilesAsync(profile, profile.ImportRelatedData);

        // Group files by their related entity type; the empty key holds the main files.
        var fileGroups = files.ToMultimap(x => x.RelatedType?.ToString() ?? string.Empty, x => x);

        logger.Info(CreateLogHeader(profile, fileGroups));
        await _services.EventPublisher.PublishAsync(new ImportExecutingEvent(context), cancelToken);

        foreach (var fileGroup in fileGroups)
        {
            // One result object per file group, also exposed via the outer context.
            context.Result = ctx.Results[fileGroup.Key] = new();

            foreach (var file in fileGroup.Value)
            {
                if (context.Abort == DataExchangeAbortion.Hard)
                {
                    break;
                }

                if (!file.File.Exists)
                {
                    throw new SmartException($"File does not exist {file.File.SubPath}.");
                }

                try
                {
                    // Profile-specific CSV settings if configured, otherwise the Excel-friendly default.
                    var csvConfiguration = file.IsCsv
                        ? (new CsvConfigurationConverter().ConvertFrom<CsvConfiguration>(profile.FileTypeConfiguration) ?? CsvConfiguration.ExcelFriendlyConfiguration)
                        : CsvConfiguration.ExcelFriendlyConfiguration;

                    using var stream = file.File.OpenRead();

                    context.File = file;
                    // Related-data files get a fresh (empty) column map; main files use the profile's map.
                    context.ColumnMap = file.RelatedType.HasValue ? new ColumnMap() : ctx.ColumnMap;
                    context.DataTable = LightweightDataTable.FromFile(
                        file.File.Name,
                        stream,
                        stream.Length,
                        csvConfiguration,
                        profile.Skip,
                        profile.Take > 0 ? profile.Take : int.MaxValue);

                    var segmenter = new ImportDataSegmenter(context.DataTable, context.ColumnMap);
                    context.DataSegmenter = segmenter;
                    context.Result.TotalRecords = segmenter.TotalRows;

                    while (context.Abort == DataExchangeAbortion.None && segmenter.ReadNextBatch())
                    {
                        using var batchScope = _scopeAccessor.LifetimeScope.BeginLifetimeScope();

                        // It would be nice if we could make all dependencies use our TraceLogger.
                        var importerFactory = batchScope.Resolve<Func<ImportEntityType, IEntityImporter>>();
                        var importer = importerFactory(profile.EntityType);

                        await importer.ExecuteAsync(context, cancelToken);
                    }
                }
                catch (Exception ex)
                {
                    // A failing file hard-aborts the whole import; the error is recorded in the result.
                    context.Abort = DataExchangeAbortion.Hard;
                    context.Result.AddError(ex, $"The importer failed: {ex.ToAllMessages()}.");
                }
                finally
                {
                    context.Result.EndDateUtc = DateTime.UtcNow;

                    if (context.IsMaxFailures)
                    {
                        context.Result.AddWarning("Import aborted. The maximum number of failures has been reached.");
                    }

                    if (ctx.CancelToken.IsCancellationRequested)
                    {
                        context.Result.AddWarning("Import aborted. A cancellation has been requested.");
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        logger.ErrorsAll(ex);
    }
    finally
    {
        await Finalize(ctx);
    }

    // Propagate cancellation to the caller only after cleanup has completed.
    cancelToken.ThrowIfCancellationRequested();
}