public DataTable CreateTable(BatchJob batchJob)
{
    // include only fields that map to a source column or carry a default value
    var mappings = batchJob.Fields
        .Where(m => m.Index.HasValue || m.Default.HasValue)
        .ToList();

    var targetTable = CreateTable(mappings);

    _logger.LogDebug("Processing Job: '{job}', Mapped Columns: {columns}", batchJob, targetTable.Columns.Count);

    var validator = _factory.ResolveValidator(batchJob.ValidatorType);
    validator?.Reset();

    // skip first row, header
    for (var index = 1; index < batchJob.Data.Length; index++)
    {
        var sourceRow = batchJob.Data[index];
        CopyRow(batchJob, sourceRow, targetTable, mappings, validator);
    }

    _logger.LogInformation("Processed {rows} rows from job: '{job}'", targetTable.Rows.Count, batchJob);

    return targetTable;
}
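// A minimal sketch of the CreateTable(mappings) overload used above; the exact
// FieldMapping shape is an assumption (presumed to expose Name, DataType, Index,
// and Default), and only Name and DataType are needed to build the target columns.
private static DataTable CreateTable(List<FieldMapping> mappings)
{
    var table = new DataTable();

    foreach (var mapping in mappings)
    {
        // one target column per included mapping; nullable value types are
        // unwrapped because DataColumn does not support Nullable<T> directly
        var dataType = Nullable.GetUnderlyingType(mapping.DataType) ?? mapping.DataType;
        table.Columns.Add(mapping.Name, dataType);
    }

    return table;
}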
private DataTable LoadData(BatchJob batchJob)
{
    if (batchJob == null)
        throw new ArgumentNullException(nameof(batchJob));

    if (!File.Exists(batchJob.WorkingFile))
        throw new InvalidOperationException(
            "The BatchJob working file '{0}' could not be found.".FormatWith(batchJob.WorkingFile));

    // include only mappings that are flagged for import and have a source column or default value
    var mappings = batchJob.SourceMapping
        .Where(m => m.IsIncluded && (m.Index.HasValue || m.Default.HasValue))
        .ToList();

    var targetTable = CreateTargetTable(mappings);

    Logger.Debug()
        .Message("Processing working file: '{0}', Mapped Columns: {1}", batchJob.WorkingFile, targetTable.Columns.Count)
        .Write();

    var reader = _factory.ResolveReader(batchJob.ReaderType);

    // read source data
    var sourceTable = reader.ReadData(batchJob.WorkingFile);

    var validator = _factory.ResolveValidator(batchJob.ValidatorType);
    if (validator != null)
        validator.Reset();

    foreach (DataRow sourceRow in sourceTable.Rows)
        CopyRow(batchJob, sourceRow, targetTable, mappings, validator);

    Logger.Debug()
        .Message("Processed {0} rows from file: '{1}'", targetTable.Rows.Count, batchJob.WorkingFile)
        .Write();

    return targetTable;
}
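// A minimal sketch of the CopyRow helper both methods call. The members used here
// (FieldMapping.Index/Default/Name and IValidator.ValidateRow) are assumptions, not
// confirmed API; this shows the string[] overload used by CreateTable, and the
// DataRow overload used by LoadData would follow the same pattern.
private void CopyRow(
    BatchJob batchJob,
    string[] sourceRow,
    DataTable targetTable,
    List<FieldMapping> mappings,
    IValidator validator)
{
    var targetRow = targetTable.NewRow();

    foreach (var mapping in mappings)
    {
        // prefer the mapped source column; fall back to the configured default
        object value;
        if (mapping.Index.HasValue && mapping.Index.Value < sourceRow.Length)
            value = sourceRow[mapping.Index.Value];
        else
            value = mapping.Default;

        targetRow[mapping.Name] = value ?? DBNull.Value;
    }

    // give the optional validator a chance to reject the row before it is added
    if (validator == null || validator.ValidateRow(targetRow))
        targetTable.Rows.Add(targetRow);
}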