/// <summary>
/// Bulk-copies every row from <paramref name="reader"/> into <paramref name="destTable"/>,
/// raising start/progress/complete events. Errors are reported through <c>OnError</c>
/// rather than rethrown.
/// </summary>
/// <param name="reader">Open data reader supplying the rows to import.</param>
/// <param name="destTable">Destination table name.</param>
/// <param name="truncateDestTable">When true, the destination table is truncated before the copy.</param>
/// <param name="columnMapping">Optional explicit column mappings; when null, SqlBulkCopy's default ordinal mapping applies.</param>
/// <param name="batchSize">Row interval at which progress notifications are raised.</param>
protected virtual void ImportDataReaderInternal(DbDataReader reader, string destTable, bool truncateDestTable, IEnumerable<SqlBulkCopyColumnMapping> columnMapping, int batchSize)
{
    OnImportStarting(new DataImportStartingArgs { TableName = destTable });

    using (var bulkCopy = new SqlBulkCopy(cs))
    {
        bulkCopy.BulkCopyTimeout = 0; // 0 = no timeout; large imports may run long
        bulkCopy.DestinationTableName = destTable;
        bulkCopy.NotifyAfter = batchSize;

        // Relay a progress event after every batchSize rows copied.
        bulkCopy.SqlRowsCopied += (sender, args) =>
            OnImportProgress(new DataImportProgressArgs
            {
                TableName = destTable,
                RowsImported = batchSize,
                TotalRowsImported = (int)args.RowsCopied
            });

        if (columnMapping != null)
        {
            foreach (var mapping in columnMapping)
            {
                bulkCopy.ColumnMappings.Add(mapping.SourceColumn, mapping.DestinationColumn);
            }
        }

        try
        {
            if (truncateDestTable)
            {
                Truncate(bulkCopy.DestinationTableName);
            }

            bulkCopy.WriteToServer(reader);

            var totalRows = bulkCopy.GetRowsCopied();
            OnImportCompleted(new DataImportCompleteArgs
            {
                TotalRowsImported = totalRows,
                TableName = destTable
            });
        }
        catch (Exception ex)
        {
            // By design, failures are surfaced to subscribers instead of propagating.
            // NOTE(review): importStart/importStop appear to be fields set elsewhere — confirm.
            OnError(new DataImportErrorAgrs
            {
                TableName = destTable,
                Error = ex,
                ImportStarted = importStart,
                ImportCompleted = importStop
            });
        }
    }
}
/// <summary>
/// Asynchronously bulk-inserts all rows from <paramref name="dataReader"/> into
/// <paramref name="tableName"/>, mapping each source column to the destination
/// column of the same name and reporting progress via the <c>Progress</c> event.
/// </summary>
/// <param name="tableName">Destination table.</param>
/// <param name="dataReader">Open reader supplying the rows; caller retains ownership.</param>
/// <returns>The number of rows copied to the server.</returns>
public async Task<long> InsertData(TableName tableName, IDataReader dataReader)
{
    // Fail fast if the reader's schema is incompatible with the destination table.
    await CheckTablesCompatibility(tableName, dataReader);

    var stopwatch = Stopwatch.StartNew();
    long rowsInserted = 0;

    using (var bulkCopy = new SqlBulkCopy(_connection)
    {
        BatchSize = 1000000,
        BulkCopyTimeout = Timeout,
        NotifyAfter = 10000,
        DestinationTableName = tableName.ToString()
    })
    {
        // Map every source column to the destination column with the same name.
        for (var ordinal = 0; ordinal < dataReader.FieldCount; ordinal++)
        {
            var columnName = dataReader.GetName(ordinal);
            bulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping(columnName, columnName));
        }

        // Log and forward progress every NotifyAfter rows.
        bulkCopy.SqlRowsCopied += (sender, args) =>
        {
            Log.Info($"Records processed: {args.RowsCopied:#########}, table name {tableName}");
            var handler = Progress;
            handler?.Invoke(this, new ProgressEventArgs(args.RowsCopied, "Copying data...", tableName, stopwatch.Elapsed));
        };

        await bulkCopy.WriteToServerAsync(dataReader);
        rowsInserted = bulkCopy.GetRowsCopied();
    }

    return rowsInserted;
}