/// <summary>
/// Streams <paramref name="sourceDataReader"/> into <paramref name="targetTableName"/>
/// with SqlBulkCopy, applying the configured batch size, notification interval and timeout.
/// </summary>
/// <param name="sourceDataReader">Open reader supplying the rows to copy.</param>
/// <param name="targetConnection">Open connection to the destination database.</param>
/// <param name="targetTableName">Destination table name.</param>
/// <param name="columnsMappings">Optional explicit column mappings; when null, SqlBulkCopy maps columns by ordinal.</param>
/// <param name="rowsCopiedNotify">Optional progress callback invoked with the running row count.</param>
private async Task BulkCopyExecuteAsync(IDataReader sourceDataReader, SqlConnection targetConnection, string targetTableName, IEnumerable<ColumnsMapping> columnsMappings, Action<long> rowsCopiedNotify)
{
    using (var bulkCopy = new SqlBulkCopy(targetConnection))
    {
        bulkCopy.DestinationTableName = targetTableName;
        bulkCopy.BatchSize = _batchSize;
        bulkCopy.NotifyAfter = _notifyAfter;
        bulkCopy.BulkCopyTimeout = _bulkCopyTimeout;

        // Explicit mappings are optional.
        if (columnsMappings != null)
        {
            bulkCopy.AddMappings(CreateBulkCopyMappings(columnsMappings));
        }

        // Surface copy progress to the caller when a callback was supplied.
        if (rowsCopiedNotify != null)
        {
            bulkCopy.SqlRowsCopied += (s, args) => rowsCopiedNotify(args.RowsCopied);
        }

        await bulkCopy.WriteToServerAsync(sourceDataReader);
    }
}
/// <summary>
/// Deletes rows from <paramref name="targetTableName"/> whose EAN13 values appear in
/// <paramref name="dataReader"/>: the reader is bulk-copied into a session temp table,
/// then a single set-based DELETE joins that temp table against the target.
/// </summary>
/// <param name="dataReader">Reader exposing an EAN13 column with the keys to delete.</param>
/// <param name="targetTableName">Table to delete from (also used to name the temp table).</param>
/// <param name="targetConnectionString">Connection string for the target database.</param>
public async Task DeleteDataAsync(IDataReader dataReader, string targetTableName, string targetConnectionString)
{
    // NOTE(review): the table name is concatenated into SQL text because identifiers
    // cannot be parameterized — callers must never pass untrusted input here.
    var tempTableName = "#Temp" + targetTableName;
    var createTempTableCommandText = "create table " + tempTableName + "(EAN13 varchar(50))";
    var deleteDataCommandText = "delete from " + targetTableName + " where EAN13 in (select EAN13 from " + tempTableName + ");";

    using (var connection = DbCommonHelper.CreateDbConnection(PROVIDER_NAME, targetConnectionString))
    {
        await connection.OpenAsync();

        // The # temp table lives for the lifetime of this connection, so all three
        // steps below must run on the same connection.
        using (var createTempTableCommand = DbCommonHelper.CreateCommand(createTempTableCommandText, connection))
        {
            await createTempTableCommand.ExecuteNonQueryAsync();
        }

        using (var bulkCopy = new SqlBulkCopy((SqlConnection)connection))
        {
            bulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping("EAN13", "EAN13"));
            bulkCopy.DestinationTableName = tempTableName;
            bulkCopy.BulkCopyTimeout = 1800; // seconds; key sets can be large
            await bulkCopy.WriteToServerAsync(dataReader);
        }

        using (var deleteDataCommand = DbCommonHelper.CreateCommand(deleteDataCommandText, connection))
        {
            await deleteDataCommand.ExecuteNonQueryAsync();
        }
    }
}
/// <summary>
/// Bulk-inserts the rows of <paramref name="dataTable"/> into the configured upload table,
/// taking a table lock, firing insert triggers and using an internal transaction.
/// </summary>
/// <param name="dataTable">The rows to write.</param>
private async Task WriteToDatabaseAsync(DataTable dataTable)
{
    using (var connection = new SqlConnection(ConnectionString))
    // Fix: the original constructed a non-existent "SqlCopy" type; SqlBulkCopy is the
    // intended class. It is also now disposed via using.
    using (var bulkCopy = new SqlBulkCopy(
        connection,
        SqlBulkCopyOptions.TableLock | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.UseInternalTransaction,
        null))
    {
        bulkCopy.DestinationTableName = Uploader.TableName;
        await connection.OpenAsync(); // async open instead of blocking Open()
        await bulkCopy.WriteToServerAsync(dataTable);
        // No explicit Close(): disposing the connection closes it.
    }
}
/// <summary>
/// Bulk-inserts <paramref name="dataTable"/> into <paramref name="tableNameAndSchema"/>
/// inside the caller-supplied <paramref name="transaction"/>, mapping every column by name.
/// </summary>
/// <exception cref="DataException">
/// Thrown instead of the opaque SqlException when the server reports an invalid column
/// length, naming the offending column and its limit (resolved via reflection).
/// </exception>
public async Task BulkLoadInTransactionAsync(DataTable dataTable, string tableNameAndSchema, SqlConnection connection, SqlTransaction transaction)
{
    using (var bulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.Default, transaction))
    {
        // Map each source column onto the destination column of the same name.
        foreach (DataColumn column in dataTable.Columns)
            bulkCopy.ColumnMappings.Add(column.ColumnName, column.ColumnName);
        bulkCopy.DestinationTableName = tableNameAndSchema;
        bulkCopy.BatchSize = _configuration.BatchWriteSize;
        try
        {
            await bulkCopy.WriteToServerAsync(dataTable);
        }
        catch (SqlException ex)
        {
            // SqlBulkCopy reports oversize data only as "... colid N" without naming the
            // column, so dig the metadata out of SqlBulkCopy's private fields.
            // NOTE(review): relies on System.Data internals (_sortedColumnMappings /
            // _items / _metadata); a framework update can silently break this path.
            if (ex.Message.Contains("Received an invalid column length from the bcp client for colid"))
            {
                // The first number in the message is the 1-based colid.
                string pattern = @"\d+";
                Match match = Regex.Match(ex.Message, pattern);
                var index = Convert.ToInt32(match.Value) - 1;
                FieldInfo fi = typeof(SqlBulkCopy).GetField("_sortedColumnMappings", BindingFlags.NonPublic | BindingFlags.Instance);
                var sortedColumns = fi.GetValue(bulkCopy);
                var items = (Object[])sortedColumns.GetType().GetField("_items", BindingFlags.NonPublic | BindingFlags.Instance).GetValue(sortedColumns);
                // Fix: the original hard-coded items[1] even though `index` was computed
                // above, so any colid other than 2 reported the wrong column.
                FieldInfo itemdata = items[index].GetType().GetField("_metadata", BindingFlags.NonPublic | BindingFlags.Instance);
                var metadata = itemdata.GetValue(items[index]);
                var column = metadata.GetType().GetField("column", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance).GetValue(metadata);
                var length = metadata.GetType().GetField("length", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance).GetValue(metadata);
                throw new DataException(String.Format("Column: {0} contains data with a length greater than: {1}", column, length));
            }
            throw;
        }
    }
}
/// <summary>
/// Emit a batch of log events, running asynchronously.
/// </summary>
/// <param name="events">The events to emit.</param>
/// <remarks>
/// Override either <see cref="PeriodicBatchingSink.EmitBatch" /> or
/// <see cref="PeriodicBatchingSink.EmitBatchAsync" />, not both.
/// </remarks>
protected override async Task EmitBatchAsync(IEnumerable<LogEvent> events)
{
    // Stage the batch in the reusable DataTable.
    FillDataTable(events);

    using (var connection = new SqlConnection(_connectionString))
    {
        await connection.OpenAsync(_token.Token);
        using (var bulkCopy = new SqlBulkCopy(connection))
        {
            bulkCopy.DestinationTableName = _tableName;
            await bulkCopy.WriteToServerAsync(_eventsTable, _token.Token);

            // Batch written — empty the staging table for the next run.
            _eventsTable.Clear();
        }
    }
}
/// <summary>
/// Writes the contents of <paramref name="dataTable"/> to the server through the supplied
/// bulk-copy instance, then empties the table so it can be refilled for the next batch.
/// </summary>
/// <param name="sqlBulkCopy">Configured SqlBulkCopy instance to push the rows through.</param>
/// <param name="dataTable">Table holding the rows to insert; cleared after the write.</param>
/// <returns>A task that completes when the rows have been written and the table cleared.</returns>
private async Task InsertDataTable(SqlBulkCopy sqlBulkCopy, DataTable dataTable)
{
    await sqlBulkCopy.WriteToServerAsync(dataTable);
    dataTable.Rows.Clear();
}
/// <summary>
/// Appends a batch of domain event streams to the event table in a single bulk insert
/// wrapped in one SQL transaction. Returns a failed result (rather than throwing) when
/// the write or commit fails.
/// </summary>
/// <param name="eventStreams">The event streams to persist.</param>
public Task<AsyncTaskResult> BatchAppendAsync(IEnumerable<DomainEventStream> eventStreams)
{
    // Materialize all streams into one DataTable so the insert is a single bulk copy.
    var table = BuildEventTable();
    foreach (var eventStream in eventStreams)
    {
        AddDataRow(table, eventStream);
    }
    // TryIOFuncAsync is the project's IO wrapper — presumably adds retry/accounting; verify in IOHelper.
    return _ioHelper.TryIOFuncAsync<AsyncTaskResult>(async () =>
    {
        try
        {
            using (var connection = GetConnection())
            {
                await connection.OpenAsync();
                // BeginTransaction is synchronous; Task.Run keeps it off the current context.
                // NOTE(review): the transaction is never disposed — only committed or rolled back.
                var transaction = await Task.Run<SqlTransaction>(() => connection.BeginTransaction());
                using (var copy = new SqlBulkCopy(connection, SqlBulkCopyOptions.Default, transaction))
                {
                    copy.BatchSize = _bulkCopyBatchSize;
                    copy.BulkCopyTimeout = _bulkCopyTimeout;
                    copy.DestinationTableName = _tableName;
                    // Explicit one-to-one mappings for every persisted event column.
                    copy.ColumnMappings.Add("AggregateRootId", "AggregateRootId");
                    copy.ColumnMappings.Add("AggregateRootTypeName", "AggregateRootTypeName");
                    copy.ColumnMappings.Add("CommandId", "CommandId");
                    copy.ColumnMappings.Add("Version", "Version");
                    copy.ColumnMappings.Add("CreatedOn", "CreatedOn");
                    copy.ColumnMappings.Add("Events", "Events");
                    try
                    {
                        await copy.WriteToServerAsync(table);
                        await Task.Run(() => transaction.Commit());
                        return AsyncTaskResult.Success;
                    }
                    catch (Exception ex)
                    {
                        // Best-effort rollback; the original failure is what gets reported.
                        try
                        {
                            transaction.Rollback();
                        }
                        catch { }
                        return new AsyncTaskResult(AsyncTaskStatus.Failed, ex.Message);
                    }
                }
            }
        }
        catch (SqlException ex)
        {
            // Connection/open-time SQL failures are classified as IO errors.
            _logger.Error("Batch append event has sql exception.", ex);
            return new AsyncTaskResult(AsyncTaskStatus.IOException, ex.Message);
        }
        catch (Exception ex)
        {
            _logger.Error("Batch append event has unknown exception.", ex);
            return new AsyncTaskResult(AsyncTaskStatus.Failed, ex.Message);
        }
    }, "BatchAppendEventsAsync");
}
/// <summary>
/// Emit a batch of log events, running asynchronously.
/// </summary>
/// <param name="events">The events to emit.</param>
/// <remarks>
/// Override either <see cref="PeriodicBatchingSink.EmitBatch" /> or
/// <see cref="PeriodicBatchingSink.EmitBatchAsync" />, not both.
/// </remarks>
protected override async Task EmitBatchAsync(IEnumerable<LogEvent> events)
{
    // Stage the incoming batch in the reusable staging table.
    FillDataTable(events);

    try
    {
        using (var connection = new SqlConnection(_connectionString))
        {
            await connection.OpenAsync(_token.Token).ConfigureAwait(false);
            using (var bulkCopy = new SqlBulkCopy(connection))
            {
                bulkCopy.DestinationTableName = _tableName;

                // Map each staging column onto the destination column of the same name.
                foreach (DataColumn column in _eventsTable.Columns)
                {
                    var name = column.ColumnName;
                    bulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping(name, name));
                }

                await bulkCopy.WriteToServerAsync(_eventsTable, _token.Token).ConfigureAwait(false);
            }
        }
    }
    catch (Exception ex)
    {
        // A logging sink must never throw — report through Serilog's self-log instead.
        SelfLog.WriteLine("Unable to write {0} log events to the database due to following error: {1}", events.Count(), ex.Message);
    }
    finally
    {
        // Processed the items, clear for the next run
        _eventsTable.Clear();
    }
}
/// <summary>
/// Bulk-copies the reader's rows into the destination table, building column mappings
/// according to the configured field selection and case-sensitivity options, and
/// optionally creating the destination table first.
/// </summary>
/// <param name="cancellationToken">Token observed by the column lookup and the bulk write.</param>
public async Task WriteToServerAsync(CancellationToken cancellationToken)
{
    // Explicitly requested fields win; otherwise take every column the reader exposes.
    string[] sourceFields = fields.Length > 0 ? fields.ToArray() : reader.GetColumnNames().ToArray();

    if (bulkOptions.CreateTable)
    {
        await CreateTableAsync(sourceFields);
    }

    Mapping[] mappings;
    if (bulkOptions.FieldsSelector == FieldsSelector.Source && !bulkOptions.CaseSensitive.HasValue)
    {
        // Fast path: map each source field straight onto an identically named destination field.
        mappings = sourceFields.Select(name => new Mapping() { Source = name, Destination = name }).ToArray();
    }
    else
    {
        // Otherwise intersect source fields with the actual destination columns.
        string[] destFields = (await GetTableColumnsAsync(cancellationToken).ConfigureAwait(false)).ToArray();
        if (bulkOptions.CaseSensitive.HasValue && bulkOptions.CaseSensitive.Value)
        {
            mappings = sourceFields
                .Join(destFields, src => src, dst => dst, (src, dst) => new Mapping() { Source = src, Destination = dst })
                .ToArray();
        }
        else
        {
            mappings = sourceFields
                .Join(destFields, src => src.ToLowerInvariant(), dst => dst.ToLowerInvariant(), (src, dst) => new Mapping() { Source = src, Destination = dst })
                .ToArray();
        }
        // Enforce the configured selection policy (e.g. all source fields must be matched).
        Helpers.CheckFieldSelection(bulkOptions.FieldsSelector, sourceFields.Length, destFields.Length, mappings.Length);
    }

    // Nothing matched — nothing to write.
    if (mappings.Length == 0)
    {
        return;
    }

    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(connection, bulkOptions.SqlBulkCopyOptions, transaction))
    {
        bulkCopy.DestinationTableName = destinationTable;
        if (bulkOptions.BatchSize.HasValue)
        {
            bulkCopy.BatchSize = bulkOptions.BatchSize.Value;
        }
        if (bulkOptions.BulkCopyTimeout.HasValue)
        {
            bulkCopy.BulkCopyTimeout = bulkOptions.BulkCopyTimeout.Value;
        }
        if (bulkOptions.EnableStreaming.HasValue)
        {
            bulkCopy.EnableStreaming = bulkOptions.EnableStreaming.Value;
        }
        foreach (var mapping in mappings)
        {
            bulkCopy.ColumnMappings.Add(mapping.Source, mapping.Destination);
        }
        await bulkCopy.WriteToServerAsync(reader, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Emit a batch of log events, running asynchronously.
/// </summary>
/// <param name="events">The events to emit.</param>
/// <remarks>
/// Override either <see cref="PeriodicBatchingSink.EmitBatch" /> or
/// <see cref="PeriodicBatchingSink.EmitBatchAsync" />, not both.
/// </remarks>
protected override async Task EmitBatchAsync(IEnumerable<LogEvent> events)
{
    // Stage the batch in the reusable staging table.
    FillDataTable(events);

    using (var connection = new SqlConnection(_connectionString))
    {
        await connection.OpenAsync(_token.Token);
        using (var bulkCopy = new SqlBulkCopy(connection))
        {
            bulkCopy.DestinationTableName = _tableName;
            try
            {
                await bulkCopy.WriteToServerAsync(_eventsTable, _token.Token);
            }
            catch (Exception ex)
            {
                // create LogEvent so exception can be added to DB during next batch run.
                var failureEvent = new LogEvent(
                    DateTimeOffset.Now,
                    LogEventLevel.Error,
                    ex,
                    new MessageTemplate("", new[] { new TextToken("Internal error while trying to write events to database") }),
                    Enumerable.Empty<LogEventProperty>());
                Emit(failureEvent);
            }
            finally
            {
                // Processed the items, clear for the next run
                _eventsTable.Clear();
            }
        }
    }
}
/// <summary>
/// Appends a batch of event streams belonging to a single aggregate root to the event
/// table inside one SQL transaction, translating duplicate-key SQL errors into typed
/// DuplicateEvent / DuplicateCommand results.
/// </summary>
/// <param name="eventStreams">Non-empty set of event streams, all for one aggregate.</param>
/// <exception cref="ArgumentException">Empty batch, or streams from more than one aggregate.</exception>
public Task<AsyncTaskResult<EventAppendResult>> BatchAppendAsync(IEnumerable<DomainEventStream> eventStreams)
{
    if (eventStreams.Count() == 0)
    {
        throw new ArgumentException("Event streams cannot be empty.");
    }
    var table = BuildEventTable();
    // A batch must target exactly one aggregate root.
    var aggregateRootIds = eventStreams.Select(x => x.AggregateRootId).Distinct();
    if (aggregateRootIds.Count() > 1)
    {
        throw new ArgumentException("Batch append event only support for one aggregate.");
    }
    var aggregateRootId = aggregateRootIds.Single();
    foreach (var eventStream in eventStreams)
    {
        AddDataRow(table, eventStream);
    }
    // TryIOFuncAsync is the project's IO wrapper — presumably adds retry/accounting; verify in IOHelper.
    return _ioHelper.TryIOFuncAsync(async () =>
    {
        try
        {
            using (var connection = GetConnection())
            {
                await connection.OpenAsync();
                // BeginTransaction is synchronous; Task.Run keeps it off the current context.
                // NOTE(review): the transaction is never disposed — only committed or rolled back.
                var transaction = await Task.Run<SqlTransaction>(() => connection.BeginTransaction());
                using (var copy = new SqlBulkCopy(connection, SqlBulkCopyOptions.Default, transaction))
                {
                    InitializeSqlBulkCopy(copy, aggregateRootId);
                    try
                    {
                        await copy.WriteToServerAsync(table);
                        await Task.Run(() => transaction.Commit());
                        return new AsyncTaskResult<EventAppendResult>(AsyncTaskStatus.Success, EventAppendResult.Success);
                    }
                    catch
                    {
                        // Best-effort rollback, then rethrow so the outer handlers classify
                        // the original failure (e.g. a 2601 duplicate-key error).
                        try
                        {
                            transaction.Rollback();
                        }
                        catch (Exception ex)
                        {
                            _logger.ErrorFormat("Transaction rollback failed.", ex);
                        }
                        throw;
                    }
                }
            }
        }
        catch (SqlException ex)
        {
            // 2601 = duplicate key. Distinguish the version index (same aggregate version
            // written twice) from the command index (same command handled twice).
            if (ex.Number == 2601 && ex.Message.Contains(_versionIndexName))
            {
                return new AsyncTaskResult<EventAppendResult>(AsyncTaskStatus.Success, EventAppendResult.DuplicateEvent);
            }
            else if (ex.Number == 2601 && ex.Message.Contains(_commandIndexName))
            {
                return new AsyncTaskResult<EventAppendResult>(AsyncTaskStatus.Success, EventAppendResult.DuplicateCommand);
            }
            _logger.Error("Batch append event has sql exception.", ex);
            return new AsyncTaskResult<EventAppendResult>(AsyncTaskStatus.IOException, ex.Message, EventAppendResult.Failed);
        }
        catch (Exception ex)
        {
            _logger.Error("Batch append event has unknown exception.", ex);
            return new AsyncTaskResult<EventAppendResult>(AsyncTaskStatus.Failed, ex.Message, EventAppendResult.Failed);
        }
    }, "BatchAppendEventsAsync");
}
/// <summary>
/// Copies every workbook table whose name matches the configured filter from the OLE DB
/// source into a user-chosen SQL Server destination table, reporting per-row progress on
/// the console. The destination table name is asked once and optionally reused.
/// </summary>
private static void BulkyCopyTables()
{
    foreach (var table in GetWorbookTablesList(sourceConnection))
    {
        // Only process tables whose name matches the configured filter.
        if (!regex.IsMatch(table)) continue;
        Console.WriteLine("Tabela: " + table);

        // Ask for the destination table unless the user chose to reuse the previous one.
        if (lembrar == "n")
        {
            Console.WriteLine("Digite o nome para tabela temporaria destino: ");
            _destinationTable = Console.ReadLine();
            Console.WriteLine("Deseja usar esta tabela para o resto da operação: ");
            lembrar = Console.ReadLine();
        }

        var dataTable = new DataTable(table);
        string query = "SELECT * FROM [" + table + "]";
        using (var adapter = new OleDbDataAdapter(query, sourceConnection))
        {
            adapter.Fill(dataTable);
        }

        using (var copy = new SqlBulkCopy(destinyConnection))
        {
            copy.DestinationTableName = _destinationTable;
            copy.BulkCopyTimeout = 0; // no timeout: workbook loads can be slow

            VinculaColumnTable(dataTable);
            DestinyTableHelper(dataTable);
            BulkCopyMapping(copy, dataTable);

            // Kept for its side effects / future use; the generated SELECT is not used here.
            var selectCommand = GenerateSelectCommand(dataTable);

            Console.WriteLine("Inicido do bulk copy da tabela: " + dataTable.TableName);
            copy.SqlRowsCopied += (o, s) =>
            {
                Console.Write("\rQtde. linhas copiadas {0} do total de {1}", s.RowsCopied, dataTable.Rows.Count);
            };
            copy.NotifyAfter = 2;

            destinyConnection.Open();
            try
            {
                // Fix: the original called WriteToServerAsync and blocked on task.Wait(),
                // which wraps failures in AggregateException; this method is synchronous,
                // so use the synchronous API directly.
                copy.WriteToServer(dataTable);
            }
            finally
            {
                // Fix: always close the shared connection, otherwise a failed copy left it
                // open and the next iteration's Open() would throw.
                Console.WriteLine("");
                destinyConnection.Close();
            }

            dataTable.Clear();
            dataTable.Dispose();
        }
    }
}
/// <summary>
/// Click handler that transfers the user-selected columns of a FoxPro table into a SQL
/// Server table via SqlBulkCopy, after validating the column selection grid.
/// async void is acceptable here only because this is a top-level WPF event handler.
/// </summary>
private async void BtnPassData_Click(object sender, RoutedEventArgs e)
{
    try
    {
        #region validaciones
        // The comparison grid (dt_compare) must have rows, at least one checked column,
        // and every checked pair must match exactly (including case).
        if (dt_compare.Rows.Count <= 0)
        {
            MessageBox.Show("empty comparison table", "alert", MessageBoxButton.OK, MessageBoxImage.Exclamation);
            return;
        }
        bool ischeck = false;
        List<string> stringc = new List<string>();
        foreach (DataRow item in dt_compare.Rows)
        {
            if (Convert.ToBoolean(item["CHECK"]))
            {
                ischeck = true;
                string colm_fox = item["COLUMN_NAME"].ToString();
                string colm_sql = item["COLUMN_NAME_SQL"].ToString();
                // Case-sensitive comparison on purpose: mismatched casing is reported below.
                if (!String.Equals(colm_fox, colm_sql))
                {
                    stringc.Add(colm_fox + "-" + colm_sql);
                }
            }
        }
        ;
        if (!ischeck)
        {
            MessageBox.Show("confirm at least one column", "alert", MessageBoxButton.OK, MessageBoxImage.Exclamation);
            return;
        }
        if (stringc.Count > 0)
        {
            string concat = "selected columns do not match uppercase and lowercase " + Environment.NewLine;
            foreach (var item in stringc)
            {
                concat += item + Environment.NewLine;
            }
            MessageBox.Show(concat, "alert", MessageBoxButton.OK, MessageBoxImage.Exclamation);
            return;
        }
        #endregion
        if (MessageBox.Show("wants to pass the information ?", "Alerta", MessageBoxButton.YesNo, MessageBoxImage.Information) == MessageBoxResult.Yes)
        {
            // Optionally collect a WHERE clause through the AddWhere dialog.
            string where = "";
            if (Convert.ToBoolean(BtnWhere.IsChecked))
            {
                AddWhere ww = new AddWhere();
                ww.ShowInTaskbar = false;
                ww.table = CbTableFox.SelectedValue.ToString();
                ww.Owner = Application.Current.MainWindow;
                ww.WindowStartupLocation = WindowStartupLocation.CenterScreen;
                ww.ShowDialog();
                where = ww.where; // presumably the clause the user built — verify AddWhere
            }
            string table_fox = CbTableFox.SelectedValue.ToString();
            string table_sql = CbTableSql.SelectedValue.ToString();
            string connsql = TxPathSqlServer.Text;
            // Build the source SELECT list, applying per-column CAST and/or RTRIM options.
            List<ListColumn> list_col = new List<ListColumn>();
            foreach (DataRow item in dt_compare.Rows)
            {
                bool flag = Convert.ToBoolean(item["CHECK"]);
                bool cast = Convert.ToBoolean(item["CAST"]);
                bool rtrim = Convert.ToBoolean(item["RTRIM"]);
                if (flag)
                {
                    ListColumn lc = new ListColumn();
                    lc.column = item["COLUMN_NAME"].ToString().Trim();
                    if (cast && !rtrim)
                    {
                        // CAST to the SQL-side numeric type/precision/scale.
                        string clm = item["COLUMN_NAME"].ToString().Trim();
                        string tipo = item["TYPE_SQL"].ToString().Trim();
                        string np = item["NUMERIC_PRECISION_SQL"].ToString().Trim();
                        string ns = item["NUMERIC_SCALE_SQL"].ToString().Trim();
                        string cast_column = "cast(" + clm + " as " + tipo + "(" + np + "," + ns + ")) as " + clm;
                        lc.column_convert = cast_column.Trim();
                        list_col.Add(lc);
                    }
                    if (!cast && rtrim)
                    {
                        // RTRIM only makes sense for char/varchar columns.
                        string clm = item["COLUMN_NAME"].ToString().Trim();
                        string tipo = item["TYPE_SQL"].ToString().Trim().ToLower();
                        bool iftext = tipo == "char" || tipo == "varchar" ? true : false;
                        string cast_column = iftext ? "RTRIM(" + clm + ") as " + clm + "" : clm;
                        lc.column_convert = cast_column.Trim();
                        list_col.Add(lc);
                    }
                    if (!cast && !rtrim)
                    {
                        // Plain pass-through column.
                        // NOTE(review): the cast && rtrim combination is silently ignored.
                        string clm = item["COLUMN_NAME"].ToString().Trim();
                        lc.column_convert = clm;
                        list_col.Add(lc);
                    }
                }
                ;
            }
            ;
            // NOTE(review): table/column names are concatenated into SQL text; inputs come
            // from the UI grid, not free text, but this is still injection-prone.
            string cab_colm_parm = String.Join(",", list_col.Select(x => x.column_convert).ToArray());
            string query = "select " + cab_colm_parm + " from " + table_fox + " " + where;
            string root = TxPathFoxPro.Text;
            // Busy-state on while the FoxPro read runs on a worker thread.
            GridMain.IsEnabled = false;
            BusyIndicator.IsIndeterminate = true;
            TxLoad.Visibility = Visibility.Visible;
            CancellationTokenSource source = new CancellationTokenSource();
            var slowTask = Task<OleDbDataReader>.Factory.StartNew(() => SelectDBFDR(query, root), source.Token);
            await slowTask;
            if (slowTask.IsCompleted)
            {
                OleDbDataReader data = ((OleDbDataReader)slowTask.Result);
                using (System.Data.SqlClient.SqlBulkCopy bc = new System.Data.SqlClient.SqlBulkCopy(connsql))
                {
                    bc.BulkCopyTimeout = 0; // no timeout for large transfers
                    bc.DestinationTableName = table_sql;
                    foreach (var item in list_col)
                    {
                        bc.ColumnMappings.Add(item.column.Trim(), item.column.Trim());
                    }
                    var t = bc.WriteToServerAsync(data);
                    await t;
                    if (t.IsCompleted)
                    {
                        MessageBox.Show("successful data transfer", "alert", MessageBoxButton.OK, MessageBoxImage.Information);
                        GridMain.IsEnabled = true;
                        BusyIndicator.IsIndeterminate = false;
                        TxLoad.Visibility = Visibility.Hidden;
                    }
                }
                GridMain.IsEnabled = true;
                BusyIndicator.IsIndeterminate = false;
                TxLoad.Visibility = Visibility.Hidden;
            }
            // NOTE(review): the busy-state reset is repeated three times on this path.
            GridMain.IsEnabled = true;
            BusyIndicator.IsIndeterminate = false;
            TxLoad.Visibility = Visibility.Hidden;
            CheckAll.IsChecked = false;
        }
    }
    catch (Exception w)
    {
        // Any failure: report it and restore the UI to its idle state.
        MessageBox.Show("error pass data:" + w, "alert", MessageBoxButton.OK, MessageBoxImage.Error);
        GridMain.IsEnabled = true;
        BusyIndicator.IsIndeterminate = false;
        TxLoad.Visibility = Visibility.Hidden;
    }
}
/// <summary>
/// Imports a CSV tax-record file: updates changed constituents through EF, bulk-inserts
/// new constituents and all tax records with SqlBulkCopy, and returns a status view model
/// describing counts, timing and success.
/// </summary>
/// <param name="filePath">Path to the uploaded CSV file; may be null/missing.</param>
/// <returns>A populated <c>DatabaseStatusViewModel</c>; never throws for a missing file.</returns>
private async Task<DatabaseStatusViewModel> ProcessDataFile(string filePath)
{
    var startTime = DateTime.Now;
    if (filePath == null || !File.Exists(filePath))
        return new DatabaseStatusViewModel() { Message = "File does not exist or No file was uploaded" };
    var status = new DatabaseStatusViewModel()
    {
        Success = false,
        RecordsInFile = 0,
        RecordsLoaded = 0,
    };
    // Lenient CSV parsing: bad/missing fields and empty rows are skipped, not fatal.
    var config = new CsvConfiguration()
    {
        IsHeaderCaseSensitive = false,
        WillThrowOnMissingField = false,
        IgnoreReadingExceptions = true,
        ThrowOnBadData = false,
        SkipEmptyRecords = true,
    };
    var csv = new CsvReader(new StreamReader(filePath, Encoding.Default, true), config);
    csv.Configuration.RegisterClassMap<CsvMap>();
    var csvTaxRecords = csv.GetRecords<CsvTaxRecordViewModel>().ToList();
    // One constituent per distinct LookupId in the file; compare against the database to
    // split into new constituents, existing ones, and existing ones whose data changed.
    var csvConstituents = csvTaxRecords.DistinctBy(m => m.LookupId).AsQueryable().ProjectTo<ConstituentViewModel>().ToList();
    var dbConstituents = db.Constituents.ProjectTo<ConstituentViewModel>().ToList();
    var newConstituentList = csvConstituents.Except(dbConstituents, new ConstituentIdComparer()).ToList();
    var existingConstituentList = csvConstituents.Except(newConstituentList, new ConstituentIdComparer());
    var constituentChangeList = existingConstituentList.Except(dbConstituents, new ConstituentComparer());
    // Update existing constituents that differ from database
    foreach (var vm in constituentChangeList)
    {
        ConstituentViewModel cvm = dbConstituents.FirstOrDefault(x => x.LookupId == vm.LookupId);
        if (cvm == null) continue;
        vm.Id = cvm.Id;
        vm.UpdatedBy = "system";
        vm.UpdatedDate = DateTime.Now;
        cvm.CopyPropertiesFrom(vm);
        var constituent = Mapper.Map<ConstituentViewModel, Constituent>(cvm);
        db.Constituents.AddOrUpdate(constituent);
    }
    // SaveChanges return value is used as the "updated" count.
    status.ConstituentsUpdated = db.SaveChanges();
    // Add new Constituents missing from database
    // Bulk copy new Constituent records
    if (newConstituentList.Count > 0)
    {
        foreach (var vm in newConstituentList)
        {
            vm.CreatedBy = "system";
            vm.UpdatedBy = "system";
            vm.CreatedDate = DateTime.Now;
            vm.UpdatedDate = DateTime.Now;
        }
        var missingTbl = newConstituentList.ToDataTable();
        using (var sbc = new SqlBulkCopy(db.Database.Connection.ConnectionString))
        {
            sbc.DestinationTableName = db.GetTableName<Constituent>();
            sbc.BatchSize = 10000;
            sbc.BulkCopyTimeout = 0; // no timeout: files can be large
            foreach (var col in missingTbl.Columns)
            {
                sbc.ColumnMappings.Add(col.ToString(), col.ToString());
            }
            try
            {
                await sbc.WriteToServerAsync(missingTbl);
                // RowsCopiedCount is a project extension — presumably reads SqlBulkCopy's
                // internal row counter; verify its implementation.
                status.ConstituentsCreated = sbc.RowsCopiedCount();
            }
            catch (Exception e)
            {
                // Bulk-copy failure is reported via the status message, not rethrown.
                status.Message = e.Message;
            }
        }
    }
    // Update constituents because of new bulk copy constituents
    //TODO: Change Created and Updated user to logged in user
    // Re-query so newly bulk-inserted constituents get database keys.
    dbConstituents = db.Constituents.ProjectTo<ConstituentViewModel>().ToList();
    // Build dictionary to map database key to csv records LookupId
    var dic = new Dictionary<int, string>();
    dbConstituents.ForEach(x => dic.Add(x.Id, x.LookupId));
    // Update parent key for each tax record; unmatched LookupIds fall back to key 0
    // (FirstOrDefault on an empty match returns the default KeyValuePair).
    csvTaxRecords.ForEach((s) =>
    {
        s.ConstituentId = dic.FirstOrDefault(d => d.Value == s.LookupId).Key;
        s.CreatedBy = "system";
        s.UpdatedBy = "system";
        s.CreatedDate = DateTime.Now;
        s.UpdatedDate = DateTime.Now;
    });
    // Bulk insert new tax records
    using (var sbc = new SqlBulkCopy(db.Database.Connection.ConnectionString))
    {
        sbc.DestinationTableName = db.GetTableName<TaxItem>();
        sbc.BatchSize = 10000;
        sbc.BulkCopyTimeout = 0;
        var dt = Mapper.Map<List<CsvTaxRecordViewModel>, List<TaxItem>>(csvTaxRecords).ToDataTable();
        foreach (var col in dt.Columns)
        {
            sbc.ColumnMappings.Add(col.ToString(), col.ToString());
        }
        try
        {
            await sbc.WriteToServerAsync(dt);
            status.RecordsLoaded = sbc.RowsCopiedCount();
        }
        catch (Exception ex)
        {
            status.Message = ex.Message;
        }
    }
    status.RecordsInFile = csvTaxRecords.Count;
    status.Success = true;
    // csv.Row - 2 accounts for the header row and the final advance past the last record;
    // a mismatch suggests rows were silently skipped due to header-mapping problems.
    if (csvTaxRecords.Count != csv.Row - 2)
    {
        status.Message = "Error in file header mappings. Check file headers and try again.";
        status.Success = false;
    }
    else
    {
        status.Success = true;
        status.Message = "Successfully loaded tax records.";
    }
    status.TotalTime = DateTime.Now.Subtract(startTime).ToString(@"hh\:mm\:ss");
    csv.Dispose();
    return status;
}