/// <summary>
/// Creates a SqlBulkCopy instance fully initialized with the default settings provided by the SqlBulkCopyFactory.
/// </summary>
/// <param name="dataTable">The data to be bulk loaded.</param>
/// <param name="tableDefinition">Schema definition of the target table.</param>
/// <param name="transaction">The transaction the bulk copy will participate in.</param>
/// <returns>A configured SqlBulkCopy ready for use.</returns>
protected virtual SqlBulkCopy CreateSqlBulkCopyHelper(DataTable dataTable, SqlBulkHelpersTableDefinition tableDefinition, SqlTransaction transaction)
{
    //Delegate to a default-configured Factory so that all option defaults are defined in one place.
    var bulkCopyFactory = new SqlBulkCopyFactory();
    return bulkCopyFactory.CreateSqlBulkCopy(dataTable, tableDefinition, transaction);
}
//TODO: BBernard - If beneficial, we can Add Caching here at this point to cache the fully formed Merge Queries!
/// <summary>
/// Builds the full set of Sql Merge scripts for the given table definition and merge action
/// by delegating to the SqlBulkHelpersMergeScriptBuilder.
/// </summary>
/// <param name="tableDefinition">Schema definition of the target table.</param>
/// <param name="mergeAction">Flags controlling which merge clauses (Insert/Update) are generated.</param>
/// <returns>The generated merge script results.</returns>
protected virtual SqlMergeScriptResults BuildSqlMergeScriptsHelper(SqlBulkHelpersTableDefinition tableDefinition, SqlBulkHelpersMergeAction mergeAction)
{
    var scriptBuilder = new SqlBulkHelpersMergeScriptBuilder();
    return scriptBuilder.BuildSqlMergeScripts(tableDefinition, mergeAction);
}
/// <summary>
/// Creates a SqlBulkCopy bound to the given transaction, with batch size/timeout initialized,
/// and with column mappings built for every DataTable column that exists in the table schema.
/// </summary>
/// <param name="dataTable">The data to be bulk loaded; its columns drive the mappings.</param>
/// <param name="tableDefinition">Schema definition used to validate/resolve column names (case-insensitive).</param>
/// <param name="transaction">The transaction (and connection) the bulk copy will use.</param>
/// <returns>A configured SqlBulkCopy with valid column mappings.</returns>
public virtual SqlBulkCopy CreateSqlBulkCopy(DataTable dataTable, SqlBulkHelpersTableDefinition tableDefinition, SqlTransaction transaction)
{
    //Always initialize a Batch size & Timeout from this factory's configured options.
    var sqlBulk = new SqlBulkCopy(transaction.Connection, this.BulkCopyOptions, transaction)
    {
        BatchSize = this.BulkCopyBatchSize,
        BulkCopyTimeout = this.BulkCopyTimeoutSeconds
    };

    //Map only columns that exist in BOTH the Model/DataTable & the Table Schema!
    //NOTE: BBernard - All valid columns (including the Identity Key column) are mapped to support Insert or Updates!
    foreach (DataColumn dataColumn in dataTable.Columns)
    {
        var matchedDbColumn = tableDefinition.FindColumnCaseInsensitive(dataColumn.ColumnName);
        if (matchedDbColumn != null)
        {
            sqlBulk.ColumnMappings.Add(dataColumn.ColumnName, matchedDbColumn.ColumnName);
        }
    }

    //BBernard
    //The Row Number column is not part of the Model, so it must be mapped manually for Bulk Loading . . .
    //  but only when the data table actually defines a RowNumber column.
    if (dataTable.Columns.Contains(SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME))
    {
        sqlBulk.ColumnMappings.Add(SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME, SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME);
    }

    return sqlBulk;
}
/// <summary>
/// Creates a SqlBulkCopy via the SqlBulkCopyFactory, overriding only the bulk copy timeout.
/// </summary>
/// <param name="dataTable">The data to be bulk loaded.</param>
/// <param name="tableDefinition">Schema definition of the target table.</param>
/// <param name="transaction">The transaction the bulk copy will participate in.</param>
/// <param name="timeoutSeconds">Bulk copy timeout (in seconds) to apply to the factory.</param>
/// <returns>A configured SqlBulkCopy ready for use.</returns>
protected virtual SqlBulkCopy CreateSqlBulkCopyHelper(
    DataTable dataTable,
    SqlBulkHelpersTableDefinition tableDefinition,
    SqlTransaction transaction,
    int timeoutSeconds
)
{
    //Initialize the BulkCopy Factory with the caller-specified timeout; all other options remain factory defaults.
    var bulkCopyFactory = new SqlBulkCopyFactory()
    {
        BulkCopyTimeoutSeconds = timeoutSeconds
    };
    return bulkCopyFactory.CreateSqlBulkCopy(dataTable, tableDefinition, transaction);
}
/// <summary>
/// BBernard - Private process helper to wrap up and encapsulate the initialization logic that is shared
/// across both Async and Sync methods...
/// </summary>
/// <param name="entityList">Entities to be converted & bulk processed.</param>
/// <param name="tableName">Target table name; validated against the DB schema (prevents Sql Injection).</param>
/// <param name="mergeAction">Flags controlling which merge clauses (Insert/Update) are generated.</param>
/// <param name="transaction">The transaction that all Sql work will participate in.</param>
/// <param name="timeoutSeconds">Timeout applied to both the SqlCommand and the SqlBulkCopy.</param>
/// <param name="matchQualifierExpression">Optional custom match qualifier for the merge.</param>
/// <returns>A fully initialized ProcessHelper bundling the table definition, data, scripts, command & bulk copy.</returns>
protected virtual ProcessHelper CreateProcessHelper(
    List<T> entityList,
    String tableName,
    SqlBulkHelpersMergeAction mergeAction,
    SqlTransaction transaction,
    int timeoutSeconds,
    SqlMergeMatchQualifierExpression matchQualifierExpression = null
)
{
    //***STEP #1: Load the Table Schema Definitions (cached after initial Load)!!!
    //BBernard
    //NOTE: Prevent SqlInjection - requiring the TableName to be a valid value (as retrieved from the DB Schema)
    //          eliminates the risk of Sql Injection; all other parameters are Strongly typed (vs raw Strings).
    var tableDefinition = this.GetTableSchemaDefinition(tableName);

    //***STEP #2: Dynamically Convert All Entities to a DataTable for consumption by the SqlBulkCopy class...
    var dataTable = this.ConvertEntitiesToDataTableHelper(entityList, tableDefinition.IdentityColumn);

    //***STEP #3: Build all of the Sql Scripts needed to Process the entities based on the specified Table definition.
    var sqlScripts = this.BuildSqlMergeScriptsHelper(tableDefinition, mergeAction, matchQualifierExpression);

    //***STEP #4: Dynamically Initialize the Bulk Copy Helper using our Table data and table Definition!
    var sqlBulkCopyHelper = this.CreateSqlBulkCopyHelper(dataTable, tableDefinition, transaction, timeoutSeconds);

    return new ProcessHelper()
    {
        TableDefinition = tableDefinition,
        DataTable = dataTable,
        SqlMergeScripts = sqlScripts,
        SqlCommand = new SqlCommand(String.Empty, transaction.Connection, transaction)
        {
            CommandTimeout = timeoutSeconds
        },
        SqlBulkCopy = sqlBulkCopyHelper
    };
}
/// <summary>
/// Builds the complete set of dynamic Sql scripts needed to bulk merge data: one script that creates
/// transaction-scoped temp staging/output tables, and one script that executes the MERGE, returns the
/// identity results (sorted by row number, then identity), and drops the temp tables.
/// </summary>
/// <param name="tableDefinition">Schema definition of the target table (name, columns, identity column).</param>
/// <param name="mergeAction">Flags enum; Insert and/or Update clauses are generated based on the flags set.</param>
/// <param name="matchQualifierExpression">Optional custom match qualifier; defaults to the Identity column when null.</param>
/// <returns>SqlMergeScriptResults bundling the temp table names, both scripts, and the effective qualifier expression.</returns>
public virtual SqlMergeScriptResults BuildSqlMergeScripts( SqlBulkHelpersTableDefinition tableDefinition, SqlBulkHelpersMergeAction mergeAction, SqlMergeMatchQualifierExpression matchQualifierExpression = null )
{
    //NOTE: BBernard - This temp table name MUST begin with 1 (and only 1) hash "#" to ensure it is a Transaction Scoped table!
    //The Guid suffix keeps concurrent merge executions from colliding on temp table names.
    var tempStagingTableName = $"#SqlBulkHelpers_STAGING_TABLE_{Guid.NewGuid()}";
    var tempOutputIdentityTableName = $"#SqlBulkHelpers_OUTPUT_IDENTITY_TABLE_{Guid.NewGuid()}";

    //NOTE(review): if the table has no Identity column this falls back to String.Empty, and the scripts below
    //  would then contain empty "[]" column references (invalid T-SQL) -- assumes an Identity column exists; TODO confirm.
    var identityColumnName = tableDefinition.IdentityColumn?.ColumnName ?? String.Empty;
    var columnNamesListWithoutIdentity = tableDefinition.GetColumnNames(false);
    var columnNamesWithoutIdentityCSV = columnNamesListWithoutIdentity.Select(c => $"[{c}]").ToCSV();

    //Dynamically build the Merge Match Qualifier Fields Expression
    //NOTE: This is an optional parameter now, but is initialized to the IdentityColumn by Default!
    var qualifierExpression = matchQualifierExpression ?? new SqlMergeMatchQualifierExpression(identityColumnName);
    var mergeMatchQualifierExpressionText = BuildMergeMatchQualifierExpressionText(qualifierExpression);

    //Initialize/Create the Staging Table!
    //NOTE: The ROWNUMBER_COLUMN_NAME (3'rd Column) IS CRITICAL because SqlBulkCopy and the Sql Server OUTPUT clause do not
    //          preserve Order; e.g. it may change based on execution plan (indexes/no indexes, etc.).
    //The SELECT TOP(0) ... INTO pattern clones the target table's column types into the staging table with zero rows.
    String sqlScriptToInitializeTempTables = $@" SELECT TOP(0) -1 as [{identityColumnName}], {columnNamesWithoutIdentityCSV}, -1 as [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempStagingTableName}] FROM [{tableDefinition.TableName}]; ALTER TABLE [{tempStagingTableName}] ADD PRIMARY KEY ([{identityColumnName}]); SELECT TOP(0) CAST('' AS nvarchar(10)) as [MERGE_ACTION], CAST(-1 AS int) as [IDENTITY_ID], CAST(-1 AS int) [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempOutputIdentityTableName}]; ";

    //NOTE: This is ALL now completed very efficiently on the Sql Server Database side with
    //          NO unnecessary round trips to the Database!
    //Only generate the WHEN NOT MATCHED (Insert) clause if the Insert flag is set.
    var mergeInsertSql = String.Empty;
    if (mergeAction.HasFlag(SqlBulkHelpersMergeAction.Insert))
    {
        mergeInsertSql = $@" WHEN NOT MATCHED BY TARGET THEN INSERT ({columnNamesWithoutIdentityCSV}) VALUES ({columnNamesListWithoutIdentity.Select(c => $"source.[{c}]").ToCSV()}) ";
    }

    //Only generate the WHEN MATCHED (Update) clause if the Update flag is set.
    var mergeUpdateSql = String.Empty;
    if (mergeAction.HasFlag(SqlBulkHelpersMergeAction.Update))
    {
        mergeUpdateSql = $@" WHEN MATCHED THEN UPDATE SET {columnNamesListWithoutIdentity.Select(c => $"target.[{c}] = source.[{c}]").ToCSV()} ";
    }

    //Build the FULL Dynamic Merge Script here...
    //BBernard - 2019-08-07
    //NOTE: We now sort on the RowNumber column that we define; this FIXES issue with SqlBulkCopy.WriteToServer()
    //          where the order of data being written is NOT guaranteed, and there is still no support for the ORDER() hint.
    //          In general it results in inverting the order of data being sent in Bulk which then resulted in Identity
    //          values being incorrect based on the order of data specified.
    //NOTE: We MUST SORT the OUTPUT Results by ROWNUMBER and then by IDENTITY Column in case there are multiple matches due to
    //          custom match Qualifiers; this ensures that data is sorted in a way that postprocessing
    //          can occur & be validated as expected.
    //NOTE(review): SQL Server's optimizer is documented to discard an ORDER BY paired with TOP 100 PERCENT in a
    //  derived table, so the source-ordering intent below may not be honored -- verify against the target SQL Server version.
    String sqlScriptToExecuteMergeProcess = $@" MERGE [{tableDefinition.TableName}] as target USING ( SELECT TOP 100 PERCENT * FROM [{tempStagingTableName}] ORDER BY [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] ASC ) as source ON {mergeMatchQualifierExpressionText} {mergeUpdateSql} {mergeInsertSql} OUTPUT $action, INSERTED.[{identityColumnName}], source.[{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempOutputIdentityTableName}] ([MERGE_ACTION], [IDENTITY_ID], [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}]); SELECT [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}], [IDENTITY_ID], [MERGE_ACTION] FROM [{tempOutputIdentityTableName}] ORDER BY [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] ASC, [IDENTITY_ID] ASC; DROP TABLE [{tempStagingTableName}]; DROP TABLE [{tempOutputIdentityTableName}]; ";

    return(new SqlMergeScriptResults(
        tempStagingTableName,
        tempOutputIdentityTableName,
        sqlScriptToInitializeTempTables,
        sqlScriptToExecuteMergeProcess,
        qualifierExpression
    ));
}
/// <summary>
/// Builds the dynamic Sql scripts needed to bulk merge data: one script that creates transaction-scoped
/// temp staging/output tables, and one script that executes the MERGE (matching on the Identity column),
/// returns the identity results sorted by row number, and drops the temp tables.
/// </summary>
/// <param name="tableDefinition">Schema definition of the target table (name, columns, identity column).</param>
/// <param name="mergeAction">Flags enum; Insert and/or Update clauses are generated based on the flags set.</param>
/// <returns>SqlMergeScriptResults bundling the temp table names and both generated scripts.</returns>
public SqlMergeScriptResults BuildSqlMergeScripts(SqlBulkHelpersTableDefinition tableDefinition, SqlBulkHelpersMergeAction mergeAction)
{
    //NOTE: BBernard - This temp table name MUST begin with 1 (and only 1) hash "#" to ensure it is a Transaction Scoped table!
    //The Guid suffix keeps concurrent merge executions from colliding on temp table names.
    var tempStagingTableName = $"#SqlBulkHelpers_STAGING_TABLE_{Guid.NewGuid()}";
    var tempOutputIdentityTableName = $"#SqlBulkHelpers_OUTPUT_IDENTITY_TABLE_{Guid.NewGuid()}";

    //NOTE(review): if the table has no Identity column this falls back to String.Empty, and the scripts below
    //  would then contain empty "[]" column references (invalid T-SQL) -- assumes an Identity column exists; TODO confirm.
    var identityColumnName = tableDefinition.IdentityColumn?.ColumnName ?? String.Empty;
    var columnNamesListWithoutIdentity = tableDefinition.GetColumnNames(false);
    var columnNamesWithoutIdentityCSV = columnNamesListWithoutIdentity.Select(c => $"[{c}]").ToCSV();

    //Initialize/Create the Staging Table!
    //The SELECT TOP(0) ... INTO pattern clones the target table's column types into the staging table with zero rows;
    //the extra ROWNUMBER column preserves client-side ordering since SqlBulkCopy & OUTPUT do not guarantee order.
    String sqlScriptToInitializeTempTables = $@" SELECT TOP(0) -1 as [{identityColumnName}], {columnNamesWithoutIdentityCSV}, -1 as [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempStagingTableName}] FROM [{tableDefinition.TableName}]; ALTER TABLE [{tempStagingTableName}] ADD PRIMARY KEY ([{identityColumnName}]); SELECT TOP(0) CAST('' AS nvarchar(10)) as [MERGE_ACTION], CAST(-1 AS int) as [IDENTITY_ID], CAST(-1 AS int) [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempOutputIdentityTableName}]; ";

    //NOTE: This is ALL now completed very efficiently on the Sql Server Database side with
    //          NO unnecessary round trips to the Database!
    //Only generate the WHEN NOT MATCHED (Insert) clause if the Insert flag is set.
    var mergeInsertSql = String.Empty;
    if (mergeAction.HasFlag(SqlBulkHelpersMergeAction.Insert))
    {
        mergeInsertSql = $@" WHEN NOT MATCHED BY TARGET THEN INSERT ({columnNamesWithoutIdentityCSV}) VALUES ({columnNamesListWithoutIdentity.Select(c => $"source.[{c}]").ToCSV()}) ";
    }

    //Only generate the WHEN MATCHED (Update) clause if the Update flag is set.
    var mergeUpdateSql = String.Empty;
    if (mergeAction.HasFlag(SqlBulkHelpersMergeAction.Update))
    {
        mergeUpdateSql = $@" WHEN MATCHED THEN UPDATE SET {columnNamesListWithoutIdentity.Select(c => $"target.[{c}] = source.[{c}]").ToCSV()} ";
    }

    //Build the FULL Dynamic Merge Script here...
    //BBernard - 2019-08-07
    //NOTE: We now sort on the RowNumber column that we define; this FIXES issue with SqlBulkCopy.WriteToServer()
    //          where the order of data being written is NOT guaranteed, and there is still no support for the ORDER() hint.
    //          In general it results in inverting the order of data being sent in Bulk which then resulted in Identity
    //          values being incorrect based on the order of data specified.
    //NOTE(review): SQL Server's optimizer is documented to discard an ORDER BY paired with TOP 100 PERCENT in a
    //  derived table, so the source-ordering intent below may not be honored -- verify against the target SQL Server version.
    String sqlScriptToExecuteMergeProcess = $@" MERGE [{tableDefinition.TableName}] as target USING ( SELECT TOP 100 PERCENT * FROM [{tempStagingTableName}] ORDER BY [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] ASC ) as source ON target.[{identityColumnName}] = source.[{identityColumnName}] {mergeUpdateSql} {mergeInsertSql} OUTPUT $action, INSERTED.[{identityColumnName}], source.[{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempOutputIdentityTableName}] ([MERGE_ACTION], [IDENTITY_ID], [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}]); SELECT [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}], [IDENTITY_ID], [MERGE_ACTION] FROM [{tempOutputIdentityTableName}] ORDER BY [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] ASC; DROP TABLE [{tempStagingTableName}]; DROP TABLE [{tempOutputIdentityTableName}]; ";

    return(new SqlMergeScriptResults(
        tempStagingTableName,
        tempOutputIdentityTableName,
        sqlScriptToInitializeTempTables,
        sqlScriptToExecuteMergeProcess
    ));
}