/// <summary>
/// BBernard
/// This is the Primary Synchronous method that supports Insert, Update, and InsertOrUpdate via the flexibility of the Sql MERGE query!
/// </summary>
/// <param name="entities">The entities to insert and/or update; enumerated exactly once.</param>
/// <param name="tableName">Target table name; validated against the Db schema (see CreateProcessHelper) to prevent Sql Injection.</param>
/// <param name="mergeAction">Flags controlling whether the MERGE performs Insert, Update, or both.</param>
/// <param name="transaction">The Sql transaction within which all work executes.</param>
/// <param name="matchQualifierExpression">Optional custom match qualifier; when null the Identity column is used by default.</param>
/// <returns>The original entities post-processed with any newly generated Identity values.</returns>
protected virtual IEnumerable<T> BulkInsertOrUpdateWithIdentityColumn(
    IEnumerable<T> entities,
    String tableName,
    SqlBulkHelpersMergeAction mergeAction,
    SqlTransaction transaction,
    SqlMergeMatchQualifierExpression matchQualifierExpression = null
)
{
    //For Performance we ensure the entities are only ever enumerated One Time!
    var materializedEntities = entities.ToList();
    var timeoutSeconds = this.BulkOperationTimeoutSeconds;

    using (ProcessHelper process = this.CreateProcessHelper(
        materializedEntities, tableName, mergeAction, transaction, timeoutSeconds, matchQualifierExpression))
    {
        var command = process.SqlCommand;
        var bulkCopy = process.SqlBulkCopy;
        var scripts = process.SqlMergeScripts;

        //***STEP #4: Create Tables for Buffering Data & Storing Output values
        command.CommandText = scripts.SqlScriptToInitializeTempTables;
        command.ExecuteNonQuery();

        //***STEP #5: Write Data to the Staging/Buffer Table as fast as possible!
        bulkCopy.DestinationTableName = $"[{scripts.TempStagingTableName}]";
        bulkCopy.WriteToServer(process.DataTable);

        //***STEP #6: Merge Data from the Staging Table into the Real Table
        //      and simultaneously Output Identity Id values into Output Temp Table!
        command.CommandText = scripts.SqlScriptToExecuteMergeProcess;

        //Execute this script and load the results....
        var mergeResults = new List<MergeResult>();
        using (SqlDataReader reader = command.ExecuteReader())
        {
            while (reader.Read())
            {
                mergeResults.Add(ReadCurrentMergeResultHelper(reader));
            }
        }

        //***STEP #7: FINALLY Update all of the original Entities with INSERTED/New Identity Values
        //NOTE: IF MULTIPLE NON-UNIQUE items are updated then ONLY ONE Identity value can be returned, though multiple
        //      other items may have in-reality actually been updated within the DB. This is a likely scenario
        //      IF a different non-unique Match Qualifier Field is specified.
        var updatedEntities = this.PostProcessEntitiesWithMergeResults(
            materializedEntities,
            mergeResults,
            process.TableDefinition.IdentityColumn,
            process.SqlMergeScripts.SqlMatchQualifierExpression
        );

        //FINALLY Return the updated Entities with the Identity Id if it was Inserted!
        return updatedEntities;
    }
}
/// <summary>
/// BBernard
/// Public Api intended to support Insert, Update, and InsertOrUpdate via the Sql MERGE query.
/// NOTE: This overload is not yet implemented and will always throw <see cref="NotImplementedException"/>.
/// </summary>
/// <param name="entityList">The entities to insert and/or update.</param>
/// <param name="tableName">Target table name.</param>
/// <param name="mergeAction">Flags controlling whether the MERGE performs Insert, Update, or both.</param>
/// <param name="transaction">The Sql transaction within which all work would execute.</param>
/// <param name="matchQualifierExpression">Optional custom match qualifier for the MERGE ON clause.</param>
/// <returns>Never returns; always throws.</returns>
/// <exception cref="NotImplementedException">Always thrown; this overload is not yet implemented.</exception>
public virtual IEnumerable<T> BulkInsertOrUpdate(
    IEnumerable<T> entityList,
    String tableName,
    SqlBulkHelpersMergeAction mergeAction,
    SqlTransaction transaction,
    SqlMergeMatchQualifierExpression matchQualifierExpression = null
)
{
    //Provide a descriptive message (vs. a bare exception) so callers can immediately identify
    //which member is unimplemented from logs/stack traces.
    throw new NotImplementedException(
        $"{nameof(BulkInsertOrUpdate)} is not yet implemented; use the BulkInsertOrUpdateWithIdentityColumn processing methods instead."
    );
}
//TODO: BBernard - If beneficial, we can Add Caching here at this point to cache the fully formed Merge Queries!
/// <summary>
/// Builds the full set of Sql MERGE scripts for the given table definition by delegating
/// to a dedicated <c>SqlBulkHelpersMergeScriptBuilder</c>.
/// </summary>
/// <param name="tableDefinition">Schema definition of the target table.</param>
/// <param name="mergeAction">Flags controlling whether the MERGE performs Insert, Update, or both.</param>
/// <param name="matchQualifierExpression">Optional custom match qualifier; builder defaults apply when null.</param>
/// <returns>The generated merge script results.</returns>
protected virtual SqlMergeScriptResults BuildSqlMergeScriptsHelper(
    SqlBulkHelpersTableDefinition tableDefinition,
    SqlBulkHelpersMergeAction mergeAction,
    SqlMergeMatchQualifierExpression matchQualifierExpression = null
)
{
    //All script generation is encapsulated in the merge script builder.
    return new SqlBulkHelpersMergeScriptBuilder()
        .BuildSqlMergeScripts(tableDefinition, mergeAction, matchQualifierExpression);
}
/// <summary>
/// BBernard - Private process helper to wrap up and encapsulate the initialization logic that is shared across both Async and Sync methods...
/// </summary>
/// <param name="entityList">Materialized list of entities to process.</param>
/// <param name="tableName">Target table name; validated against the Db schema to prevent Sql Injection.</param>
/// <param name="mergeAction">Flags controlling whether the MERGE performs Insert, Update, or both.</param>
/// <param name="transaction">The Sql transaction within which all work executes.</param>
/// <param name="timeoutSeconds">Command/bulk-copy timeout applied to all Sql operations.</param>
/// <param name="matchQualifierExpression">Optional custom match qualifier for the MERGE ON clause.</param>
/// <returns>A fully initialized <see cref="ProcessHelper"/> bundling the table definition, data, scripts, command, and bulk copy helper.</returns>
protected virtual ProcessHelper CreateProcessHelper(
    List<T> entityList,
    String tableName,
    SqlBulkHelpersMergeAction mergeAction,
    SqlTransaction transaction,
    int timeoutSeconds,
    SqlMergeMatchQualifierExpression matchQualifierExpression = null
)
{
    //***STEP #1: Load the Table Schema Definitions (cached after initial Load)!!!
    //BBernard
    //NOTE: Prevent SqlInjection - by validating that the TableName must be a valid value (as retrieved from the DB Schema)
    //      we eliminate risk of Sql Injection.
    //NOTE: All other parameters are Strongly typed (vs raw Strings) thus eliminating risk of Sql Injection.
    var tableDefinition = this.GetTableSchemaDefinition(tableName);

    //***STEP #2: Dynamically Convert All Entities to a DataTable for consumption by the SqlBulkCopy class...
    var dataTable = this.ConvertEntitiesToDataTableHelper(entityList, tableDefinition.IdentityColumn);

    //***STEP #3: Build all of the Sql Scripts needed to Process the entities based on the specified Table definition.
    var mergeScripts = this.BuildSqlMergeScriptsHelper(tableDefinition, mergeAction, matchQualifierExpression);

    //***STEP #4: Dynamically Initialize the Bulk Copy Helper using our Table data and table Definition!
    var bulkCopyHelper = this.CreateSqlBulkCopyHelper(dataTable, tableDefinition, transaction, timeoutSeconds);

    //The command is created against the transaction's connection so all work stays within the transaction scope.
    var sqlCommand = new SqlCommand(String.Empty, transaction.Connection, transaction)
    {
        CommandTimeout = timeoutSeconds
    };

    //Bundle everything up so the Sync & Async processing methods share identical initialization.
    return new ProcessHelper()
    {
        TableDefinition = tableDefinition,
        DataTable = dataTable,
        SqlMergeScripts = mergeScripts,
        SqlCommand = sqlCommand,
        SqlBulkCopy = bulkCopyHelper
    };
}
/// <summary>
/// BBernard
/// This is the Primary Async method that supports Insert, Update, and InsertOrUpdate via the flexibility of the Sql MERGE query!
/// </summary>
/// <param name="entities">The entities to insert and/or update; enumerated exactly once.</param>
/// <param name="tableName">Target table name; validated against the Db schema (see CreateProcessHelper) to prevent Sql Injection.</param>
/// <param name="mergeAction">Flags controlling whether the MERGE performs Insert, Update, or both.</param>
/// <param name="transaction">The Sql transaction within which all work executes.</param>
/// <param name="matchQualifierExpression">
/// Optional custom match qualifier; when null the Identity column is used by default.
/// NOTE: Added (with a default) for parity with the synchronous overload -- existing callers are unaffected.
/// </param>
/// <returns>The original entities post-processed with any newly generated Identity values.</returns>
protected virtual async Task<IEnumerable<T>> BulkInsertOrUpdateWithIdentityColumnAsync(
    IEnumerable<T> entities,
    String tableName,
    SqlBulkHelpersMergeAction mergeAction,
    SqlTransaction transaction,
    SqlMergeMatchQualifierExpression matchQualifierExpression = null
)
{
    //For Performance we ensure the entities are only ever enumerated One Time!
    var entityList = entities.ToList();

    //FIX: Now honors BulkOperationTimeoutSeconds and the optional match qualifier exactly like the
    //     synchronous overload, by calling the shared full-signature CreateProcessHelper.
    var bulkOperationTimeoutSeconds = this.BulkOperationTimeoutSeconds;

    using (ProcessHelper processHelper = this.CreateProcessHelper(
        entityList, tableName, mergeAction, transaction, bulkOperationTimeoutSeconds, matchQualifierExpression))
    {
        var sqlCmd = processHelper.SqlCommand;
        var sqlBulkCopy = processHelper.SqlBulkCopy;
        var sqlScripts = processHelper.SqlMergeScripts;

        //***STEP #4: Create Tables for Buffering Data & Storing Output values
        //NOTE: ConfigureAwait(false) is used on all awaits since this is library code that has no
        //      dependency on a synchronization context (avoids deadlocks when callers block).
        sqlCmd.CommandText = sqlScripts.SqlScriptToInitializeTempTables;
        await sqlCmd.ExecuteNonQueryAsync().ConfigureAwait(false);

        //***STEP #5: Write Data to the Staging/Buffer Table as fast as possible!
        sqlBulkCopy.DestinationTableName = $"[{sqlScripts.TempStagingTableName}]";
        await sqlBulkCopy.WriteToServerAsync(processHelper.DataTable).ConfigureAwait(false);

        //***STEP #6: Merge Data from the Staging Table into the Real Table
        //      and simultaneously Output Identity Id values into Output Temp Table!
        sqlCmd.CommandText = sqlScripts.SqlScriptToExecuteMergeProcess;

        //Execute this script and load the results....
        var mergeResultsList = new List<MergeResult>();
        using (SqlDataReader sqlReader = await sqlCmd.ExecuteReaderAsync().ConfigureAwait(false))
        {
            while (await sqlReader.ReadAsync().ConfigureAwait(false))
            {
                //So far all calls to SqlDataReader have been asynchronous, but since the data reader is in
                //non-sequential mode and ReadAsync was used, the column data should be read synchronously.
                var mergeResult = ReadCurrentMergeResultHelper(sqlReader);
                mergeResultsList.Add(mergeResult);
            }
        }

        //***STEP #7: FINALLY Update all of the original Entities with INSERTED/New Identity Values
        //NOTE: Passes the match qualifier expression through for parity with the synchronous overload.
        var updatedEntityList = this.PostProcessEntitiesWithMergeResults(
            entityList,
            mergeResultsList,
            processHelper.TableDefinition.IdentityColumn,
            processHelper.SqlMergeScripts.SqlMatchQualifierExpression
        );

        //FINALLY Return the updated Entities with the Identity Id if it was Inserted!
        return updatedEntityList;
    }
}
/// <summary>
/// BBernard
/// This is the Primary Synchronous method that supports Insert, Update, and InsertOrUpdate via the flexibility of the Sql MERGE query!
/// </summary>
/// <param name="entities">The entities to insert and/or update; enumerated exactly once.</param>
/// <param name="tableName">Target table name.</param>
/// <param name="mergeAction">Flags controlling whether the MERGE performs Insert, Update, or both.</param>
/// <param name="transaction">The Sql transaction within which all work executes.</param>
/// <returns>The original entities post-processed with any newly generated Identity values.</returns>
protected virtual IEnumerable<T> BulkInsertOrUpdateWithIdentityColumn(IEnumerable<T> entities, String tableName, SqlBulkHelpersMergeAction mergeAction, SqlTransaction transaction)
{
    //For Performance we ensure the entities are only ever enumerated One Time!
    var bufferedEntities = entities.ToList();

    using (ProcessHelper process = this.CreateProcessHelper(bufferedEntities, tableName, mergeAction, transaction))
    {
        var cmd = process.SqlCommand;
        var copier = process.SqlBulkCopy;
        var scripts = process.SqlMergeScripts;

        //***STEP #4: Create Tables for Buffering Data & Storing Output values
        cmd.CommandText = scripts.SqlScriptToInitializeTempTables;
        cmd.ExecuteNonQuery();

        //***STEP #5: Write Data to the Staging/Buffer Table as fast as possible!
        copier.DestinationTableName = $"[{scripts.TempStagingTableName}]";
        copier.WriteToServer(process.DataTable);

        //***STEP #6: Merge Data from the Staging Table into the Real Table
        //      and simultaneously Output Identity Id values into Output Temp Table!
        cmd.CommandText = scripts.SqlScriptToExecuteMergeProcess;

        //Execute this script and load the results....
        var results = new List<MergeResult>();
        using (SqlDataReader reader = cmd.ExecuteReader())
        {
            while (reader.Read())
            {
                results.Add(ReadCurrentMergeResultHelper(reader));
            }
        }

        //***STEP #7: FINALLY Update all of the original Entities with INSERTED/New Identity Values
        var updatedEntities = this.PostProcessEntitiesWithMergeResults(bufferedEntities, results, process.TableDefinition.IdentityColumn);

        //FINALLY Return the updated Entities with the Identity Id if it was Inserted!
        return updatedEntities;
    }
}
/// <summary>
/// Builds the full set of T-SQL scripts (temp-table initialization + MERGE execution) used by the
/// bulk insert/update process, supporting an optional custom match qualifier for the MERGE ON clause.
/// </summary>
/// <param name="tableDefinition">Schema definition of the target table (column names, identity column).</param>
/// <param name="mergeAction">Flags determining whether the MERGE includes an INSERT clause, an UPDATE clause, or both.</param>
/// <param name="matchQualifierExpression">Optional match qualifier; defaults to the Identity column when null.</param>
/// <returns>The generated scripts plus the temp table names and the effective qualifier expression.</returns>
public virtual SqlMergeScriptResults BuildSqlMergeScripts(
    SqlBulkHelpersTableDefinition tableDefinition,
    SqlBulkHelpersMergeAction mergeAction,
    SqlMergeMatchQualifierExpression matchQualifierExpression = null
)
{
    //NOTE: BBernard - This temp table name MUST begin with 1 (and only 1) hash "#" to ensure it is a Transaction Scoped table!
    //      A Guid suffix makes each run's temp table names unique, avoiding collisions between concurrent operations.
    var tempStagingTableName = $"#SqlBulkHelpers_STAGING_TABLE_{Guid.NewGuid()}";
    var tempOutputIdentityTableName = $"#SqlBulkHelpers_OUTPUT_IDENTITY_TABLE_{Guid.NewGuid()}";
    var identityColumnName = tableDefinition.IdentityColumn?.ColumnName ?? String.Empty;
    var columnNamesListWithoutIdentity = tableDefinition.GetColumnNames(false);
    var columnNamesWithoutIdentityCSV = columnNamesListWithoutIdentity.Select(c => $"[{c}]").ToCSV();

    //Dynamically build the Merge Match Qualifier Fields Expression
    //NOTE: This is an optional parameter now, but is initialized to the IdentityColumn by Default!
    var qualifierExpression = matchQualifierExpression ?? new SqlMergeMatchQualifierExpression(identityColumnName);
    var mergeMatchQualifierExpressionText = BuildMergeMatchQualifierExpressionText(qualifierExpression);

    //Initialize/Create the Staging Table!
    //NOTE: The ROWNUMBER_COLUMN_NAME (3rd Column) IS CRITICAL because SqlBulkCopy and the Sql Server OUTPUT clause do not
    //      preserve Order; e.g. it may change based on execution plan (indexes/no indexes, etc.).
    //NOTE(review): if the table has no Identity column then identityColumnName is empty and the generated
    //      "ADD PRIMARY KEY ([])" clause would be invalid Sql -- presumably callers guarantee an Identity column; TODO confirm.
    String sqlScriptToInitializeTempTables = $@" SELECT TOP(0) -1 as [{identityColumnName}], {columnNamesWithoutIdentityCSV}, -1 as [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempStagingTableName}] FROM [{tableDefinition.TableName}]; ALTER TABLE [{tempStagingTableName}] ADD PRIMARY KEY ([{identityColumnName}]); SELECT TOP(0) CAST('' AS nvarchar(10)) as [MERGE_ACTION], CAST(-1 AS int) as [IDENTITY_ID], CAST(-1 AS int) [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempOutputIdentityTableName}]; ";

    //NOTE: This is ALL now completed very efficiently on the Sql Server Database side with
    //      NO unnecessary round trips to the Database!
    //Only emit the INSERT clause when the merge action flags include Insert.
    var mergeInsertSql = String.Empty;
    if (mergeAction.HasFlag(SqlBulkHelpersMergeAction.Insert))
    {
        mergeInsertSql = $@" WHEN NOT MATCHED BY TARGET THEN INSERT ({columnNamesWithoutIdentityCSV}) VALUES ({columnNamesListWithoutIdentity.Select(c => $"source.[{c}]").ToCSV()}) ";
    }

    //Only emit the UPDATE clause when the merge action flags include Update.
    var mergeUpdateSql = String.Empty;
    if (mergeAction.HasFlag(SqlBulkHelpersMergeAction.Update))
    {
        mergeUpdateSql = $@" WHEN MATCHED THEN UPDATE SET {columnNamesListWithoutIdentity.Select(c => $"target.[{c}] = source.[{c}]").ToCSV()} ";
    }

    //Build the FULL Dynamic Merge Script here...
    //BBernard - 2019-08-07
    //NOTE: We now sort on the RowNumber column that we define; this FIXES issue with SqlBulkCopy.WriteToServer()
    //      where the order of data being written is NOT guaranteed, and there is still no support for the ORDER() hint.
    //      In general it results in inverting the order of data being sent in Bulk which then resulted in Identity
    //      values being incorrect based on the order of data specified.
    //NOTE: We MUST SORT the OUTPUT Results by ROWNUMBER and then by IDENTITY Column in case there are multiple matches due to
    //      custom match Qualifiers; this ensures that data is sorted in a way that postprocessing
    //      can occur & be validated as expected.
    String sqlScriptToExecuteMergeProcess = $@" MERGE [{tableDefinition.TableName}] as target USING ( SELECT TOP 100 PERCENT * FROM [{tempStagingTableName}] ORDER BY [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] ASC ) as source ON {mergeMatchQualifierExpressionText} {mergeUpdateSql} {mergeInsertSql} OUTPUT $action, INSERTED.[{identityColumnName}], source.[{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempOutputIdentityTableName}] ([MERGE_ACTION], [IDENTITY_ID], [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}]); SELECT [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}], [IDENTITY_ID], [MERGE_ACTION] FROM [{tempOutputIdentityTableName}] ORDER BY [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] ASC, [IDENTITY_ID] ASC; DROP TABLE [{tempStagingTableName}]; DROP TABLE [{tempOutputIdentityTableName}]; ";

    //Return everything the processing methods need, including the effective qualifier expression used.
    return (new SqlMergeScriptResults(
        tempStagingTableName,
        tempOutputIdentityTableName,
        sqlScriptToInitializeTempTables,
        sqlScriptToExecuteMergeProcess,
        qualifierExpression
    ));
}
/// <summary>
/// Builds the T-SQL scripts (temp-table initialization + MERGE execution) used by the bulk
/// insert/update process, matching rows on the table's Identity column only.
/// </summary>
/// <param name="tableDefinition">Schema definition of the target table (column names, identity column).</param>
/// <param name="mergeAction">Flags determining whether the MERGE includes an INSERT clause, an UPDATE clause, or both.</param>
/// <returns>The generated scripts plus the temp table names.</returns>
public SqlMergeScriptResults BuildSqlMergeScripts(SqlBulkHelpersTableDefinition tableDefinition, SqlBulkHelpersMergeAction mergeAction)
{
    //NOTE: BBernard - This temp table name MUST begin with 1 (and only 1) hash "#" to ensure it is a Transaction Scoped table!
    //      A Guid suffix makes each run's temp table names unique, avoiding collisions between concurrent operations.
    var tempStagingTableName = $"#SqlBulkHelpers_STAGING_TABLE_{Guid.NewGuid()}";
    var tempOutputIdentityTableName = $"#SqlBulkHelpers_OUTPUT_IDENTITY_TABLE_{Guid.NewGuid()}";
    var identityColumnName = tableDefinition.IdentityColumn?.ColumnName ?? String.Empty;
    var columnNamesListWithoutIdentity = tableDefinition.GetColumnNames(false);
    var columnNamesWithoutIdentityCSV = columnNamesListWithoutIdentity.Select(c => $"[{c}]").ToCSV();

    //Initialize/Create the Staging Table!
    //NOTE(review): if the table has no Identity column then identityColumnName is empty and the generated
    //      "ADD PRIMARY KEY ([])" clause would be invalid Sql -- presumably callers guarantee an Identity column; TODO confirm.
    String sqlScriptToInitializeTempTables = $@" SELECT TOP(0) -1 as [{identityColumnName}], {columnNamesWithoutIdentityCSV}, -1 as [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempStagingTableName}] FROM [{tableDefinition.TableName}]; ALTER TABLE [{tempStagingTableName}] ADD PRIMARY KEY ([{identityColumnName}]); SELECT TOP(0) CAST('' AS nvarchar(10)) as [MERGE_ACTION], CAST(-1 AS int) as [IDENTITY_ID], CAST(-1 AS int) [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempOutputIdentityTableName}]; ";

    //NOTE: This is ALL now completed very efficiently on the Sql Server Database side with
    //      NO unnecessary round trips to the Database!
    //Only emit the INSERT clause when the merge action flags include Insert.
    var mergeInsertSql = String.Empty;
    if (mergeAction.HasFlag(SqlBulkHelpersMergeAction.Insert))
    {
        mergeInsertSql = $@" WHEN NOT MATCHED BY TARGET THEN INSERT ({columnNamesWithoutIdentityCSV}) VALUES ({columnNamesListWithoutIdentity.Select(c => $"source.[{c}]").ToCSV()}) ";
    }

    //Only emit the UPDATE clause when the merge action flags include Update.
    var mergeUpdateSql = String.Empty;
    if (mergeAction.HasFlag(SqlBulkHelpersMergeAction.Update))
    {
        mergeUpdateSql = $@" WHEN MATCHED THEN UPDATE SET {columnNamesListWithoutIdentity.Select(c => $"target.[{c}] = source.[{c}]").ToCSV()} ";
    }

    //Build the FULL Dynamic Merge Script here...
    //BBernard - 2019-08-07
    //NOTE: We now sort on the RowNumber column that we define; this FIXES issue with SqlBulkCopy.WriteToServer()
    //      where the order of data being written is NOT guaranteed, and there is still no support for the ORDER() hint.
    //      In general it results in inverting the order of data being sent in Bulk which then resulted in Identity
    //      values being incorrect based on the order of data specified.
    String sqlScriptToExecuteMergeProcess = $@" MERGE [{tableDefinition.TableName}] as target USING ( SELECT TOP 100 PERCENT * FROM [{tempStagingTableName}] ORDER BY [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] ASC ) as source ON target.[{identityColumnName}] = source.[{identityColumnName}] {mergeUpdateSql} {mergeInsertSql} OUTPUT $action, INSERTED.[{identityColumnName}], source.[{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] INTO [{tempOutputIdentityTableName}] ([MERGE_ACTION], [IDENTITY_ID], [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}]); SELECT [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}], [IDENTITY_ID], [MERGE_ACTION] FROM [{tempOutputIdentityTableName}] ORDER BY [{SqlBulkHelpersConstants.ROWNUMBER_COLUMN_NAME}] ASC; DROP TABLE [{tempStagingTableName}]; DROP TABLE [{tempOutputIdentityTableName}]; ";

    //Return everything the processing methods need.
    return (new SqlMergeScriptResults(
        tempStagingTableName,
        tempOutputIdentityTableName,
        sqlScriptToInitializeTempTables,
        sqlScriptToExecuteMergeProcess
    ));
}