/// <summary>
/// Gets the identity <see cref="DbField"/> object.
/// </summary>
/// <param name="request">The request object.</param>
/// <returns>The identity <see cref="DbField"/> object.</returns>
private static DbField GetIdentityField(BaseRequest request)
{
    if (request.Type != null && request.Type != typeof(object))
    {
        // Resolve the database setting once instead of re-resolving it for every cache lookup
        var dbSetting = DbConnectionExtension.GetDbSetting(request.Connection);
        var identityProperty = IdentityCache.Get(request.Type, dbSetting);
        if (identityProperty != null)
        {
            // The identity column is flagged as primary only when it maps to the same
            // (unquoted) column name as the primary property
            var primaryProperty = PrimaryCache.Get(request.Type, dbSetting);
            var isPrimary = false;
            if (primaryProperty != null)
            {
                isPrimary = string.Equals(primaryProperty.GetUnquotedMappedName(),
                    identityProperty.GetUnquotedMappedName(),
                    StringComparison.OrdinalIgnoreCase);
            }
            return new DbField(identityProperty.GetUnquotedMappedName(),
                isPrimary,
                true,
                false,
                identityProperty.PropertyInfo.PropertyType,
                null,
                null,
                null,
                null,
                dbSetting);
        }
    }

    // Fall back to the database metadata when no identity mapping is declared on the type
    return DbFieldCache.Get(request.Connection, request.Name, request.Transaction)?.FirstOrDefault(f => f.IsIdentity);
}
/// <summary>
/// Disposes the current instance of <see cref="QueryMultipleExtractor"/>.
/// </summary>
public void Dispose()
{
    // Release the underlying data reader first
    reader?.Dispose();

    // Close the connection only when this extractor owns its disposal
    if (isDisposeConnection == true)
    {
        connection?.Dispose();
    }

    // Propagate any output parameter values back onto the caller's parameter object
    DbConnectionExtension.SetOutputParameters(param);
}
/// <summary>
/// Bulk insert an instance of <see cref="DataTable"/> object into the database in an asynchronous way.
/// </summary>
/// <param name="connection">The connection object to be used.</param>
/// <param name="tableName">The target table for bulk-insert operation.</param>
/// <param name="dataTable">The <see cref="DataTable"/> object to be used in the bulk-insert operation.</param>
/// <param name="rowState">The state of the rows to be copied to the destination.</param>
/// <param name="mappings">The list of the columns to be used for mappings. If this parameter is not set, then all columns will be used for mapping.</param>
/// <param name="options">The bulk-copy options to be used.</param>
/// <param name="bulkCopyTimeout">The timeout in seconds to be used.</param>
/// <param name="batchSize">The size per batch to be used.</param>
/// <param name="transaction">The transaction to be used.</param>
/// <returns>The number of rows affected by the execution.</returns>
internal static async Task<int> BulkInsertAsyncInternal(SqlConnection connection,
    string tableName,
    DataTable dataTable,
    DataRowState rowState = DataRowState.Unchanged,
    IEnumerable<BulkInsertMapItem> mappings = null,
    SqlBulkCopyOptions options = SqlBulkCopyOptions.Default,
    int? bulkCopyTimeout = null,
    int? batchSize = null,
    SqlTransaction transaction = null)
{
    // Validate the objects
    DbConnectionExtension.ValidateTransactionConnectionObject(connection, transaction);

    // Variables for the operation
    var result = 0;

    // Actual Execution
    using (var sqlBulkCopy = new SqlBulkCopy(connection, options, transaction))
    {
        // Set the destination table
        sqlBulkCopy.DestinationTableName = tableName;

        // Set the timeout (HasValue alone already covers the null check)
        if (bulkCopyTimeout.HasValue)
        {
            sqlBulkCopy.BulkCopyTimeout = bulkCopyTimeout.Value;
        }

        // Set the batch size
        if (batchSize.HasValue)
        {
            sqlBulkCopy.BatchSize = batchSize.Value;
        }

        // Add the mappings
        if (mappings == null)
        {
            // Variables needed
            var dbSetting = connection.GetDbSetting();
            var dbFields = await DbFieldCache.GetAsync(connection, tableName, transaction);
            var fields = GetDataColumns(dataTable).Select(column => column.ColumnName);
            var filteredFields = new List<Tuple<string, string>>();

            // Match the source columns to the database fields case-insensitively
            // to fix the casing problem of the bulk inserts
            foreach (var dbField in dbFields)
            {
                var field = fields.FirstOrDefault(f =>
                    string.Equals(f.AsUnquoted(true, dbSetting), dbField.Name.AsUnquoted(true, dbSetting), StringComparison.OrdinalIgnoreCase));
                if (field != null)
                {
                    filteredFields.Add(new Tuple<string, string>(field, dbField.Name.AsUnquoted(true, dbSetting)));
                }
            }

            // Iterate the filtered fields
            foreach (var field in filteredFields)
            {
                sqlBulkCopy.ColumnMappings.Add(field.Item1, field.Item2);
            }
        }
        else
        {
            // Iterate the provided mappings
            foreach (var mapItem in mappings)
            {
                sqlBulkCopy.ColumnMappings.Add(mapItem.SourceColumn, mapItem.DestinationColumn);
            }
        }

        // Open the connection asynchronously (previously the synchronous EnsureOpen();
        // now non-blocking and consistent with the DbDataReader overload) and do the operation
        await connection.EnsureOpenAsync();
        await sqlBulkCopy.WriteToServerAsync(dataTable, rowState);

        // Set the return value from the rows that were in the requested state
        result = GetDataRows(dataTable, rowState).Count();
    }

    // Result
    return result;
}
/// <summary>
/// Bulk insert an instance of <see cref="DbDataReader"/> object into the database in an asynchronous way.
/// </summary>
/// <param name="connection">The connection object to be used.</param>
/// <param name="tableName">The target table for bulk-insert operation.</param>
/// <param name="reader">The <see cref="DbDataReader"/> object to be used in the bulk-insert operation.</param>
/// <param name="mappings">The list of the columns to be used for mappings. If this parameter is not set, then all columns will be used for mapping.</param>
/// <param name="options">The bulk-copy options to be used.</param>
/// <param name="bulkCopyTimeout">The timeout in seconds to be used.</param>
/// <param name="batchSize">The size per batch to be used.</param>
/// <param name="transaction">The transaction to be used.</param>
/// <returns>The number of rows affected by the execution.</returns>
internal static async Task<int> BulkInsertAsyncInternal(SqlConnection connection,
    string tableName,
    DbDataReader reader,
    IEnumerable<BulkInsertMapItem> mappings = null,
    SqlBulkCopyOptions options = SqlBulkCopyOptions.Default,
    int? bulkCopyTimeout = null,
    int? batchSize = null,
    SqlTransaction transaction = null)
{
    // Validate the objects
    DbConnectionExtension.ValidateTransactionConnectionObject(connection, transaction);

    // Variables for the operation
    var result = 0;

    // Actual Execution
    using (var sqlBulkCopy = new SqlBulkCopy(connection, options, transaction))
    {
        // Set the destination table
        sqlBulkCopy.DestinationTableName = tableName;

        // Set the timeout (HasValue alone already covers the null check)
        if (bulkCopyTimeout.HasValue)
        {
            sqlBulkCopy.BulkCopyTimeout = bulkCopyTimeout.Value;
        }

        // Set the batch size
        if (batchSize.HasValue)
        {
            sqlBulkCopy.BatchSize = batchSize.Value;
        }

        // Add the mappings
        if (mappings == null)
        {
            // Variables needed
            var dbSetting = connection.GetDbSetting();
            var dbFields = await DbFieldCache.GetAsync(connection, tableName, transaction);
            var fields = Enumerable.Range(0, reader.FieldCount).Select((index) => reader.GetName(index));
            var filteredFields = new List<Tuple<string, string>>();

            // Match the reader fields to the database fields case-insensitively
            // to fix the casing problem of the bulk inserts
            foreach (var dbField in dbFields)
            {
                var readerField = fields.FirstOrDefault(field =>
                    string.Equals(field.AsUnquoted(true, dbSetting), dbField.Name.AsUnquoted(true, dbSetting), StringComparison.OrdinalIgnoreCase));
                if (!string.IsNullOrEmpty(readerField))
                {
                    filteredFields.Add(new Tuple<string, string>(readerField, dbField.Name.AsUnquoted(true, dbSetting)));
                }
            }

            // Iterate the filtered fields
            foreach (var field in filteredFields)
            {
                sqlBulkCopy.ColumnMappings.Add(field.Item1, field.Item2);
            }
        }
        else
        {
            // Iterate the provided mappings
            foreach (var mapItem in mappings)
            {
                sqlBulkCopy.ColumnMappings.Add(mapItem.SourceColumn, mapItem.DestinationColumn);
            }
        }

        // Open the connection and do the operation
        await connection.EnsureOpenAsync();
        await sqlBulkCopy.WriteToServerAsync(reader);

        // 'SqlBulkCopy' does not publicly expose the copied-row count, so reflect into
        // its private rows-copied field; fall back to the reader's RecordsAffected
        var copiedField = GetRowsCopiedField();

        // Set the return value
        result = copiedField != null ? (int)copiedField.GetValue(sqlBulkCopy) : reader.RecordsAffected;
    }

    // Result
    return result;
}