/// <summary>
/// Bulk insert json data to a SQL table. https://github.com/FrendsPlatform/Frends.Sql
/// The input JSON array is wrapped in an object and deserialized into a <see cref="DataSet"/>;
/// its first table is then written to SQL Server with <see cref="SqlBulkCopy"/>.
/// </summary>
/// <param name="input">Input parameters</param>
/// <param name="options">Optional parameters with default values</param>
/// <param name="cancellationToken">Cancels the connection open and the bulk copy.</param>
/// <returns>Copied row count</returns>
public static async Task<int> BulkInsert([PropertyTab] BulkInsertInput input, [PropertyTab] BulkInsertOptions options, CancellationToken cancellationToken)
{
    // Wrap the JSON array so it deserializes as a DataSet containing a single table named "Table".
    var inputJson = "{\"Table\" : " + input.InputData + " }";
    var dataset = JsonConvert.DeserializeObject<DataSet>(inputJson);
    using (var connection = new SqlConnection(input.ConnectionString))
    {
        await connection.OpenAsync(cancellationToken).ConfigureAwait(false);
        cancellationToken.ThrowIfCancellationRequested();

        //Get the combined flags for multiple booleans that match a flag
        var flagEnum = options.FireTriggers.GetFlag(SqlBulkCopyOptions.FireTriggers) |
                       options.KeepIdentity.GetFlag(SqlBulkCopyOptions.KeepIdentity);

        if (options.ConvertEmptyPropertyValuesToNull)
        {
            // convert string.Empty values to null (this allows inserting data to fields which are different than text (int, ..)
            dataset.SetEmptyDataRowsToNull();
        }

        if (options.SqlTransactionIsolationLevel == SqlTransactionIsolationLevel.None)
        {
            // Non-transactional path. NOTE(review): this overload takes a connection *string*,
            // so SqlBulkCopy opens its own second connection instead of reusing 'connection' — confirm intended.
            using (var sqlBulkCopy = new SqlBulkCopy(connection.ConnectionString, flagEnum))
            {
                sqlBulkCopy.BulkCopyTimeout = options.CommandTimeoutSeconds;
                sqlBulkCopy.DestinationTableName = input.TableName;
                await sqlBulkCopy.WriteToServerAsync(dataset.Tables[0], cancellationToken).ConfigureAwait(false);
                return (sqlBulkCopy.RowsCopiedCount());
            }
        }

        // Transactional path: Default uses the provider's default isolation level, otherwise the
        // requested level is mapped. If WriteToServerAsync throws, disposing the uncommitted
        // transaction rolls it back.
        using (var transaction = options.SqlTransactionIsolationLevel == SqlTransactionIsolationLevel.Default ?
                   connection.BeginTransaction() :
                   connection.BeginTransaction(options.SqlTransactionIsolationLevel.GetSqlTransactionIsolationLevel()))
        {
            int rowsCopyCount;
            using (var sqlBulkCopy = new SqlBulkCopy(connection, flagEnum, transaction))
            {
                sqlBulkCopy.BulkCopyTimeout = options.CommandTimeoutSeconds;
                sqlBulkCopy.DestinationTableName = input.TableName;
                await sqlBulkCopy.WriteToServerAsync(dataset.Tables[0], cancellationToken).ConfigureAwait(false);
                // Capture the copied-row count before the bulk copy is disposed.
                rowsCopyCount = sqlBulkCopy.RowsCopiedCount();
            }
            transaction.Commit();
            return (rowsCopyCount);
        }
    }
}
/// <summary>
/// Imports a CSV tax-record file: updates changed constituents, bulk-inserts new constituents
/// and all tax records, and returns a status summary of the load.
/// </summary>
/// <param name="filePath">Full path to the uploaded CSV file; may be null.</param>
/// <returns>A DatabaseStatusViewModel describing record counts, success and elapsed time.</returns>
private async Task<DatabaseStatusViewModel> ProcessDataFile(string filePath)
{
    var startTime = DateTime.Now;
    if (filePath == null || !File.Exists(filePath))
    {
        // FIX: the original return statement was unterminated (its ';' sat outside the if-block).
        return new DatabaseStatusViewModel() { Message = "File does not exist or No file was uploaded" };
    }

    var status = new DatabaseStatusViewModel()
    {
        Success = false,
        RecordsInFile = 0,
        RecordsLoaded = 0,
    };

    // Lenient CSV settings: ignore bad/missing fields and empty records rather than throwing.
    var config = new CsvConfiguration()
    {
        IsHeaderCaseSensitive = false,
        WillThrowOnMissingField = false,
        IgnoreReadingExceptions = true,
        ThrowOnBadData = false,
        SkipEmptyRecords = true,
    };

    // FIX: the reader was only disposed on the happy path (csv.Dispose() at the end), leaking the
    // file handle whenever an exception escaped. A using-block guarantees disposal.
    using (var csv = new CsvReader(new StreamReader(filePath, Encoding.Default, true), config))
    {
        csv.Configuration.RegisterClassMap<CsvMap>();
        var csvTaxRecords = csv.GetRecords<CsvTaxRecordViewModel>().ToList();

        // One constituent per distinct LookupId in the file, projected to view models.
        var csvConstituents = csvTaxRecords.DistinctBy(m => m.LookupId).AsQueryable().ProjectTo<ConstituentViewModel>().ToList();
        var dbConstituents = db.Constituents.ProjectTo<ConstituentViewModel>().ToList();
        var newConstituentList = csvConstituents.Except(dbConstituents, new ConstituentIdComparer()).ToList();
        var existingConstituentList = csvConstituents.Except(newConstituentList, new ConstituentIdComparer());
        var constituentChangeList = existingConstituentList.Except(dbConstituents, new ConstituentComparer());

        // Update existing constituents that differ from database
        foreach (var vm in constituentChangeList)
        {
            ConstituentViewModel cvm = dbConstituents.FirstOrDefault(x => x.LookupId == vm.LookupId);
            if (cvm == null)
            {
                continue;
            }
            vm.Id = cvm.Id;
            vm.UpdatedBy = "system";
            vm.UpdatedDate = DateTime.Now;
            cvm.CopyPropertiesFrom(vm);
            var constituent = Mapper.Map<ConstituentViewModel, Constituent>(cvm);
            db.Constituents.AddOrUpdate(constituent);
        }
        status.ConstituentsUpdated = db.SaveChanges();

        // Add new Constituents missing from database via bulk copy
        //TODO: Change Created and Updated user to logged in user
        if (newConstituentList.Count > 0)
        {
            foreach (var vm in newConstituentList)
            {
                vm.CreatedBy = "system";
                vm.UpdatedBy = "system";
                vm.CreatedDate = DateTime.Now;
                vm.UpdatedDate = DateTime.Now;
            }
            var missingTbl = newConstituentList.ToDataTable();
            using (var sbc = new SqlBulkCopy(db.Database.Connection.ConnectionString))
            {
                sbc.DestinationTableName = db.GetTableName<Constituent>();
                sbc.BatchSize = 10000;
                sbc.BulkCopyTimeout = 0; // no timeout
                foreach (var col in missingTbl.Columns)
                {
                    sbc.ColumnMappings.Add(col.ToString(), col.ToString());
                }
                try
                {
                    await sbc.WriteToServerAsync(missingTbl);
                    status.ConstituentsCreated = sbc.RowsCopiedCount();
                }
                catch (Exception e)
                {
                    // best-effort: record the failure but keep processing tax records
                    status.Message = e.Message;
                }
            }
        }

        // Re-read constituents so newly bulk-copied rows get their database keys
        dbConstituents = db.Constituents.ProjectTo<ConstituentViewModel>().ToList();
        // Build dictionary to map database key to csv records LookupId
        var dic = new Dictionary<int, string>();
        dbConstituents.ForEach(x => dic.Add(x.Id, x.LookupId));
        // Update parent key for each tax record and stamp audit fields
        csvTaxRecords.ForEach((s) =>
        {
            s.ConstituentId = dic.FirstOrDefault(d => d.Value == s.LookupId).Key;
            s.CreatedBy = "system";
            s.UpdatedBy = "system";
            s.CreatedDate = DateTime.Now;
            s.UpdatedDate = DateTime.Now;
        });

        // Bulk insert new tax records
        using (var sbc = new SqlBulkCopy(db.Database.Connection.ConnectionString))
        {
            sbc.DestinationTableName = db.GetTableName<TaxItem>();
            sbc.BatchSize = 10000;
            sbc.BulkCopyTimeout = 0;
            var dt = Mapper.Map<List<CsvTaxRecordViewModel>, List<TaxItem>>(csvTaxRecords).ToDataTable();
            foreach (var col in dt.Columns)
            {
                sbc.ColumnMappings.Add(col.ToString(), col.ToString());
            }
            try
            {
                await sbc.WriteToServerAsync(dt);
                status.RecordsLoaded = sbc.RowsCopiedCount();
            }
            catch (Exception ex)
            {
                status.Message = ex.Message;
            }
        }

        status.RecordsInFile = csvTaxRecords.Count;
        // FIX: removed a dead "status.Success = true;" that was unconditionally overwritten below.
        // NOTE(review): csv.Row - 2 assumes exactly one header row — confirm against the CSV layout.
        if (csvTaxRecords.Count != csv.Row - 2)
        {
            status.Message = "Error in file header mappings. Check file headers and try again.";
            status.Success = false;
        }
        else
        {
            status.Success = true;
            status.Message = "Successfully loaded tax records.";
        }
        status.TotalTime = DateTime.Now.Subtract(startTime).ToString(@"hh\:mm\:ss");
        return status;
    }
}
} // end of enclosing type (header outside this view)
/// <summary>
/// Executes the specified arguments: builds an in-memory DataTable from the entries,
/// bulk-copies it into the temporary staging table "#TableName", then runs the
/// configured command (or a custom execute action) against the staged data.
/// </summary>
/// <typeparam name="TEntry">The type of the entry.</typeparam>
/// <param name="arguments">The arguments.</param>
/// <exception cref="System.InvalidOperationException">No active database connection.</exception>
private void Execute<TEntry>(ExecutionArguments<TEntry> arguments)
{
    /////
    // Argument checks. CommandText may be omitted only when command execution is skipped.
    /////
    if (arguments == null || arguments.Entries == null || arguments.GetColumnsAction == null || string.IsNullOrEmpty(arguments.TableName) || arguments.PopulateRowAction == null || (string.IsNullOrEmpty(arguments.CommandText) && !arguments.SkipCommandExec))
    {
        return;
    }

    // Avoid re-enumerating the source if it is already a list.
    IList<TEntry> entryList = arguments.Entries as IList<TEntry> ?? arguments.Entries.ToList();

    /////
    // Early out.
    /////
    if (entryList.Count <= 0)
    {
        return;
    }

    var connection = DatabaseContext.GetUnderlyingConnection() as SqlConnection;
    if (connection == null)
    {
        throw new InvalidOperationException("No active database connection.");
    }

    var table = new DataTable();

    /////
    // Populate the columns.
    /////
    table.Columns.AddRange(arguments.GetColumnsAction());

    /////
    // Populate the rows. Entries whose populate action does not report Success are skipped.
    /////
    foreach (TEntry entry in entryList)
    {
        DataRow row = table.NewRow();
        if (arguments.PopulateRowAction(entry, row) != PopulateRowResult.Success)
        {
            continue;
        }
        table.Rows.Add(row);
    }

    if (table.Rows.Count > 0 && !arguments.SkipCommandExec)
    {
        using (IDbCommand command = CreateCommand())
        {
            /////
            // Clear existing data from the temp staging table.
            /////
            command.CommandText = string.Format("TRUNCATE TABLE #{0}", arguments.TableName);
            command.CommandType = CommandType.Text;
            command.ExecuteNonQuery();

            int rowsCopied;

            /////
            // Bulk load into the staging table.
            /////
            using (var bulkCopy = new SqlBulkCopy(connection))
            {
                bulkCopy.BulkCopyTimeout = 600; // seconds
                bulkCopy.NotifyAfter = 100;     // progress callback granularity (rows)

                if (arguments.Context != null)
                {
                    bulkCopy.SqlRowsCopied += (sender, args) => arguments.Context.WriteProgress(string.Format("{0} {1} data... {2} rows", arguments.ExecuteAction, arguments.TableName, args.RowsCopied));
                }

                bulkCopy.DestinationTableName = string.Format("#{0}", arguments.TableName);
                bulkCopy.WriteToServer(table);
                // Capture the count before the bulk copy is disposed.
                rowsCopied = bulkCopy.RowsCopiedCount();
            }

            if (arguments.SetCopiedCountAction != null)
            {
                arguments.SetCopiedCountAction(rowsCopied);
            }

            // Reuse the same command object for the main statement that consumes the staged rows.
            command.CommandText = arguments.CommandText;
            command.CommandType = CommandType.Text;

            /////
            // Additional command setup.
            /////
            if (arguments.SetupCommandAction != null)
            {
                arguments.SetupCommandAction(command);
            }

            if (arguments.Context != null)
            {
                arguments.Context.WriteInfo(string.Format("Committing {0} data...", arguments.TableName));
            }

            // A custom execute action, when supplied, replaces the default ExecuteNonQuery.
            int executeRows = arguments.CustomCommandExecuteAction != null ? arguments.CustomCommandExecuteAction(command) : command.ExecuteNonQuery();

            if (arguments.SetExecuteCountAction != null)
            {
                arguments.SetExecuteCountAction(executeRows);
            }
        }
    }
}
/// <summary>
/// Exports every selected model table to SQL Server. Rows are read from the DAX connection in
/// batches of maxBatchSize (via TOPNSKIP when the connection supports it, otherwise one full
/// EVALUATE per table) and each batch is bulk-copied inside its own SQL transaction.
/// </summary>
/// <param name="connStr">Target SQL Server connection string; validated via SqlConnectionStringBuilder.</param>
/// <param name="schemaName">Schema used to qualify each destination table name.</param>
/// <param name="truncateTables">When true, truncates each destination table before the first batch.</param>
private void ExportDataToSQLServer(string connStr, string schemaName, bool truncateTables)
{
    var metadataPane = this.Document.MetadataPane;
    var cancellationTokenSource = new CancellationTokenSource();
    SqlConnectionStringBuilder builder;
    try
    {
        builder = new SqlConnectionStringBuilder(connStr);
    }
    catch (ArgumentException ex)
    {
        // wrap this exception and include the connection string that we could not parse
        throw new ArgumentException($"Error parsing connections string: {connStr} - {ex.Message}", ex);
    }
    builder.ApplicationName = "DAX Studio Table Export";
    currentTableIdx = 0;
    var selectedTables = Tables.Where(t => t.IsSelected);
    totalTableCnt = selectedTables.Count();
    var connRead = Document.Connection;

    // no tables were selected so exit here
    if (totalTableCnt == 0)
    {
        return;
    }

    // TODO: Use async but to be well done need to apply async on the DBCommand & DBConnection
    // TODO: Show warning message?
    if (metadataPane.SelectedModel == null)
    {
        return;
    }
    try
    {
        Document.QueryStopWatch.Start();
        using (var conn = new SqlConnection(builder.ToString()))
        {
            conn.Open();
            foreach (var table in selectedTables)
            {
                try
                {
                    EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(table));
                    currentTable = table;
                    currentTable.Status = ExportStatus.Exporting;
                    currentTableIdx++;

                    // get a count of the total rows in the table
                    var daxRowCount = $"EVALUATE ROW(\"RowCount\", COUNTROWS( {table.DaxName} ) )";
                    DataTable dtRows = connRead.ExecuteDaxQueryDataTable(daxRowCount);
                    var totalRows = dtRows.Rows[0].Field<long>(0);
                    currentTable.TotalRows = totalRows;

                    using (var statusMsg = new StatusBarMessage(Document, $"Exporting {table.Caption}"))
                    {
                        for (long batchRows = 0; batchRows < totalRows; batchRows += maxBatchSize)
                        {
                            var daxQuery = $"EVALUATE {table.DaxName}";
                            // if the connection supports TOPNSKIP then use that to query batches of rows
                            if (connRead.AllFunctions.Contains("TOPNSKIP"))
                            {
                                daxQuery = $"EVALUATE TOPNSKIP({maxBatchSize}, {batchRows}, {table.DaxName} )";
                            }
                            using (var reader = connRead.ExecuteReader(daxQuery))
                            {
                                sqlTableName = $"[{schemaName}].[{table.Caption}]";
                                sqlBatchRows = batchRows;
                                // if this is the first batch ensure the table exists
                                if (batchRows == 0)
                                {
                                    EnsureSQLTableExists(conn, sqlTableName, reader);
                                }
                                using (var transaction = conn.BeginTransaction())
                                {
                                    // truncate only once, before the first batch
                                    if (truncateTables && batchRows == 0)
                                    {
                                        using (var cmd = new SqlCommand($"truncate table {sqlTableName}", conn))
                                        {
                                            cmd.Transaction = transaction;
                                            cmd.ExecuteNonQuery();
                                        }
                                    }
                                    // NOTE(review): this SqlBulkCopy instance is never disposed.
                                    var sqlBulkCopy = new SqlBulkCopy(conn, SqlBulkCopyOptions.TableLock, transaction); //)//, transaction))
                                    sqlBulkCopy.DestinationTableName = sqlTableName;
                                    sqlBulkCopy.BatchSize = 5000;
                                    sqlBulkCopy.NotifyAfter = 5000;
                                    sqlBulkCopy.SqlRowsCopied += SqlBulkCopy_SqlRowsCopied;
                                    sqlBulkCopy.EnableStreaming = true;
                                    // Copy asynchronously, then block while polling for user cancellation.
                                    var task = sqlBulkCopy.WriteToServerAsync(reader, cancellationTokenSource.Token);
                                    WaitForTaskPollingForCancellation(cancellationTokenSource, task);

                                    // update the currentTable with the final rowcount
                                    currentTable.RowCount = sqlBulkCopy.RowsCopiedCount() + batchRows;
                                    if (CancelRequested)
                                    {
                                        transaction.Rollback();
                                        currentTable.Status = ExportStatus.Cancelled;
                                    }
                                    else
                                    {
                                        transaction.Commit();
                                        if (currentTable.RowCount >= currentTable.TotalRows)
                                        {
                                            currentTable.Status = ExportStatus.Done;
                                        }
                                    }
                                } // end transaction
                            } // end using reader

                            // exit the loop here if the connection does not support TOPNSKIP
                            // (the single EVALUATE already exported the whole table)
                            if (!connRead.AllFunctions.Contains("TOPNSKIP"))
                            {
                                break;
                            }
                        } // end rowBatch
                    }

                    // jump out of table loop if we have been cancelled
                    if (CancelRequested)
                    {
                        EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Warning, "Data Export Cancelled"));
                        // mark an tables not yet exported as skipped
                        MarkWaitingTablesAsSkipped();
                        break;
                    }

                    EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Information, exportTableMsg.Format(table.RowCount, table.RowCount == 1 ? "" : "s", sqlTableName)));
                    currentTable.Status = ExportStatus.Done;
                }
                catch (Exception ex)
                {
                    // per-table failure: report, mark the table, and move on
                    currentTable.Status = ExportStatus.Error;
                    Log.Error(ex, "{class} {method} {message}", nameof(ExportDataWizardViewModel), nameof(ExportDataToSQLServer), ex.Message);
                    EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Error, $"Error exporting data to SQL Server Table: {ex.Message}"));
                    EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
                    continue; // skip to next table on error
                }
            } // end foreach table
        }
        Document.QueryStopWatch.Stop();
        EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Information, exportCompleteMsg.Format(currentTableIdx), Document.QueryStopWatch.ElapsedMilliseconds));
        EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
        Document.QueryStopWatch.Reset();
    }
    catch (Exception ex)
    {
        // connection-level failure: attribute the error to the current (or first) table
        Document.QueryStopWatch.Stop();
        if (currentTable == null && totalTableCnt > 0)
        {
            currentTable = selectedTables.FirstOrDefault();
        }
        if (currentTable != null)
        {
            currentTable.Status = ExportStatus.Error;
        }
        Log.Error(ex, "{class} {method} {message}", nameof(ExportDataWizardViewModel), nameof(ExportDataToSQLServer), ex.Message);
        EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Error, $"Error exporting data to SQL Server: {ex.Message}"));
        EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
    }
}
/// <summary>
/// Executes the specified entries: builds an in-memory DataTable, optionally deletes the
/// current application version's rows from the target table, bulk-copies the new rows in,
/// then runs an optional custom command action.
/// </summary>
/// <typeparam name="TEntry">The type of the entry.</typeparam>
/// <param name="arguments">The arguments.</param>
/// <exception cref="System.InvalidOperationException">No active database connection.</exception>
private void Execute<TEntry>(LibraryAppTargetExecutionArguments<TEntry> arguments)
{
    /////
    // Argument checks.
    /////
    if (arguments == null || arguments.Entries == null || arguments.GetColumnsAction == null || string.IsNullOrEmpty(arguments.TableName) || arguments.PopulateRowAction == null)
    {
        return;
    }

    // Avoid re-enumerating the source if it is already a list.
    IList<TEntry> entryList = arguments.Entries as IList<TEntry> ?? arguments.Entries.ToList();

    /////
    // Early out — unlike the staging-table overload, still report a copied count of zero.
    /////
    if (entryList.Count <= 0)
    {
        if (arguments.SetCopiedCountAction != null)
        {
            arguments.SetCopiedCountAction(0);
        }
        return;
    }

    var connection = DatabaseContext.GetUnderlyingConnection() as SqlConnection;
    if (connection == null)
    {
        throw new InvalidOperationException("No active database connection.");
    }

    var table = new DataTable();

    /////
    // Populate the columns.
    /////
    table.Columns.AddRange(arguments.GetColumnsAction());

    /////
    // Populate the rows. Entries whose populate action does not report Success are skipped.
    /////
    foreach (TEntry entry in entryList)
    {
        DataRow row = table.NewRow();
        if (arguments.PopulateRowAction(entry, row) != PopulateRowResult.Success)
        {
            continue;
        }
        table.Rows.Add(row);
    }

    using (IDbCommand command = CreateCommand())
    {
        if (arguments.ClearExistingData)
        {
            /////
            // Clear existing data for the current application version only.
            /////
            command.CommandText = string.Format("DELETE FROM {0} WHERE AppVerUid = @appVer", arguments.TableName);
            command.AddParameterWithValue("@appVer", ApplicationVersionId);
            command.CommandType = CommandType.Text;
            command.ExecuteNonQuery();
        }

        int rowsCopied = 0;

        if (table.Rows.Count > 0)
        {
            /////
            // Bulk load into the staging table.
            /////
            using (var bulkCopy = new SqlBulkCopy(connection))
            {
                bulkCopy.BulkCopyTimeout = 600; // seconds
                bulkCopy.NotifyAfter = 100;     // progress callback granularity (rows)

                // Strip decoration ("#", "App", "Data", "_") from the name purely for progress messages.
                string sanitizedTableName = arguments.TableName.Replace("#", "").Replace("App", "").Replace("Data", "").Replace("_", "");

                if (arguments.Context != null)
                {
                    bulkCopy.SqlRowsCopied += (sender, args) => arguments.Context.WriteProgress(string.Format("Copying {0} data... {1} rows", sanitizedTableName, args.RowsCopied));
                }

                bulkCopy.DestinationTableName = string.Format("{0}", arguments.TableName);
                bulkCopy.WriteToServer(table);
                // Capture the count before the bulk copy is disposed.
                rowsCopied = bulkCopy.RowsCopiedCount();
            }
        }

        if (arguments.SetCopiedCountAction != null)
        {
            arguments.SetCopiedCountAction(rowsCopied);
        }

        if (arguments.CustomCommandExecuteAction != null)
        {
            arguments.CustomCommandExecuteAction(command);
        }
    }
}
/// <summary>
/// Entry point for the csv2mssql importer. Reads a CSV, Excel (.xls/.xlsx) or Access
/// (.accdb/.mdb) file, creates a matching SQL Server table per sheet/table, and bulk-copies
/// the rows into it. Usage: csv2mssql.exe &lt;ConnectionString&gt; &lt;Filename&gt; [/single]
/// </summary>
static void Main(string[] args)
{
    bool singletable = false;
    char csvSeperatorChar = '\t'; // default; re-detected from the first line in csv mode
    Console.WriteLine("HolyOne csv 2 mssql importer, by Aytek Ustundag, www.tahribat.com");
    if (args.Length < 2)
    {
        Console.WriteLine("USAGE:");
        Console.WriteLine("csv2mssql.exe <ConnectionString> <Filename>");
        Console.WriteLine("EXAMPLE:");
        Console.WriteLine(@"csv2mssql.exe ""Data Source=(local);Initial Catalog=dbname;Integrated Security=SSPI"" ""data.csv""");
        Console.WriteLine(@"csv2mssql.exe ""Data Source=(local);Initial Catalog=dbname;Integrated Security=SSPI"" ""excelfile.xlsx""");
        Console.WriteLine("");
        return;
    }
    string filename = args[1];
    string connstr = args[0];
    if (args.Length >= 3)
    {
        // optional third argument: /single merges all Excel sheets into one table
        singletable = args[2].Equals("/single", StringComparison.InvariantCultureIgnoreCase);
    }
    if (!System.IO.File.Exists(filename))
    {
        Console.WriteLine(@"Input file ""{0}"" not found", filename);
        return;
    }

    //string baglantiCumlesi = "Provider=Microsoft.ACE.OLEDB.12.0;Data Source=|DataDirectory|\\Database1.accdb;Persist Security Info=False;";
    // foreach (string filename in args) {
    //string filename = "x.csv";
    /* string bulk_data_filename = "x.csv";
     * StreamReader file = new StreamReader(bulk_data_filename);
     * CsvReader csv = new CsvReader(file, true, ',');
     * SqlBulkCopy copy = new SqlBulkCopy(conn);
     * copy.DestinationTableName = System.IO.Path.GetFileNameWithoutExtension(bulk_data_filename);
     * copy.WriteToServer(csv); */

    // Destination table name defaults to the file name without extension.
    string tablename = System.IO.Path.GetFileNameWithoutExtension(filename);
    string ext = System.IO.Path.GetExtension(filename);
    List<DataTable> dts = new List<DataTable>();
    Ftype mode = Ftype.csv;
    if (ext.Equals(".xls", StringComparison.InvariantCultureIgnoreCase) || ext.Equals(".xlsx", StringComparison.InvariantCultureIgnoreCase))
    {
        mode = Ftype.xls;
        dts = exceldata(filename, singletable);
    }
    else if (ext.Equals(".accdb", StringComparison.InvariantCultureIgnoreCase) || ext.Equals(".mdb", StringComparison.InvariantCultureIgnoreCase))
    {
        mode = Ftype.mdb;
        dts = access(filename);
    }
    else
    {
        //csv mode
        using (var csvStreamReader = new StreamReader(filename))
        using (CsvReader csvReader = new CsvReader(csvStreamReader, true))
        {
            // Pre-scan up to 100 lines: detect the separator from the first line and build a
            // sample DataTable whose columns are used for the CREATE TABLE script.
            DataTable dt = new DataTable(tablename);
            int tmpcnt = 0;
            string myline = "";
            while (tmpcnt < 100)
            {
                myline = csvStreamReader.ReadLine();
                if (myline == null)
                {
                    break;
                }
                if (tmpcnt == 0)
                {
                    //first line: the candidate separator that occurs most often wins
                    Dictionary<char, int> charcounter = new Dictionary<char, int>();
                    foreach (char cx in sepchars)
                    {
                        int chcount = myline.Count(f => f == cx);
                        charcounter.Add(cx, chcount);
                    }
                    charcounter = charcounter.OrderByDescending(x => x.Value).ToDictionary(x => x.Key, x => x.Value);
                    csvSeperatorChar = charcounter.First().Key;
                    Console.WriteLine("Resolving seperator char:" + ((csvSeperatorChar == '\t') ? "<TAB>" : (csvSeperatorChar.ToString())));
                }
                tmpcnt++;
                string[] cells = myline.Replace(@"""", "").Split(new char[] { csvSeperatorChar });
                // Grow the column set as wider rows appear.
                // NOTE(review): the header line is added both as column names AND as a data
                // row here — confirm that is intended.
                while (dt.Columns.Count < cells.Length)
                {
                    dt.Columns.Add(cells[dt.Columns.Count]);
                }
                dt.Rows.Add(cells);
            }
            // dt.Load(csvReader);
            dts.Add(dt);
        }
    }
    if (dts.Count == 0)
    {
        Console.WriteLine("No data table found to import");
        return;
    }
    foreach (DataTable dt in dts)
    {
        // Try to create the destination table; if it already exists, warn and append.
        string str = BuildCreateTableScript(dt);
        SqlConnection conn = new SqlConnection(connstr);
        conn.Open();
        try
        {
            using (SqlCommand cmd = new SqlCommand(str, conn))
            {
                cmd.ExecuteNonQuery();
            }
        }
        catch (Exception exx)
        {
            Console.WriteLine("\tWarning:" + exx.Message + " ,Appending...");
        }
        // NOTE(review): dtexcelSingle is never used on any live path below.
        DataTable dtexcelSingle = new DataTable(tablename);
        SqlTransaction transaction = conn.BeginTransaction();
        try
        {
            int batchsize = 0;
            Console.WriteLine("Importing table {0}", dt.TableName);
            if (mode == Ftype.csv)
            {
                // Re-read the whole file with the detected separator and stream it to the server.
                using (StreamReader file = new StreamReader(filename))
                {
                    using (CsvReader csv = new CsvReader(file, true, csvSeperatorChar))
                    // using (CsvReader csv = new CsvReader(file, true, csvSeperatorChar,'\0','\0','#', ValueTrimmingOptions.None))
                    {
                        // Be permissive about malformed rows rather than aborting the import.
                        csv.SkipEmptyLines = true;
                        csv.SupportsMultiline = true;
                        csv.MissingFieldAction = MissingFieldAction.ReplaceByNull;
                        csv.DefaultParseErrorAction = ParseErrorAction.AdvanceToNextLine;
                        SqlBulkCopy copy = new SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity, transaction);
                        // SqlBulkCopy copy = new SqlBulkCopy(connstr, SqlBulkCopyOptions.KeepIdentity );
                        copy.BulkCopyTimeout = 9999999; // effectively no timeout
                        copy.DestinationTableName = tablename;
                        copy.WriteToServer(csv);
                        batchsize = copy.RowsCopiedCount();
                        transaction.Commit();
                    }
                }
            }
            else
            {
                // Excel / Access: read the sheet/table through OLE DB and bulk-copy the reader.
                string sheet = dt.TableName;
                if (sheet.EndsWith("_"))
                {
                    // skip Excel's auto-generated filtered-range sheets
                    continue;
                }
                OleDbConnection oconn = GetOleConn(filename);
                // List<DataTable> dtt = new List<DataTable>();
                // oconn.Open();
                try
                {
                    // foreach (DataRow schemaRow in schemaTable.Rows)
                    {
                        //Looping a first Sheet of Xl File
                        // schemaRow = schemaTable.Rows[0];
                        // if (!sheet.EndsWith("_"))
                        {
                            string query = "SELECT * FROM [" + sheet + "]";
                            // NOTE(review): daexcel is created but never used — only the reader below is.
                            OleDbDataAdapter daexcel = new OleDbDataAdapter(query, oconn);
                            /* DataTable targettable = null;
                             * if ( singletable)
                             * {
                             *     targettable=(dtexcelSingle);
                             * }
                             * else
                             * {
                             *     DataTable dtexcel = new DataTable();
                             *     dtexcel.Locale = CultureInfo.CurrentCulture;
                             *     targettable = dtexcel;
                             * } */
                            using (OleDbCommand cmd = new OleDbCommand(query, oconn))
                            {
                                using (OleDbDataReader rdr = cmd.ExecuteReader())
                                {
                                    SqlBulkCopy copy = new SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity, transaction);
                                    // SqlBulkCopy copy = new SqlBulkCopy(connstr, SqlBulkCopyOptions.KeepIdentity );
                                    copy.BulkCopyTimeout = 9999999; // effectively no timeout
                                    copy.DestinationTableName = dt.TableName;
                                    copy.WriteToServer(rdr);
                                    batchsize = copy.RowsCopiedCount();
                                    transaction.Commit();
                                }
                            }
                            /* if (!singletable)
                             * {
                             *     dtt.Add(targettable);
                             * } */
                        }
                        // if (assingle) dtt.Add(dtexcelSingle);
                    }
                }
                finally
                {
                    // oconn.Close();
                }
            }
            Console.WriteLine("Finished inserting {0} records.", batchsize);
        }
        catch (Exception ex)
        {
            transaction.Rollback();
            Console.WriteLine("Err:" + ex.Message);
        }
        finally
        {
            conn.Close();
        }
    } // end foreach dt
}
// Console.ReadKey();
} // end of enclosing type (header outside this view)
/// <summary>
/// Exports each selected model table to SQL Server: evaluates the table with a DAX query and
/// bulk-copies the reader into a schema-qualified SQL table inside a per-table transaction.
/// </summary>
/// <param name="connStr">Target SQL Server connection string.</param>
/// <param name="schemaName">Schema used to qualify each destination table name.</param>
/// <param name="truncateTables">When true, truncates each destination table before copying.</param>
private void ExportDataToSQLServer(string connStr, string schemaName, bool truncateTables)
{
    var metadataPane = this.Document.MetadataPane;
    // TODO: Use async but to be well done need to apply async on the DBCommand & DBConnection
    // TODO: Show warning message?
    if (metadataPane.SelectedModel == null)
    {
        return;
    }
    Document.QueryStopWatch.Start();
    using (var conn = new SqlConnection(connStr))
    {
        conn.Open();
        // NOTE(review): declared inside this using-block but referenced in the completion message
        // after it closes — presumably a same-named field exists on the class; confirm, otherwise
        // the final message reports a stale/zero count (or this does not compile).
        var currentTableIdx = 0;
        var selectedTables = Tables.Where(t => t.IsSelected);
        totalTableCnt = selectedTables.Count();
        foreach (var table in selectedTables)
        {
            try
            {
                EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(table));
                currentTable = table;
                currentTable.Status = ExportStatus.Exporting;
                currentTableIdx++;
                var daxQuery = $"EVALUATE {table.DaxName}";
                using (var statusMsg = new StatusBarMessage(Document, $"Exporting {table.Caption}"))
                using (var reader = metadataPane.Connection.ExecuteReader(daxQuery))
                {
                    sqlTableName = $"[{schemaName}].[{table.Caption}]";
                    EnsureSQLTableExists(conn, sqlTableName, reader);
                    using (var transaction = conn.BeginTransaction())
                    {
                        if (truncateTables)
                        {
                            using (var cmd = new SqlCommand($"truncate table {sqlTableName}", conn))
                            {
                                cmd.Transaction = transaction;
                                cmd.ExecuteNonQuery();
                            }
                        }
                        using (var sqlBulkCopy = new SqlBulkCopy(conn, SqlBulkCopyOptions.TableLock, transaction))
                        {
                            sqlBulkCopy.DestinationTableName = sqlTableName;
                            sqlBulkCopy.BatchSize = 5000;
                            sqlBulkCopy.NotifyAfter = 5000; // progress event granularity (rows)
                            sqlBulkCopy.SqlRowsCopied += SqlBulkCopy_SqlRowsCopied;
                            sqlBulkCopy.EnableStreaming = true;
                            sqlBulkCopy.WriteToServer(reader);
                            // Capture the count before the bulk copy is disposed.
                            currentTable.RowCount = sqlBulkCopy.RowsCopiedCount();
                        }
                        transaction.Commit();
                        currentTable.Status = ExportStatus.Done;
                    }
                }
                // jump out of table loop if we have been cancelled
                if (CancelRequested)
                {
                    EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Warning, "Data Export Cancelled"));
                    break;
                }
                EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Information, $"Exported {table.Caption} to {sqlTableName}"));
                currentTable.Status = ExportStatus.Done;
            }
            catch (Exception ex)
            {
                // per-table failure: report, mark the table, and move on
                currentTable.Status = ExportStatus.Error;
                Log.Error(ex, "{class} {method} {message}", "ExportDataWizardViewModel", "ExportDataToSQLServer", ex.Message);
                EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Error, $"Error exporting data to SQL Server: {ex.Message}"));
                EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
                continue; // skip to next table on error
            }
        }
    }
    Document.QueryStopWatch.Stop();
    EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Information, $"Model Export Complete: {currentTableIdx} tables exported", Document.QueryStopWatch.ElapsedMilliseconds));
    EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
    Document.QueryStopWatch.Reset();
}
/// <summary>
/// Exports every table in SelectedTbls (loaded from Excel) to SQL Server with SqlBulkCopy,
/// one transaction per table.
/// </summary>
/// <param name="connStr">SQL Server connection string; validated via SqlConnectionStringBuilder.</param>
/// <param name="schemaName">Schema used to qualify each destination table name.</param>
/// <param name="truncateTables">When true, truncates each destination table before copying.</param>
/// <param name="dropTables">Passed to EnsureSQLTableExists to control drop-and-recreate behavior.</param>
/// <exception cref="ArgumentException">The connection string could not be parsed.</exception>
public static void ExportDataToSQLServer(string connStr, string schemaName, bool truncateTables, bool dropTables)
{
    SqlConnectionStringBuilder builder;
    try
    {
        builder = new SqlConnectionStringBuilder(connStr);
    }
    catch (ArgumentException ex)
    {
        // wrap this exception and include the connection string that we could not parse
        throw new ArgumentException($"Error parsing connections string: {connStr} - {ex.Message}", ex);
    }

    // Load Tables from Excel to List
    LoadExcelTableToDotnetDataTable();
    if (SelectedTbls.Count == 0)
    {
        // no tables were selected so exit here
        return;
    }

    try
    {
        using (var conn = new SqlConnection(builder.ToString()))
        {
            conn.Open();
            foreach (var table in SelectedTbls)
            {
                try
                {
                    using (var reader = new DataTableReader(table))
                    {
                        string sqlTableName = $"[{schemaName}].[{table.TableName}]";
                        // make sure the destination table exists before copying into it
                        EnsureSQLTableExists(conn, sqlTableName, reader, dropTables);
                        using (var transaction = conn.BeginTransaction())
                        {
                            if (truncateTables)
                            {
                                // NOTE(review): the table name is interpolated into the SQL text; it comes
                                // from the local workbook, not remote input, and cannot be parameterized here.
                                using (var cmd = new SqlCommand($"truncate table {sqlTableName}", conn))
                                {
                                    cmd.Transaction = transaction;
                                    cmd.ExecuteNonQuery();
                                }
                            }
                            // FIX: the bulk copy instance is now disposed deterministically.
                            using (var sqlBulkCopy = new SqlBulkCopy(conn, SqlBulkCopyOptions.TableLock, transaction))
                            {
                                sqlBulkCopy.DestinationTableName = sqlTableName;
                                sqlBulkCopy.BatchSize = 10000;
                                sqlBulkCopy.EnableStreaming = true;
                                // FIX: previously WriteToServerAsync was started and never awaited, so
                                // transaction.Commit() (and disposal) could run while rows were still in
                                // flight. Write synchronously so the commit happens after the copy completes.
                                sqlBulkCopy.WriteToServer(reader);
                                transaction.Commit();
                                MessageBox.Show(table.TableName + ": " + sqlBulkCopy.RowsCopiedCount().ToString() + " rows copied", "Notice");
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                    continue; // skip to next table on error
                }
            }
        }
    }
    catch (Exception ex)
    {
        // FIX: an empty catch previously swallowed connection-level failures silently.
        MessageBox.Show(ex.Message);
    }
}