/// <summary>
/// Exports every selected table from the current model to SQL Server using SqlBulkCopy.
/// Rows are read in batches of <c>maxBatchSize</c> (via the DAX TOPNSKIP function when the
/// connection supports it, otherwise in a single pass) and each batch is written inside its
/// own transaction so that a user cancel can roll back the in-flight batch cleanly.
/// </summary>
/// <param name="connStr">SQL Server connection string for the destination database.</param>
/// <param name="schemaName">Destination schema; each table is written to [schemaName].[table.Caption].</param>
/// <param name="truncateTables">When true the destination table is truncated before the first batch.</param>
private void ExportDataToSQLServer(string connStr, string schemaName, bool truncateTables)
{
    var metadataPane = this.Document.MetadataPane;

    SqlConnectionStringBuilder builder;
    try
    {
        builder = new SqlConnectionStringBuilder(connStr);
    }
    catch (ArgumentException ex)
    {
        // wrap this exception and include the connection string that we could not parse
        throw new ArgumentException($"Error parsing connection string: {connStr} - {ex.Message}", ex);
    }
    builder.ApplicationName = "DAX Studio Table Export";

    currentTableIdx = 0;
    // materialize once - the sequence is used for Count, the foreach and FirstOrDefault in the catch
    var selectedTables = Tables.Where(t => t.IsSelected).ToList();
    totalTableCnt = selectedTables.Count;
    var connRead = Document.Connection;

    // no tables were selected so exit here
    if (totalTableCnt == 0) return;

    // TODO: Use async but to be well done need to apply async on the DBCommand & DBConnection
    // TODO: Show warning message?
    if (metadataPane.SelectedModel == null) return;

    try
    {
        Document.QueryStopWatch.Start();
        using (var cancellationTokenSource = new CancellationTokenSource())
        using (var conn = new SqlConnection(builder.ToString()))
        {
            conn.Open();
            foreach (var table in selectedTables)
            {
                try
                {
                    EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(table));
                    currentTable = table;
                    currentTable.Status = ExportStatus.Exporting;
                    currentTableIdx++;

                    // get a count of the total rows in the table so progress can be reported
                    var daxRowCount = $"EVALUATE ROW(\"RowCount\", COUNTROWS( {table.DaxName} ) )";
                    DataTable dtRows = connRead.ExecuteDaxQueryDataTable(daxRowCount);
                    var totalRows = dtRows.Rows[0].Field<long>(0);
                    currentTable.TotalRows = totalRows;

                    // loop-invariant: whether we can page through the table with TOPNSKIP
                    var supportsTopnSkip = connRead.AllFunctions.Contains("TOPNSKIP");

                    using (var statusMsg = new StatusBarMessage(Document, $"Exporting {table.Caption}"))
                    {
                        for (long batchRows = 0; batchRows < totalRows; batchRows += maxBatchSize)
                        {
                            var daxQuery = $"EVALUATE {table.DaxName}";
                            // if the connection supports TOPNSKIP then use that to query batches of rows
                            if (supportsTopnSkip)
                            {
                                daxQuery = $"EVALUATE TOPNSKIP({maxBatchSize}, {batchRows}, {table.DaxName} )";
                            }

                            using (var reader = connRead.ExecuteReader(daxQuery))
                            {
                                sqlTableName = $"[{schemaName}].[{table.Caption}]";
                                sqlBatchRows = batchRows;

                                // if this is the first batch ensure the table exists
                                if (batchRows == 0) EnsureSQLTableExists(conn, sqlTableName, reader);

                                using (var transaction = conn.BeginTransaction())
                                {
                                    if (truncateTables && batchRows == 0)
                                    {
                                        // NOTE: table name comes from the model caption, not user free-text;
                                        // identifiers cannot be parameterized so it is bracket-quoted above
                                        using (var cmd = new SqlCommand($"truncate table {sqlTableName}", conn))
                                        {
                                            cmd.Transaction = transaction;
                                            cmd.ExecuteNonQuery();
                                        }
                                    }

                                    // SqlBulkCopy is IDisposable - wrap in a using so its internal
                                    // resources are released on every path (success, cancel or throw)
                                    using (var sqlBulkCopy = new SqlBulkCopy(conn, SqlBulkCopyOptions.TableLock, transaction))
                                    {
                                        sqlBulkCopy.DestinationTableName = sqlTableName;
                                        sqlBulkCopy.BatchSize = 5000;
                                        sqlBulkCopy.NotifyAfter = 5000;
                                        sqlBulkCopy.SqlRowsCopied += SqlBulkCopy_SqlRowsCopied;
                                        sqlBulkCopy.EnableStreaming = true;

                                        var task = sqlBulkCopy.WriteToServerAsync(reader, cancellationTokenSource.Token);
                                        WaitForTaskPollingForCancellation(cancellationTokenSource, task);

                                        // update the currentTable with the final rowcount
                                        currentTable.RowCount = sqlBulkCopy.RowsCopiedCount() + batchRows;

                                        if (CancelRequested)
                                        {
                                            transaction.Rollback();
                                            currentTable.Status = ExportStatus.Cancelled;
                                        }
                                        else
                                        {
                                            transaction.Commit();
                                            if (currentTable.RowCount >= currentTable.TotalRows)
                                            {
                                                currentTable.Status = ExportStatus.Done;
                                            }
                                        }
                                    }
                                } // end transaction
                            } // end using reader

                            // stop fetching further batches once a cancel has been requested -
                            // the rolled-back batch must be the last one we attempt
                            if (CancelRequested) break;

                            // exit the loop here if the connection does not support TOPNSKIP
                            if (!supportsTopnSkip) break;
                        } // end rowBatch
                    }

                    // jump out of table loop if we have been cancelled
                    if (CancelRequested)
                    {
                        EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Warning, "Data Export Cancelled"));
                        // mark any tables not yet exported as skipped
                        MarkWaitingTablesAsSkipped();
                        break;
                    }

                    EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Information,
                        exportTableMsg.Format(table.RowCount, table.RowCount == 1 ? "" : "s", sqlTableName)));
                    currentTable.Status = ExportStatus.Done;
                }
                catch (Exception ex)
                {
                    currentTable.Status = ExportStatus.Error;
                    Log.Error(ex, "{class} {method} {message}", nameof(ExportDataWizardViewModel), nameof(ExportDataToSQLServer), ex.Message);
                    EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Error, $"Error exporting data to SQL Server Table: {ex.Message}"));
                    EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
                    continue; // skip to next table on error
                }
            } // end foreach table
        }

        Document.QueryStopWatch.Stop();
        EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Information,
            exportCompleteMsg.Format(currentTableIdx), Document.QueryStopWatch.ElapsedMilliseconds));
        EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
        Document.QueryStopWatch.Reset();
    }
    catch (Exception ex)
    {
        Document.QueryStopWatch.Stop();
        // a failure before the first table started leaves currentTable null - point it at
        // the first selected table so the error status has somewhere visible to land
        if (currentTable == null && totalTableCnt > 0) currentTable = selectedTables.FirstOrDefault();
        if (currentTable != null) currentTable.Status = ExportStatus.Error;
        Log.Error(ex, "{class} {method} {message}", nameof(ExportDataWizardViewModel), nameof(ExportDataToSQLServer), ex.Message);
        EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Error, $"Error exporting data to SQL Server: {ex.Message}"));
        EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
    }
}
/// <summary>
/// Exports every selected table to SQL Server in a single pass per table (no batching):
/// each table is streamed from a DAX EVALUATE reader into SqlBulkCopy inside one
/// transaction per table.
/// </summary>
/// <param name="connStr">SQL Server connection string for the destination database.</param>
/// <param name="schemaName">Destination schema; each table is written to [schemaName].[table.Caption].</param>
/// <param name="truncateTables">When true each destination table is truncated before the copy.</param>
private void ExportDataToSQLServer(string connStr, string schemaName, bool truncateTables)
{
    var metadataPane = this.Document.MetadataPane;

    // TODO: Use async but to be well done need to apply async on the DBCommand & DBConnection
    // TODO: Show warning message?
    if (metadataPane.SelectedModel == null) return;

    Document.QueryStopWatch.Start();

    // hoisted out of the connection's using scope: the completion message below
    // references this counter after the connection scope has closed
    var currentTableIdx = 0;

    using (var conn = new SqlConnection(connStr))
    {
        conn.Open();
        var selectedTables = Tables.Where(t => t.IsSelected);
        totalTableCnt = selectedTables.Count();

        foreach (var table in selectedTables)
        {
            try
            {
                EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(table));
                currentTable = table;
                currentTable.Status = ExportStatus.Exporting;
                currentTableIdx++;

                var daxQuery = $"EVALUATE {table.DaxName}";

                using (var statusMsg = new StatusBarMessage(Document, $"Exporting {table.Caption}"))
                using (var reader = metadataPane.Connection.ExecuteReader(daxQuery))
                {
                    sqlTableName = $"[{schemaName}].[{table.Caption}]";
                    EnsureSQLTableExists(conn, sqlTableName, reader);

                    using (var transaction = conn.BeginTransaction())
                    {
                        if (truncateTables)
                        {
                            // NOTE: identifiers cannot be parameterized; the name is built
                            // from the model caption and bracket-quoted above
                            using (var cmd = new SqlCommand($"truncate table {sqlTableName}", conn))
                            {
                                cmd.Transaction = transaction;
                                cmd.ExecuteNonQuery();
                            }
                        }

                        using (var sqlBulkCopy = new SqlBulkCopy(conn, SqlBulkCopyOptions.TableLock, transaction))
                        {
                            sqlBulkCopy.DestinationTableName = sqlTableName;
                            sqlBulkCopy.BatchSize = 5000;
                            sqlBulkCopy.NotifyAfter = 5000;
                            sqlBulkCopy.SqlRowsCopied += SqlBulkCopy_SqlRowsCopied;
                            sqlBulkCopy.EnableStreaming = true;
                            sqlBulkCopy.WriteToServer(reader);
                            currentTable.RowCount = sqlBulkCopy.RowsCopiedCount();
                        }

                        transaction.Commit();
                        currentTable.Status = ExportStatus.Done;
                    }
                }

                // jump out of table loop if we have been cancelled
                if (CancelRequested)
                {
                    EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Warning, "Data Export Cancelled"));
                    break;
                }

                EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Information, $"Exported {table.Caption} to {sqlTableName}"));
            }
            catch (Exception ex)
            {
                currentTable.Status = ExportStatus.Error;
                Log.Error(ex, "{class} {method} {message}", nameof(ExportDataWizardViewModel), nameof(ExportDataToSQLServer), ex.Message);
                EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Error, $"Error exporting data to SQL Server: {ex.Message}"));
                EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
                continue; // skip to next table on error
            }
        }
    }

    Document.QueryStopWatch.Stop();
    EventAggregator.PublishOnUIThread(new OutputMessage(MessageType.Information,
        $"Model Export Complete: {currentTableIdx} tables exported", Document.QueryStopWatch.ElapsedMilliseconds));
    EventAggregator.PublishOnUIThread(new ExportStatusUpdateEvent(currentTable, true));
    Document.QueryStopWatch.Reset();
}