/// <summary>
/// Creates a new empty database on the server if one of that name doesn't already exist.
/// </summary>
/// <param name="name">Name of the database to create.</param>
public void CreateDB(string name)
{
    // Nothing to do when the server already hosts a database with this name.
    if (_targetServer.Databases.Contains(name))
    {
        return;
    }

    var database = new Microsoft.SqlServer.Management.Smo.Database(_targetServer, name);
    database.Create();
}
/// <summary>
/// Drops any existing database of the given name, recreates it, deploys the supplied
/// CLR assemblies into it, then runs the schema-creation script against it.
/// </summary>
/// <param name="dbName">Name of the database to (re)create.</param>
/// <param name="scriptFilePath">Path to the T-SQL script that builds the schema.</param>
/// <param name="assembliesToDeploy">Paths of CLR assemblies to deploy; may be null or empty.</param>
/// <exception cref="UpdateDatabaseException">Thrown when the script file does not exist.</exception>
public void CreateDatabaseFromScriptFile(
    string dbName,
    string scriptFilePath,
    StringCollection assembliesToDeploy)
{
    if (!File.Exists(scriptFilePath))
    {
        throw new UpdateDatabaseException(
            string.Format(
                "Cannot create database, database script file {0} does not exist.",
                scriptFilePath));
    }

    DropDatabase(dbName);

    using (var cn = new SqlConnection(mConnectionString))
    {
        cn.Open();

        var sc = new ServerConnection(cn);
        var srv = new Server(sc);
        var db = new Microsoft.SqlServer.Management.Smo.Database(srv, dbName);

        // Fix: guard against a null collection (previously a NullReferenceException
        // in the foreach below) and only mark the database TRUSTWORTHY when
        // assemblies are actually being deployed — the original comment stated
        // that intent but the code set it unconditionally.
        bool hasAssemblies = assembliesToDeploy != null && assembliesToDeploy.Count > 0;
        if (hasAssemblies)
        {
            db.DatabaseOptions.Trustworthy = true;
        }

        db.Create();

        if (hasAssemblies)
        {
            foreach (string assemblyPath in assembliesToDeploy)
            {
                var assembly = new SqlAssembly(db, Path.GetFileNameWithoutExtension(assemblyPath))
                {
                    // Unrestricted so the assembly may do external access; the
                    // TRUSTWORTHY flag above is required for this to work.
                    AssemblySecurityLevel = AssemblySecurityLevel.Unrestricted
                };
                assembly.Create(Environment.ExpandEnvironmentVariables(assemblyPath));
            }
        }

        using (var sr = new StreamReader(scriptFilePath))
        {
            var createDatabaseScript = sr.ReadToEnd();
            db.ExecuteNonQuery(createDatabaseScript);
        }
    }
}
/// <summary>
/// Creates the database described by DatabaseItem, optionally replacing an existing
/// one (when Force is set) and optionally placing the data and log files at
/// explicitly supplied paths.
/// </summary>
private void Create()
{
    this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Creating Database: {0}", this.DatabaseItem.ItemSpec));

    if (this.CheckDatabaseExists())
    {
        if (!this.Force)
        {
            // Refuse to clobber an existing database unless explicitly told to.
            this.Log.LogError(string.Format(CultureInfo.CurrentCulture, "Database already exists: {0}. Set Force to true to delete an existing Database.", this.DatabaseItem.ItemSpec));
            return;
        }

        // Force requested: remove the existing database before recreating it.
        this.Delete();
    }

    var newDatabase = new SMO.Database(this.sqlServer, this.DatabaseItem.ItemSpec);

    // Optional explicit data-file placement.
    if (this.DataFilePath != null)
    {
        var fileGroup = new FileGroup(newDatabase, this.FileGroupName);
        fileGroup.Files.Add(new DataFile(fileGroup, this.DatabaseItem.ItemSpec, this.DataFilePath.GetMetadata("FullPath")));
        newDatabase.FileGroups.Add(fileGroup);
    }

    // Optional explicit log-file placement; default the log name when not supplied.
    if (this.LogFilePath != null)
    {
        if (string.IsNullOrEmpty(this.LogName))
        {
            this.LogName = this.DatabaseItem.ItemSpec + "_log";
        }

        newDatabase.LogFiles.Add(new LogFile(newDatabase, this.LogName, this.LogFilePath.GetMetadata("FullPath")));
    }

    if (!string.IsNullOrEmpty(this.Collation))
    {
        newDatabase.Collation = this.Collation;
    }

    newDatabase.Create();
}
/// <summary>
/// Creates the database and a matching database user with basic DDL permissions
/// if the database does not already exist on the server.
/// (Name keeps the original "Extists" typo for caller compatibility.)
/// </summary>
/// <param name="serverName">SQL Server instance to connect to.</param>
/// <param name="userName">SQL login used to connect; also granted permissions on the new database.</param>
/// <param name="password">Password for the SQL login.</param>
/// <param name="databaseName">Name of the database to create.</param>
public void CreateIfNotExtists(string serverName, string userName, SecureString password, string databaseName)
{
    var serverConnection = new ServerConnection(serverName, userName, password);

    // Fix: the connection (and with it the supplied credentials) was previously
    // created but never used — the Server was built from the bare server name,
    // so the userName/password arguments were silently ignored.
    var server = new Server(serverConnection);

    if (!server.Databases.Contains(databaseName))
    {
        var database = new Microsoft.SqlServer.Management.Smo.Database(server, databaseName);
        database.Create();

        var user = new User(database, userName);
        user.Create();

        // Grant the user the permissions needed to create schema objects.
        var databasePermissionSet = new DatabasePermissionSet(DatabasePermission.CreateType);
        databasePermissionSet.Add(DatabasePermission.CreateSchema);
        databasePermissionSet.Add(DatabasePermission.CreateTable);
        databasePermissionSet.Add(DatabasePermission.CreateFunction);
        databasePermissionSet.Add(DatabasePermission.CreateProcedure);
        database.Grant(databasePermissionSet, userName);
    }
}
/// <summary>
/// Creates an empty database on the given server instance.
/// </summary>
/// <param name="servername">SQL Server instance to connect to.</param>
/// <param name="username">Login name for the connection.</param>
/// <param name="password">Password for the connection.</param>
/// <param name="DBName">Name of the database to create.</param>
/// <returns>true when the database was created; false when any error occurred.</returns>
public bool CreateDB(string servername, string username, string password, string DBName)
{
    ServerConnection serverconnection = null;
    try
    {
        serverconnection = CreateServerConnection(servername, username, password);
        var srv = new Microsoft.SqlServer.Management.Smo.Server(serverconnection);

        // Define a Database object variable by supplying the server and the database name arguments in the constructor.
        var db = new Microsoft.SqlServer.Management.Smo.Database(srv, DBName);

        // Create the database on the instance of SQL Server.
        db.Create();
        return true;
    }
    catch (Exception)
    {
        // Deliberately swallowed: callers observe success/failure only through
        // the return value. (The original had an unreachable `throw;` after
        // `return false;` — removed.)
        return false;
    }
    finally
    {
        // Fix: release the connection even when Create() throws; previously
        // Disconnect() was only reached on the success path.
        if (serverconnection != null)
        {
            serverconnection.Disconnect();
        }
    }
}
/// <summary>
/// BackgroundWorker DoWork handler that backs up the selected tables from the source
/// database to the destination database.
/// If the destination database does not exist: creates it, transfers the schema of all
/// source tables via SMO Transfer, then bulk-copies the (optionally date-filtered) data.
/// If it does exist: after CheckVersion() succeeds, re-keys rows of non-override tables
/// past the destination's last primary key, deletes override tables' destination data,
/// and bulk-copies the merged data. Any exception is swallowed and reported via Log,
/// with e.Result set to false.
/// NOTE(review): layout below appears flattened by extraction — left byte-identical.
/// e.Argument is assumed to be List&lt;CheckedListItem&gt; of tables to back up; a "Jobs"
/// table is assumed present in the source schema — TODO confirm against callers.
/// </summary>
public void bgwValidateConnection_DoWorkHandler(object sender, DoWorkEventArgs e) { try { List <CheckedListItem> backupTables = e.Argument as List <CheckedListItem>; Smo.Server srvDestination = new Smo.Server(Destination.ServerConnection); if (!srvDestination.Databases.Contains(Destination.Database)) { #region Create Database and Copy Table sechma Smo.Database newdb = new Smo.Database(srvDestination, Destination.Database); newdb.Create(); Smo.Server srvSource = new Smo.Server(Source.ServerConnection); Smo.Database dbSource = srvSource.Databases[Source.Database]; Smo.Transfer transfer = new Smo.Transfer(dbSource); transfer.CopyAllUsers = true; transfer.CopyAllObjects = true; transfer.CopyAllTables = false; transfer.CopyData = false; transfer.CopySchema = true; transfer.Options.WithDependencies = true; transfer.Options.DriAll = true; transfer.Options.ContinueScriptingOnError = false; // Create all table when database not exist foreach (var tbl in dbSource.Tables) { transfer.ObjectList.Add(tbl); } transfer.DestinationServer = Destination.Server; transfer.DestinationDatabase = newdb.Name; transfer.DestinationLoginSecure = Destination.LoginSecurity; if (!Destination.LoginSecurity) { transfer.DestinationLogin = Destination.UserId; transfer.DestinationPassword = Destination.Password; } transfer.TransferData(); #endregion #region Get Source Data and Filter DataSet ds = DbHelper.CopySechmaFromDatabase(Source.ConnectionString()); List <string> sortTables = DbHelper.Fill(Source.ConnectionString(), ds, backupTables.Select(b => b.Name).ToList()); if (ds.Tables["Jobs"].Rows.Count > 0) { ds.Tables["Jobs"].Rows.Cast <DataRow>().LastOrDefault().Delete(); // Always delete last record. 
ds.Tables["Jobs"].AcceptChanges(); if (IsDateFiltration) { ds.Tables["Jobs"].Rows.Cast <DataRow>().Where(j => (DateTime)j["Date"] < DateFrom || (DateTime)j["Date"] > DateTo).ToList().ForEach(j => j.Delete()); } foreach (var tbl in sortTables) { ds.Tables[tbl].AcceptChanges(); } } #endregion #region Execute SqlBulk Copy foreach (string tbl in sortTables) { DbHelper.ExecuteSqlBulk(Destination.ConnectionString(), ds.Tables[tbl]); this.Progress += 100 / backupTables.Count; if (DbHelper.SqlBulkLog.Count > 0) { this.Log += DbHelper.SqlBulkLog.LastOrDefault() + Environment.NewLine; } } #endregion } else { if (CheckVersion()) { // TODO: now todo here. #region Get Source Data and Filter date range DataSet ds = DbHelper.CopySechmaFromDatabase(Source.ConnectionString()); List <string> sortTables = DbHelper.Fill(Source.ConnectionString(), ds, backupTables.Select(b => b.Name).ToList()); if (ds.Tables["Jobs"].Rows.Count > 0) { ds.Tables["Jobs"].Rows.Cast <DataRow>().LastOrDefault().Delete(); // Always delete last record. ds.Tables["Jobs"].AcceptChanges(); if (IsDateFiltration) { ds.Tables["Jobs"].Rows.Cast <DataRow>().Where(j => (DateTime)j["Date"] < DateFrom || (DateTime)j["Date"] > DateTo).ToList().ForEach(j => j.Delete()); } foreach (var tbl in sortTables) { ds.Tables[tbl].AcceptChanges(); } } #endregion #region Get destination PK list of table exists and modify for merge // filter override table don't modify key List <string> overrideTable = backupTables.Where(b => b.IsOverride == true).Select(b => b.Name).ToList(); foreach (string tbl in sortTables) { if (!overrideTable.Contains(tbl)) { string keycolumn = DbHelper.PrimaryKeyColumn(Destination.ConnectionString(), tbl); string sql = string.Format("Select TOP 1 {0} From {1} Order By {0} DESC", keycolumn, tbl); int? 
lastkey = (int?)(DbHelper.ReadOne(Destination.ConnectionString(), sql)); if (lastkey != null) { int newkey = (int)lastkey + 1; int row = ds.Tables[tbl].Rows.Count; ds.Tables[tbl].Columns[keycolumn].ReadOnly = false; for (int i = row - 1; i >= 0; i--) { ds.Tables[tbl].Rows[i][keycolumn] = newkey + i; } ds.Tables[tbl].AcceptChanges(); } } } ds.AcceptChanges(); #endregion #region Delete override table data List <string> clearTable = backupTables.Where(b => b.IsOverride == true).Select(b => b.Name).ToList(); DeleteDestinationOverride(clearTable); #endregion #region Execute SqlBulk Copy foreach (string tbl in sortTables) { DbHelper.ExecuteSqlBulk(Destination.ConnectionString(), ds.Tables[tbl]); this.Progress += 100 / backupTables.Count; if (DbHelper.SqlBulkLog.Count > 0) { this.Log += DbHelper.SqlBulkLog.LastOrDefault() + Environment.NewLine; } } #endregion } else { this.Log += "*** Database upgrade failed ***" + Environment.NewLine; e.Result = false; } } } catch (Exception) { this.Log += "An error occurred during backup database." + Environment.NewLine; e.Result = false; } // DONE: 1. Check destination database exsits. // DONE: 2. If No date range, use transfer copy all database. // DONE: 2-1. If use date range, DataTable.Select(); filter Jobs key (klKey) and filter another table has FK by Jobs (fkJobKey, klJobKey) // DONE: 2-2. Use Sqlbulk copy datatable // DONE: 3. If YES Check db version // CheckVersion(); // DONE: 3-1. Source > Destination => upgrade scripts // DONE: 3-2. Source == Destination => Run step 4 for merge data. // DONE: 3-3. Source < Destination => false; alert message and block; // DONE: 4. Deal table releationship PK/FK to create DataSet & Datatable from Source. // DONE: 5. If table of ObservTable selected get record of Destination last PK int. // List<Record> Record.LastKey, Record.TableName, Record.PKColumnName // TODO: 6. DataSet.Fill(); get data and filter date range. // TODO: 7. Use Sqlbulk copy datatable. }
/// <summary>
/// Creates a new database using the creator credentials, provisions its two logins
/// (the main user and a public user), and applies the schema scripts.
/// Errors are accumulated into the shared _response (never thrown to the caller):
/// mixed-mode auth not enabled, database already exists, or any exception during
/// creation. Returns the accumulated ServiceResponse.
/// NOTE(review): _response appears to be instance state shared across calls —
/// confirm whether it is reset between operations.
/// </summary>
/// <param name="info">Target server/database/credentials for the database to create.</param>
/// <param name="databaseUserNamePublic">Login name for the restricted "public" account.</param>
/// <param name="databasePasswordPublic">Password for the restricted "public" account.</param>
/// <param name="scriptResourceName">Primary schema script resource.</param>
/// <param name="scriptResourceNames">Additional schema script resources.</param>
/// <param name="scriptLoader">Loader used to resolve the script resources.</param>
/// <param name="creatorCredentials">Elevated credentials used to perform the creation.</param>
/// <returns>The shared ServiceResponse with any errors recorded.</returns>
private ServiceResponse CreateDatabase(DatabaseInfo info, string databaseUserNamePublic, string databasePasswordPublic, string scriptResourceName, IEnumerable<string> scriptResourceNames, ScriptLoader scriptLoader, DatabaseCredentials creatorCredentials) { var creatorInfo = info.CloneWithNewCredentials(creatorCredentials); using (var database = new DatabaseManipulator(creatorInfo)) { try { smo.Server sqlServer = database.SqlServer; // Make sure SQL Server allows SQL logins if needed if (creatorInfo.HasUserName()) { if (database.LoginMode == smo.ServerLoginMode.Integrated) { _logger.Error("Database Server is not configured for mixed-mode authentication.", "Database Server", creatorInfo.ServerName); _response.AddError(string.Format(CultureInfo.InvariantCulture, "Database Server ({0}) is not configured for mixed-mode authentication.", creatorInfo.ServerName)); } } if (!_response.HasErrors) { if (!database.Exists()) { _logger.Info("Creating database.", "Database name", info.DatabaseName, "Database Server", info.ServerName); // Create new database var newDatabase = new smo.Database(sqlServer, info.DatabaseName); newDatabase.Create(); database.CreateLogin(info.UserName, info.Password, true); database.CreateLogin(databaseUserNamePublic, databasePasswordPublic, false); // I don't know why we have to dispose this here, but I was afraid to remove it database.Dispose(); UpdateDatabaseSchema(info, databaseUserNamePublic, scriptResourceName, scriptResourceNames, scriptLoader); _logger.Info("Database created successfully.", info.GenerateCustomLoggingProperties()); } else { _logger.Error("Database already exists.", info.GenerateCustomLoggingProperties()); _response.AddError(string.Format(CultureInfo.InvariantCulture, "Database ({0}) on server ({1}) already exists.", info.DatabaseName, info.ServerName)); } } } catch (Exception ex) { _logger.Error("Creation of Database failed.", ex, info.GenerateCustomLoggingProperties()); 
_response.AddError(string.Format(CultureInfo.InvariantCulture, "Creation of Database ({0}) on server ({1}) failed: {2}", info.DatabaseName, info.ServerName, ex.Message)); } } return _response; }
/// <summary>
/// Backs up the user-selected tables from the source database to the destination
/// database named in txtBackupDatabaseName.
/// Flow (as visible in the code): build source/destination connections from the two
/// connection strings; configure an SMO Transfer over the checked tables; if the
/// destination database does not exist, create it and either transfer everything
/// (no date range) or transfer schema only and then bulk-copy MCS/Jobs plus
/// dependent tables filtered by the selected Jobs date range (matching fkJobKey /
/// klJobKey columns); if it does exist, transfer only missing tables, with the
/// date-range merge path left unimplemented (empty else branch near the end).
/// NOTE(review): the layout below appears flattened by extraction and several
/// inline comments (in Chinese) are split across physical lines, so the code is
/// left byte-identical. SQL here is built by string concatenation of key lists —
/// values come from the database itself, but parameterization would be safer.
/// Assumes source schema contains "Jobs" (key klKey) and "MCS" tables — TODO confirm.
/// </summary>
private void RunBackup() { // Flow 1 Destination Dataabase not exists. SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(_source_connstring); /* SOURCE Database */ string s_server = builder["Server"].ToString(); string s_username = builder["User ID"].ToString(); string s_pwd = builder["Password"].ToString(); string s_db = cmbSourceDatabases.SelectedItem.ToString(); /* DESTINATION Database */ builder = new SqlConnectionStringBuilder(_target_connstring); string d_server = builder["Server"].ToString(); string d_username = builder["User ID"].ToString(); string d_pwd = builder["Password"].ToString(); string d_db = txtBackupDatabaseName.Text.Trim(); ServerConnection conn; if (string.IsNullOrEmpty(s_username)) { conn = new ServerConnection(s_server); } else { conn = new ServerConnection(s_server, s_username, s_pwd); } Smo.Server source_srv = new Smo.Server(conn); Smo.Database source_db = source_srv.Databases[s_db]; Smo.Transfer transfer = new Smo.Transfer(source_db); transfer.CopyAllUsers = true; transfer.CreateTargetDatabase = false; transfer.CopyAllObjects = false; transfer.CopyAllTables = false; transfer.CopyData = true; // transfer.CopySchema = true; transfer.Options.WithDependencies = true; transfer.Options.DriAll = true; transfer.Options.ContinueScriptingOnError = false; foreach (var tbl in _tables.Where(x => x.IsChecked == true)) { transfer.ObjectList.Add(source_db.Tables[tbl.Name]); } //use following code if want to create destination databaes runtime ServerConnection d_conn; if (string.IsNullOrEmpty(d_username)) { d_conn = new ServerConnection(d_server); } else { d_conn = new ServerConnection(d_server, d_username, d_pwd); } Smo.Server destination_srv = new Smo.Server(d_conn); // When database not exists backup all if (!destination_srv.Databases.Contains(d_db)) { // transfer.CreateTargetDatabase = true; transfer.DestinationLoginSecure = false; transfer.DestinationServer = d_server; if (!string.IsNullOrEmpty(d_username) && 
!string.IsNullOrEmpty(d_pwd)) { transfer.DestinationLogin = d_username; transfer.DestinationPassword = d_pwd; } else { transfer.DestinationLoginSecure = true; } transfer.DestinationDatabase = d_db; Smo.Database newdb = new Smo.Database(destination_srv, d_db); newdb.Create(); if (!(bool)chkBackupDateRange.IsChecked) { transfer.ScriptTransfer(); transfer.TransferData(); } else { transfer.CopySchema = true; transfer.CopyData = false; transfer.ScriptTransfer(); transfer.TransferData(); // has data range using (TransactionScope scope = new TransactionScope(TransactionScopeOption.Suppress)) { // 大量寫入 // Solve Somethiing // http://msdn.microsoft.com/zh-tw/library/aa561924%28v=bts.10%29.aspx // http://www.died.tw/2009/04/msdtc.html // using (SqlConnection bulk_conn = new SqlConnection(this._target_connstring)) { // step 1 check target tables List<string> tableList = new List<string>(); try { bulk_conn.Open(); } catch (SqlException exp) { throw new InvalidOperationException("Data could not be read", exp); } SqlCommand cmd = new SqlCommand(); cmd.Connection = bulk_conn; cmd.CommandText = "SELECT name FROM sys.tables WHERE is_ms_shipped = 0"; SqlDataReader dr = cmd.ExecuteReader(); tableList.Clear(); while (dr.Read()) { tableList.Add(dr[0].ToString()); } // Jobs 和 MCS 一定要有所以排除 下面會直接跑。 tableList.Remove("Jobs"); tableList.Remove("MCS"); dr.Close(); bulk_conn.Close(); // TODO: data always full Dictionary<string, DataTable> dts = new Dictionary<string, DataTable>(); using (SqlConnection c = new SqlConnection(this._source_connstring)) { c.Open(); // 1. 
先取得 Jobs 完成 Jobs 轉移 要先撈Job才知道時間 所以流程為 // get Jobs -> get MCS -> write MCS -> write Jobs string query_filter_datarange = string.Format("SELECT * FROM Jobs Where Date Between '{0}' and '{1}'", dpFrom.SelectedDate.Value.ToShortDateString(), dpTo.SelectedDate.Value.ToShortDateString()); using (SqlDataAdapter da = new SqlDataAdapter(query_filter_datarange, c)) { dts["Jobs"] = new DataTable(); da.Fill(dts["Jobs"]); } var jobKeys = from job in dts["Jobs"].AsEnumerable() select job.Field<int>("klKey"); string condiction = string.Join(",", jobKeys.Select(j => j.ToString()).ToArray()); // 後面都是跟著這個條件 condiction = condiction.Trim(); // 2-0. 因為 Jobs KEY 綁 MCS 所以要先撈MCS string query_mcs = string.Format("Select * From MCS Where pkMCS IN ({0})",condiction); using (SqlDataAdapter da = new SqlDataAdapter(query_mcs, c)) { dts["MCS"] = new DataTable(); da.Fill(dts["MCS"]); } // write MCS using (SqlBulkCopy mySbc = new SqlBulkCopy(this._target_connstring, SqlBulkCopyOptions.KeepIdentity)) { // bulk_conn.Open(); //設定 mySbc.BatchSize = 10000; //批次寫入的數量 mySbc.BulkCopyTimeout = 60; //逾時時間 //處理完後丟出一個事件,或是說處理幾筆後就丟出事件 //mySbc.NotifyAfter = DTableList.Rows.Count; //mySbc.SqlRowsCopied += new SqlRowsCopiedEventHandler(mySbc_SqlRowsCopied); // 更新哪個資料表 mySbc.DestinationTableName = "MCS"; foreach (var item in dts["MCS"].Columns.Cast<DataColumn>()) { mySbc.ColumnMappings.Add(item.ColumnName, item.ColumnName); } //開始寫入 mySbc.WriteToServer(dts["MCS"]); //完成交易 //scope.Complete(); } // write Jobs using (SqlBulkCopy mySbc = new SqlBulkCopy(this._target_connstring, SqlBulkCopyOptions.KeepIdentity)) { // bulk_conn.Open(); //設定 mySbc.BatchSize = 10000; //批次寫入的數量 mySbc.BulkCopyTimeout = 60; //逾時時間 //處理完後丟出一個事件,或是說處理幾筆後就丟出事件 //mySbc.NotifyAfter = DTableList.Rows.Count; //mySbc.SqlRowsCopied += new SqlRowsCopiedEventHandler(mySbc_SqlRowsCopied); // 更新哪個資料表 mySbc.DestinationTableName = "Jobs"; foreach (var item in dts["Jobs"].Columns.Cast<DataColumn>()) { mySbc.ColumnMappings.Add(item.ColumnName, 
item.ColumnName); } //開始寫入 mySbc.WriteToServer(dts["Jobs"]); //完成交易 //scope.Complete(); } // 2. 撈出所有選取的 Table 欄位 為了判斷誰有 klJobKey fkJobKey foreach (var tableName in tableList) { dts[tableName] = new DataTable(); string query = ""; using (SqlConnection c1 = new SqlConnection(this._source_connstring)) { // 2-1. 先撈出所有欄位 SqlCommand command = new SqlCommand(); command.Connection = c1; string query_column = string.Format("Select * From syscolumns Where id=OBJECT_ID(N'{0}')", tableName); command.CommandText = query_column; c1.Open(); string fkName = ""; using (var reader = command.ExecuteReader()) { while (reader.Read()) { string cName = reader["name"].ToString(); if (cName == "fkJobKey") { fkName = "fkJobKey"; break; } if (cName == "klJobKey") { fkName = "klJobKey"; break; } } // 2-2. 判斷欄位有FK的SQL query statement 加入條件。 if (fkName == "fkJobKey") { query = string.Format("Select * From [{0}] Where fkJobKey IN ({1}) ", tableName, condiction); } else if (fkName == "klJobKey") { query = string.Format("Select * From [{0}] Where klJobKey IN ({1}) ", tableName, condiction); } else { query = string.Format("Select * From [{0}]", tableName); } } using (SqlDataAdapter da = new SqlDataAdapter(query, c1)) { da.Fill(dts[tableName]); } } // 3. 
如果 FK 有 Job Key 過濾移除 /// || dts[tableName].Columns.Contains("klJobKey") //if (dts[tableName].Columns.Contains("fkJobKey")) //{ // // var rows = dts[tableName].Select("fkJobKey in ") // var dealtable = from tbl in dts[tableName].AsEnumerable() // select tbl.Field<int>("fkJobKey"); // var filtration = dealtable.Except(jobKeys); // var rows = from tbl in dts[tableName].AsEnumerable() // where filtration.Any(f => f == tbl.Field<int>("fkJobKey")) // select tbl; // foreach (DataRow r in rows.ToArray()) // { // dts[tableName].Rows.Remove(r); // } //} //if (dts[tableName].Columns.Contains("klJobKey")) //{ // // var rows = dts[tableName].Select("fkJobKey in ") // var dealtable = from tbl in dts[tableName].AsEnumerable() // select tbl.Field<int>("klJobKey"); // var filtration = dealtable.Except(jobKeys); // var rows = from tbl in dts[tableName].AsEnumerable() // where filtration.Any(f => f == tbl.Field<int>("klJobKey")) // select tbl; // foreach (DataRow r in rows.ToArray()) // { // dts[tableName].Rows.Remove(r); // } //} // 4. 跑轉移 using (SqlBulkCopy mySbc = new SqlBulkCopy(this._target_connstring, SqlBulkCopyOptions.KeepIdentity)) { // bulk_conn.Open(); //設定 mySbc.BatchSize = 10000; //批次寫入的數量 mySbc.BulkCopyTimeout = 60; //逾時時間 //處理完後丟出一個事件,或是說處理幾筆後就丟出事件 //mySbc.NotifyAfter = DTableList.Rows.Count; //mySbc.SqlRowsCopied += new SqlRowsCopiedEventHandler(mySbc_SqlRowsCopied); // 更新哪個資料表 mySbc.DestinationTableName = tableName; foreach (var item in dts[tableName].Columns.Cast<DataColumn>()) { mySbc.ColumnMappings.Add(item.ColumnName, item.ColumnName); } //開始寫入 mySbc.WriteToServer(dts[tableName]); } } } } //完成交易 scope.Complete(); } } MessageBox.Show("Done"); } else { // database exists but: // #region * situation 1 : no [table] no [data range] // step 1. 
一樣用transfer傳遞結構開表格 // transfer.CreateTargetDatabase = true; transfer.DestinationLoginSecure = false; transfer.DestinationServer = d_server; if (!string.IsNullOrEmpty(d_username) && !string.IsNullOrEmpty(d_pwd)) { transfer.DestinationLogin = d_username; transfer.DestinationPassword = d_pwd; } else { transfer.DestinationLoginSecure = true; } transfer.DestinationDatabase = d_db; // step 2 判斷哪些 Table 沒有的 先靠 transfer 開 int intTableToMove = transfer.ObjectList.Count; using (SqlConnection connCheckTable = new SqlConnection(this._target_connstring)) { connCheckTable.Open(); SqlCommand cmdCheckTable = new SqlCommand(); cmdCheckTable.Connection = connCheckTable; cmdCheckTable.CommandText = "SELECT name FROM sys.tables WHERE is_ms_shipped = 0"; // target 已經有的 Table 先拉掉 using (var dr = cmdCheckTable.ExecuteReader()) { while (dr.Read()) { transfer.ObjectList.Remove(source_db.Tables[dr[0].ToString()]); } } } // 表示完全沒有 Table 空有DB 也沒有日期限制 就直接全部複製。 if (transfer.ObjectList.Count == 0 && !((bool)chkBackupDateRange.IsChecked)) { transfer.ScriptTransfer(); transfer.TransferData(); } else //否則就把target不存在的Table都開完 資料再一次處理 { transfer.CopySchema = true; transfer.CopyData = false; transfer.ScriptTransfer(); // transfer.TransferData(); //step 3. 
開始搬資料 分成有DateRange和沒有的 if (!((bool)chkBackupDateRange.IsChecked)) //沒有時間條件的話 { using (TransactionScope scope = new TransactionScope(TransactionScopeOption.Suppress)) { using (SqlConnection bulk_conn = new SqlConnection(this._target_connstring)) { // step 1 check target tables List<string> tableList = new List<string>(); try { bulk_conn.Open(); } catch (SqlException exp) { throw new InvalidOperationException("Data could not be read", exp); } SqlCommand cmd = new SqlCommand(); cmd.Connection = bulk_conn; cmd.CommandText = "SELECT name FROM sys.tables WHERE is_ms_shipped = 0"; SqlDataReader dr = cmd.ExecuteReader(); tableList.Clear(); while (dr.Read()) { tableList.Add(dr[0].ToString()); } // Jobs 和 MCS 一定要有所以排除 下面會直接跑。 //tableList.Remove("Jobs"); //tableList.Remove("MCS"); dr.Close(); bulk_conn.Close(); // TODO: 2013/4/13 把所有 Dictionary 換成 Dataset // step 2 取得所有target上的資料。 DataSet dsTarget = new DataSet(); // 取得 Target上面所有的 Table資料 Dictionary<string, int> keyTarget = new Dictionary<string, int>();// 取得Target 上面 有資料的 Table 最後一個KEY 作為換KEY的起始值 foreach (string tableName in tableList) { string queryGetTargetNowData = string.Format("Select * From {0}", tableName); using (SqlDataAdapter da = new SqlDataAdapter(queryGetTargetNowData, bulk_conn)) { da.FillSchema(dsTarget, SchemaType.Source, tableName); da.Fill(dsTarget, tableName); } string keyColumnName = dsTarget.Tables[tableName].Columns[0].ColumnName; // TODO: 取得FK的加入ForeignKeyConstraint限制 foreach (var i in source_db.Tables[tableName].ForeignKeys) { string x = i.ToString(); } // dsTarget.Tables[tableName].Constraints int targetJobsLastKey = dsTarget.Tables[tableName].AsEnumerable().LastOrDefault().Field<int>(keyColumnName); if (targetJobsLastKey != 0) { keyTarget.Add(tableName, targetJobsLastKey); } } // step 3. 
Target上面有資料的就換KEY 先把大家的KEY都換一輪 再來改參考的FK //foreach (KeyValuePair<string, int> key in keyTarget) //{ // int startKey = key.Value; // foreach (DataRow item in dtsTarget[key.Key].Rows) // { // startKey++; // item[0] = startKey; // } //} // step 4 . 大家都換完KEY 開始換 FK // step 5 . 換完 FK 排順序寫入 MCS -> JOBS -> etc } } } else // 有加時間範圍的 { } } #endregion // * situation 2 : no [table] exists [data range] // * situation 3 : exists [table] no [data range] // * situation 4 : exists [table] exists [data range] } //Smo.Server srv = new Smo.Server(); //// really you would get these from config or elsewhere: ////srv.ConnectionContext.Login = "******"; ////srv.ConnectionContext.Password = "******"; //srv.ConnectionContext.ServerInstance = @"(localdb)\v11.0"; //string dbName = "TEST"; //Smo.Database db = new Smo.Database(); //db = srv.Databases[dbName]; //StringBuilder sb = new StringBuilder(); //List<SechmaModel> sms = new List<SechmaModel>(); //foreach (Smo.Table tbl in db.Tables) //{ // SechmaModel model = new SechmaModel(); // Smo.ScriptingOptions options = new Smo.ScriptingOptions(); // options.ClusteredIndexes = true; // options.Default = true; // options.DriAll = true; // options.Indexes = true; // options.IncludeHeaders = true; // StringCollection coll = tbl.Script(options); // foreach (string str in coll) // { // sb.Append(str); // sb.Append(Environment.NewLine); // } //} //System.IO.StreamWriter fs = System.IO.File.CreateText("c:\\temp\\output.txt"); //fs.Write(sb.ToString()); //fs.Close(); //Step2. Build Table & Save FK Table //Step3. SQL Statement }
/// <summary>
/// Creates the database named by the connection string's Initial Catalog on the
/// server instance named by its Data Source.
/// </summary>
/// <param name="connectionString">Connection string identifying the server and database.</param>
private static void CreateInternal(string connectionString)
{
    var settings = new SqlConnectionStringBuilder(connectionString);
    var server = new Server(settings.DataSource);
    new Microsoft.SqlServer.Management.Smo.Database(server, settings.InitialCatalog).Create();
}
/// <summary>
/// Function that adds a new user with a new database (copied from admin account).
/// Prompts for credentials, creates a per-user database, a SQL login and database
/// user, grants DML permissions, then copies the admin's template database into it.
/// </summary>
public void _AddUserCommand()
{
    Views.Utils.NewLogInDialog newLogInDialog = new Views.Utils.NewLogInDialog("Please enter the new user login data!");

    // Fix: the original tested Password against "" twice; the second test was
    // evidently meant to validate the user name (it is used to build the database
    // and login names below).
    if (newLogInDialog.ShowDialog() == true && newLogInDialog.logIn.Password != "" && newLogInDialog.logIn.UserName != "")
    {
        // Create database
        Microsoft.SqlServer.Management.Smo.Database db = new Microsoft.SqlServer.Management.Smo.Database(server, newLogInDialog.logIn.UserName + "_db");
        db.Create();

        // Create login & user
        Login login = new Login(server, newLogInDialog.logIn.UserName);
        login.LoginType = LoginType.SqlLogin;
        login.Create(newLogInDialog.logIn.Password);

        User user = new User(db, newLogInDialog.logIn.UserName);
        user.Login = newLogInDialog.logIn.UserName;
        user.Create();
        trace.Value.TraceEvent(TraceEventType.Information, 0, "Created new User '" + user.Login + "'");

        // Creating database permission Sets
        DatabasePermissionSet databasePermissionSet = new DatabasePermissionSet();
        databasePermissionSet.Add(DatabasePermission.Insert);
        databasePermissionSet.Add(DatabasePermission.Update);
        databasePermissionSet.Add(DatabasePermission.Select);
        databasePermissionSet.Add(DatabasePermission.Delete);

        // Granting Database Permission Sets to Roles
        db.Grant(databasePermissionSet, newLogInDialog.logIn.UserName);
        trace.Value.TraceEvent(TraceEventType.Information, 0, "Granted permissions to User '" + user.Login + "'");

        // Copy database: clone the admin's template database into the new one
        // using the admin credentials for the destination connection.
        Microsoft.SqlServer.Management.Smo.Database adminDB = server.Databases[AdminLogIn.UserName + "_db"];
        Transfer transfer = new Transfer(adminDB);
        transfer.CopyAllTables = true;
        transfer.Options.WithDependencies = true;
        transfer.Options.DriAll = true;
        transfer.DestinationDatabase = newLogInDialog.logIn.UserName + "_db";
        transfer.DestinationServer = server.Name;
        transfer.DestinationLoginSecure = false;
        transfer.DestinationLogin = AdminLogIn.UserName;
        transfer.DestinationPassword = AdminLogIn.Password;
        transfer.CopySchema = true;
        transfer.TransferData();
        trace.Value.TraceEvent(TraceEventType.Information, 0, "Copied default database to User '" + user.Login + "'");

        FillUserList();
    }
}
/// <summary>
/// BackgroundWorker DoWork handler that backs up the selected tables from the source
/// database to the destination database (later variant: adds duplicate-row removal
/// for the MCS table and logs the caught exception's message).
/// If the destination database does not exist: creates it, transfers the schema of all
/// source tables via SMO Transfer, then bulk-copies the (optionally date-filtered) data.
/// If it does exist: after CheckVersion() succeeds, removes MCS rows already present
/// at the destination, re-keys rows of non-override tables past the destination's last
/// primary key, deletes override tables' destination data, and bulk-copies the merged
/// data. Exceptions are swallowed: reported via Log (including ex.Message) with
/// e.Result set to false.
/// NOTE(review): layout below appears flattened by extraction — left byte-identical.
/// e.Argument is assumed to be List&lt;CheckedListItem&gt;; "Jobs" and "MCS" tables are
/// assumed present in the source schema — TODO confirm against callers.
/// </summary>
public void bgwValidateConnection_DoWorkHandler(object sender, DoWorkEventArgs e) { try { List<CheckedListItem> backupTables = e.Argument as List<CheckedListItem>; Smo.Server srvDestination = new Smo.Server(Destination.ServerConnection); // 如果Db不存在 if (!srvDestination.Databases.Contains(Destination.Database)) { #region Create Database and Copy Table sechma Smo.Database newdb = new Smo.Database(srvDestination, Destination.Database); newdb.Create(); Smo.Server srvSource = new Smo.Server(Source.ServerConnection); Smo.Database dbSource = srvSource.Databases[Source.Database]; Smo.Transfer transfer = new Smo.Transfer(dbSource); transfer.CopyAllUsers = true; transfer.CopyAllObjects = true; transfer.CopyAllTables = false; transfer.CopyData = false; transfer.CopySchema = true; transfer.Options.WithDependencies = true; transfer.Options.DriAll = true; transfer.Options.ContinueScriptingOnError = false; // Create all table when database not exist foreach (var tbl in dbSource.Tables) { transfer.ObjectList.Add(tbl); } transfer.DestinationServer = Destination.Server; transfer.DestinationDatabase = newdb.Name; transfer.DestinationLoginSecure = Destination.LoginSecurity; if (!Destination.LoginSecurity) { transfer.DestinationLogin = Destination.UserId; transfer.DestinationPassword = Destination.Password; } transfer.TransferData(); #endregion #region Get Source Data and Filter DataSet ds = DbHelper.CopySechmaFromDatabase(Source.ConnectionString()); List<string> sortTables = DbHelper.Fill(Source.ConnectionString(), ds, backupTables.Select(b => b.Name).ToList()); if (ds.Tables["Jobs"].Rows.Count > 0) { ds.Tables["Jobs"].Rows.Cast<DataRow>().LastOrDefault().Delete(); // Always delete last record. 
ds.Tables["Jobs"].AcceptChanges(); if (IsDateFiltration) { ds.Tables["Jobs"].Rows.Cast<DataRow>().Where(j => (DateTime)j["Date"] < DateFrom || (DateTime)j["Date"] > DateTo).ToList().ForEach(j => j.Delete()); } foreach (var tbl in sortTables) { ds.Tables[tbl].AcceptChanges(); } } #endregion #region Execute SqlBulk Copy foreach (string tbl in sortTables) { DbHelper.ExecuteSqlBulk(Destination.ConnectionString(), ds.Tables[tbl]); this.Progress += 100 / backupTables.Count; if (DbHelper.SqlBulkLog.Count > 0) { this.Log += DbHelper.SqlBulkLog.LastOrDefault() + Environment.NewLine; } } #endregion } else { if (CheckVersion()) { // TODO: Job 這邊無法判斷 #region Get Source Data and Filter date range DataSet ds = DbHelper.CopySechmaFromDatabase(Source.ConnectionString()); List<string> sortTables = DbHelper.Fill(Source.ConnectionString(), ds, backupTables.Select(b => b.Name).ToList()); if (ds.Tables["Jobs"].Rows.Count > 0) { ds.Tables["Jobs"].Rows.Cast<DataRow>().LastOrDefault().Delete(); // Always delete last record. ds.Tables["Jobs"].AcceptChanges(); if (IsDateFiltration) { ds.Tables["Jobs"].Rows.Cast<DataRow>().Where(j => (DateTime)j["Date"] < DateFrom || (DateTime)j["Date"] > DateTo).ToList().ForEach(j => j.Delete()); } foreach (var tbl in sortTables) { ds.Tables[tbl].AcceptChanges(); } } #endregion #region Get destination PK list of table exists and modify for merge // filter override table don't modify key List<string> overrideTable = backupTables.Where(b => b.IsOverride == true).Select(b => b.Name).ToList(); foreach (string tbl in sortTables) { if (!overrideTable.Contains(tbl)) { string keycolumn = DbHelper.PrimaryKeyColumn(Destination.ConnectionString(), tbl); string sql = string.Format("Select TOP 1 {0} From {1} Order By {0} DESC", keycolumn, tbl); int? 
lastkey = (int?)(DbHelper.ReadOne(Destination.ConnectionString(), sql)); if (lastkey != null) { int newkey = (int)lastkey + 1; int row = ds.Tables[tbl].Rows.Count; ds.Tables[tbl].Columns[keycolumn].ReadOnly = false; #region Delete Duplicate First for (int i = row - 1; i >= 0; i--) { // check duplicate string sqlCheckDataDuplicate; int? duplicateNum; switch (tbl) { case "MCS": // NOTE: MCS sechma will appear many wrong situation. string sName = ds.Tables[tbl].Rows[i]["sName"].ToString(); string pkMCS = ds.Tables[tbl].Rows[i]["pkMCS"].ToString(); sqlCheckDataDuplicate = string.Format("Select Count(*) From MCS Where sName='{0}' AND pkMCS='{1}'", sName, pkMCS); duplicateNum = (int?)(DbHelper.ReadOne(Destination.ConnectionString(), sqlCheckDataDuplicate)); if (duplicateNum > 0) { ds.Tables[tbl].Rows[i].Delete(); } break; default: break; } ds.Tables[tbl].AcceptChanges(); } #endregion #region Change KEY row = ds.Tables[tbl].Rows.Count; for (int i = row - 1; i >= 0; i--) { ds.Tables[tbl].Rows[i][keycolumn] = newkey + i; } #endregion ds.Tables[tbl].AcceptChanges(); } } } ds.AcceptChanges(); #endregion #region Delete override table data List<string> clearTable = backupTables.Where(b => b.IsOverride == true).Select(b => b.Name).ToList(); DeleteDestinationOverride(clearTable); #endregion #region Execute SqlBulk Copy foreach (string tbl in sortTables) { DbHelper.ExecuteSqlBulk(Destination.ConnectionString(), ds.Tables[tbl]); this.Progress += 100 / backupTables.Count; if (DbHelper.SqlBulkLog.Count > 0) { this.Log += DbHelper.SqlBulkLog.LastOrDefault() + Environment.NewLine; } } #endregion } else { this.Log += "*** Database upgrade failed ***" + Environment.NewLine; e.Result = false; } } } catch (Exception ex) { this.Log += "An error occurred during backup database." + Environment.NewLine; this.Log += ex.Message.ToString() + Environment.NewLine; e.Result = false; } // DONE: 1. Check destination database exsits. // DONE: 2. If No date range, use transfer copy all database. 
// DONE: 2-1. If use date range, DataTable.Select(); filter Jobs key (klKey) and filter another table has FK by Jobs (fkJobKey, klJobKey) // DONE: 2-2. Use Sqlbulk copy datatable // DONE: 3. If YES Check db version // CheckVersion(); // DONE: 3-1. Source > Destination => upgrade scripts // DONE: 3-2. Source == Destination => Run step 4 for merge data. // DONE: 3-3. Source < Destination => false; alert message and block; // DONE: 4. Deal table releationship PK/FK to create DataSet & Datatable from Source. // DONE: 5. If table of ObservTable selected get record of Destination last PK int. // List<Record> Record.LastKey, Record.TableName, Record.PKColumnName // TODO: 6. DataSet.Fill(); get data and filter date range. // TODO: 7. Use Sqlbulk copy datatable. }