Example #1
        private static async Task<bool> ImportTableDelta(string SqlInstance, string TableName)
        {
            Stopwatch sw = Stopwatch.StartNew();

            using (SqlConnection repoConnection = SqlWatchRepo.Connection())
            {
                await repoConnection.OpenAsync();

                string snapshotTime = "1970-01-01";

                using (SqlCommand cmdGetLastSnapshotTime = new SqlCommand($"select [snapshot_time]=isnull(convert(varchar(23),max([snapshot_time]),121),'{ snapshotTime }') from { TableName } with (nolock)", repoConnection))
                {
                    Stopwatch ls = Stopwatch.StartNew();
                    snapshotTime = (await cmdGetLastSnapshotTime.ExecuteScalarAsync()).ToString();
                    t3          += ls.Elapsed.TotalMilliseconds;
                }

                // Logger tables must have a valid header record. Despite the dependency order, we may try to import new logger records after the header has already been imported.
                // This can happen if the import runs for a long time and enough time passes between the header and the logger records for new data to be inserted at the source.
                // We must therefore only import logger records up to the max snapshot time in the repository, which requires knowing the snapshot_type_id of each table.

                using (SqlConnection remoteConnection = SqlWatchRemote.Connection(SqlInstance))
                {
                    await remoteConnection.OpenAsync();
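                    // Pull only the delta: rows newer than the repository's last snapshot,
                    // but no newer than the last imported header snapshot, so logger rows
                    // never arrive ahead of their header record.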

                    using (SqlCommand remoteCommand = new SqlCommand($"select * " +
                                                                     $"from { TableName } " +
                                                                     $"where [snapshot_time] > '{ snapshotTime }' " +
                                                                     $"and [snapshot_time] <= '{ SqlWatchRepo.LastHeaderSnapshotDate(SqlInstance, SqlWatchRepo.TableSnapshotType(TableName)) }'", remoteConnection))
                    {
                        remoteCommand.CommandTimeout = Config.DataCopyExecTimeout;

                        Stopwatch bk2 = Stopwatch.StartNew();
                        using (SqlDataReader reader = await remoteCommand.ExecuteReaderAsync())
                        {
                            using (SqlBulkCopy sqlBulkCopy = new SqlBulkCopy(repoConnection, SqlBulkCopyOptions.KeepIdentity, null))
                            {
                                sqlBulkCopy.DestinationTableName = TableName;
                                sqlBulkCopy.BulkCopyTimeout      = Config.DataCopyExecTimeout;

                                if (Config.StreamData)
                                {
                                    sqlBulkCopy.EnableStreaming = true;
                                }

                                // Enumerating the reader (e.g. reader.Cast<object>().Count())
                                // would consume it before the bulk copy, so count rows via the
                                // SqlRowsCopied event instead.
                                long rowCount = 0;
                                sqlBulkCopy.NotifyAfter    = 1;
                                sqlBulkCopy.SqlRowsCopied += (sender, args) => rowCount = args.RowsCopied;

                                Logger.LogVerbose($"Writing remote data from [{ SqlInstance }].{ TableName }");

                                try
                                {
                                    await sqlBulkCopy.WriteToServerAsync(reader);

                                    t4 += bk2.Elapsed.TotalMilliseconds;
                                    Logger.LogVerbose($"Bulk copied { rowCount } row{ (rowCount == 1 ? "" : "s") } from [{ SqlInstance }].{ TableName } in { sw.Elapsed.TotalMilliseconds }ms");

                                    Logger.LogSuccess($"Imported { TableName } from { SqlInstance } in { sw.Elapsed.TotalMilliseconds }ms");
                                }
                                catch (Exception e)
                                {
                                    Logger.LogError($"Failed to Bulk Copy data from [{ SqlInstance }].{ TableName }");
                                    Logger.LogError(e.ToString());
                                    return false;
                                }
                            }
                        }
                    }
                }
            }
            return true;
        }
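Both examples splice values into the SQL text with string interpolation. For the scalar lookups a parameterized variant is safer; below is a minimal sketch (the helper name GetLastSnapshotTimeAsync is hypothetical, System.Data is assumed to be imported for SqlDbType, and the table name must still come from trusted repository metadata, because identifiers cannot be bound as parameters):

        private static async Task<string> GetLastSnapshotTimeAsync(SqlConnection repoConnection, string tableName)
        {
            // tableName is assumed to be validated against repository metadata;
            // identifiers cannot be passed as SQL parameters.
            string sql = $"select isnull(convert(varchar(23), max([snapshot_time]), 121), @epoch) from { tableName } with (nolock);";

            using (SqlCommand cmd = new SqlCommand(sql, repoConnection))
            {
                // The epoch fallback is bound as a parameter instead of being
                // interpolated into the statement text.
                cmd.Parameters.Add("@epoch", SqlDbType.VarChar, 23).Value = "1970-01-01";
                return (await cmd.ExecuteScalarAsync()).ToString();
            }
        }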
Example #2
        private static async Task<bool> ImportTableFull(string SqlInstance, string TableName)
        {
            Stopwatch tt = Stopwatch.StartNew();

            using (SqlConnection repoConnection = SqlWatchRepo.Connection())
            {
                await repoConnection.OpenAsync();

                string PkColumns = SqlWatchRepo.TablePrimaryKey(TableName);
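                // "select top 0 * into #T" clones the destination schema into an empty
                // temporary landing table; the primary key (when one exists) keeps the
                // later merge join efficient.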

                string sql = $"select top 0 * into [#{ TableName }] from { TableName };";

                if (PkColumns != "")
                {
                    sql += $"alter table [#{ TableName }] add primary key ({ PkColumns }); ";
                }

                using (SqlCommand repoCommand = new SqlCommand(sql, repoConnection))
                {
                    Logger.LogVerbose($"Preparing landing table for [{ SqlInstance }].{ TableName }");

                    try
                    {
                        await repoCommand.ExecuteNonQueryAsync();
                    }
                    catch (SqlException e)
                    {
                        Logger.LogError($"Failed to prepare table for [{ SqlInstance }].{ TableName }", e.Errors[0].Message);
                        return false;
                    }
                }

                using (SqlConnection remoteConnection = SqlWatchRemote.Connection(SqlInstance))
                {
                    await remoteConnection.OpenAsync();

                    // Logger tables must have a valid header record. Despite the dependency order, we may try to import new logger records after the header has already been imported.
                    // This can happen if the import runs for a long time and enough time passes between the header and the logger records for new data to be inserted at the source.
                    // We must therefore only import logger records up to the max snapshot time in the repository, which requires knowing the snapshot_type_id of each table.

                    string lastSeenInRepo = "";
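                    // When enabled, the repository's max date_last_seen acts as a high-water
                    // mark so only rows seen at the source since the last import are pulled.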
                    if (Config.respectDateLastSeen && SqlWatchRepo.Table.HasColumnLastSeen(TableName))
                    {
                        sql = $"select isnull(max(date_last_seen),'1970-01-01') from { TableName }";
                        using (SqlCommand cmdGetRepoLastSeen = new SqlCommand(sql, repoConnection))
                        {
                            lastSeenInRepo = (await cmdGetRepoLastSeen.ExecuteScalarAsync()).ToString();
                        }
                    }

                    if (TableName.Contains("sqlwatch_logger"))
                    {
                        sql = $"select * from { TableName } where snapshot_time <= '{ SqlWatchRepo.LastHeaderSnapshotDate(SqlInstance, SqlWatchRepo.TableSnapshotType(TableName)) }'";
                        if (lastSeenInRepo != "")
                        {
                            sql += $" and date_last_seen > '{ lastSeenInRepo }'";
                        }
                    }
                    else
                    {
                        sql = $"select * from { TableName }";
                        if (lastSeenInRepo != "")
                        {
                            sql += $" where date_last_seen > '{ lastSeenInRepo }'";
                        }
                    }

                    using (SqlCommand cmdGetData = new SqlCommand(sql, remoteConnection))
                    {
                        cmdGetData.CommandTimeout = Config.DataCopyExecTimeout;
                        // Import remote data into the #temp landing table.
                        try
                        {
                            Stopwatch bk1 = Stopwatch.StartNew();
                            using (SqlDataReader reader = await cmdGetData.ExecuteReaderAsync())
                            {
                                Logger.LogVerbose($"Preparing to Bulk Copy remote data from [{ SqlInstance }].{ TableName } to landing table #{ TableName }");

                                using (SqlBulkCopy sqlBulkCopy = new SqlBulkCopy(repoConnection, SqlBulkCopyOptions.KeepIdentity, null))
                                {
                                    sqlBulkCopy.DestinationTableName = $"[#{ TableName }]";
                                    sqlBulkCopy.BulkCopyTimeout      = Config.DataCopyExecTimeout;
                                    if (Config.StreamData)
                                    {
                                        sqlBulkCopy.EnableStreaming = true;
                                    }

                                    try
                                    {
                                        Logger.LogVerbose($"Copying remote data from [{ SqlInstance }].{ TableName } to landing table #{ TableName } using BulkCopy.");
                                        await sqlBulkCopy.WriteToServerAsync(reader);

                                        t1 += bk1.Elapsed.TotalMilliseconds;
                                        Logger.LogVerbose($"Bulk Copied remote data from [{ SqlInstance }].{ TableName } to landing table #{ TableName } in { bk1.Elapsed.TotalMilliseconds }ms. Awaiting Merge.");
                                    }
                                    catch (SqlException e)
                                    {
                                        Logger.LogError($"Failed to Bulk Copy data from [{ SqlInstance }].{ TableName }", e.Errors[0].Message);
                                        return false;
                                    }
                                }
                            }
                        }
                        catch (SqlException e)
                        {
                            Logger.LogError($"Failed to populate DataReader with remote Data from [{ SqlInstance }].{ TableName }", e.Errors[0].Message, sql);
                            return false;
                        }
                    }

                    sql = "";
                    if (SqlWatchRepo.TableHasIdentity(TableName))
                    {
                        sql += $"\nset identity_insert { TableName } on;";
                    }

                    string allColumns = SqlWatchRepo.TableColumns(TableName);
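                    // Build an upsert: rows missing from the destination are inserted with
                    // all columns; matched rows are updated only when the table has
                    // updatable (non-key) columns.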

                    sql += $@"
							;merge { TableName } as target
								using [#{ TableName }] as source
							on ({ SqlWatchRepo.TableMergeJoins(TableName) })
							when not matched
							then insert ({ allColumns })
							values (source.{ allColumns.Replace(",", ",source.") })";

                    string updateColumns = SqlWatchRepo.TableMergeUpdateColumns(TableName);
                    if (updateColumns != "")
                    {
                        sql += $@"
							when matched
							then update set
							{ updateColumns }";
                    }

                    sql += ";";

                    if (SqlWatchRepo.TableHasIdentity(TableName))
                    {
                        sql += $"\nset identity_insert { TableName } off;";
                    }

                    using (SqlCommand cmdMergeTable = new SqlCommand(sql, repoConnection))
                    {
                        cmdMergeTable.CommandTimeout = Config.DataCopyExecTimeout;

                        Logger.LogVerbose($"Merging [{ SqlInstance }].{ TableName }");
                        try
                        {
                            Stopwatch mg    = Stopwatch.StartNew();
                            int       nRows = await cmdMergeTable.ExecuteNonQueryAsync();

                            t2 += mg.Elapsed.TotalMilliseconds;
                            Logger.LogVerbose($"Merged { nRows } row{ (nRows == 1 ? "" : "s") } from [{ SqlInstance }].{ TableName } in { mg.Elapsed.TotalMilliseconds }ms");
                            Logger.LogSuccess($"Imported { TableName } from { SqlInstance } in { tt.Elapsed.TotalMilliseconds }ms");

                            return true;
                        }
                        catch (SqlException e)
                        {
                            Logger.LogError($"Failed to merge table [{ SqlInstance }].{ TableName }", e.Errors[0].Message, sql);
                            // Dump the landing table to a physical table to help debugging.
                            sql = $"select * into [_DUMP_{ DateTime.Now:yyyyMMddHHmmssfff}_{ SqlInstance }.{ TableName }] from [#{ TableName }]";
                            using (SqlCommand cmdDumpData = new SqlCommand(sql, repoConnection))
                            {
                                try
                                {
                                    await cmdDumpData.ExecuteNonQueryAsync();
                                }
                                catch (SqlException x)
                                {
                                    Logger.LogError("Failed to dump data into a table for debugging", x.Errors[0].Message, sql);
                                    return false;
                                }
                            }
                            return false;
                        }
                    }
                }
            }
        }
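Both methods return Task<bool>, so a caller can walk the table list in dependency order and react to failures. A minimal, hypothetical driver is sketched below; it is assumed to live in the same class as the two methods above (so the private methods are callable), System.Collections.Generic is assumed to be imported, and the table list and the full-versus-delta decision are placeholders:

        private static async Task ImportAllTablesAsync(string sqlInstance, IEnumerable<string> tablesInDependencyOrder, bool fullLoad)
        {
            foreach (string tableName in tablesInDependencyOrder)
            {
                // Header tables must be imported before logger tables, so the
                // input sequence is assumed to already be in dependency order.
                bool imported = fullLoad
                    ? await ImportTableFull(sqlInstance, tableName)
                    : await ImportTableDelta(sqlInstance, tableName);

                if (!imported)
                {
                    Logger.LogError($"Import of { tableName } from { sqlInstance } did not complete.");
                }
            }
        }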