/// <summary>
/// Takes a backup named "DataLoadEngineBackup" of the database identified by <see cref="DatabaseToBackup"/>.
/// Always reports success (CreateBackup throws on failure).
/// </summary>
public ExitCodeType Mutilate(IDataLoadJob job)
{
    var databaseToBackup = DataAccessPortal.GetInstance().ExpectDatabase(DatabaseToBackup, DataAccessContext.DataLoad);

    databaseToBackup.CreateBackup("DataLoadEngineBackup");

    return ExitCodeType.Success;
}
/// <summary>
/// Runs the configured <see cref="Attacher"/> against the job, reporting failures as Error.
/// Afterwards (success or failure) verifies the Attacher did not change the server/database
/// connection details it was handed.
/// </summary>
public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    // Snapshot the connection details so we can detect tampering by the Attacher
    var serverBefore = RuntimeArguments.StageSpecificArguments.DbInfo.Server.Name;
    var databaseBefore = RuntimeArguments.StageSpecificArguments.DbInfo.Server.GetCurrentDatabase().GetRuntimeName();
    var databaseTypeBefore = RuntimeArguments.StageSpecificArguments.DbInfo.Server.DatabaseType;

    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to run Task '" + ProcessTask.Name + "'"));
    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Attacher class is:" + Attacher.GetType().FullName));

    try
    {
        return Attacher.Attach(job, cancellationToken);
    }
    catch (Exception e)
    {
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Attach failed on job " + job + " Attacher was of type " + Attacher.GetType().Name + " see InnerException for specifics", e));
        return ExitCodeType.Error;
    }
    finally
    {
        var serverAfter = RuntimeArguments.StageSpecificArguments.DbInfo.Server.Name;
        var databaseAfter = RuntimeArguments.StageSpecificArguments.DbInfo.Server.GetCurrentDatabase().GetRuntimeName();
        var databaseTypeAfter = RuntimeArguments.StageSpecificArguments.DbInfo.Server.DatabaseType;

        bool connectionUnchanged = serverBefore.Equals(serverAfter)
                                   && databaseBefore.Equals(databaseAfter)
                                   && databaseTypeBefore == databaseTypeAfter;

        if (!connectionUnchanged)
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Attacher " + Attacher.GetType().Name + " modified the ConnectionString during attaching"));
    }
}
/// <summary>
/// Test attacher: records (in PayloadTest.Success) whether the payload carried by the job is
/// the exact object instance that was expected.
/// </summary>
public override ExitCodeType Attach(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Found Payload:" + job.Payload));

    // Reference comparison - we care about the same instance, not value equality
    PayloadTest.Success = ReferenceEquals(job.Payload, payload);

    return ExitCodeType.OperationNotRequired;
}
/// <summary>
/// Creates a MigrateRAWTableToStaging component for the given table, registers it in
/// _tableMigrations (before running, so it is tracked even if the run fails) and runs it.
/// </summary>
private void MigrateRAWTableToStaging(IDataLoadJob job, ITableInfo tableInfo, bool isLookupTable, GracefulCancellationToken cancellationToken)
{
    var migration = new MigrateRAWTableToStaging(tableInfo, isLookupTable, _databaseConfiguration);

    _tableMigrations.Add(migration);

    migration.Run(job, cancellationToken);
}
/// <summary>
/// Verifies that ExecuteSqlFileRuntimeTask executes a plain (non-templated) sql script file
/// against the database, updating the single seeded row from 2 to 1.
/// </summary>
public void ExecuteSqlFileRuntimeTask_BasicScript(DatabaseType dbType)
{
    var seed = new DataTable();
    seed.Columns.Add("Lawl");
    seed.Rows.Add(new object[] { 2 });

    var db = GetCleanedServer(dbType, true);
    var tbl = db.CreateTable("Fish", seed);

    // Write the script the task will pick up and execute
    var scriptFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Bob.sql"));
    File.WriteAllText(scriptFile.FullName, @"UPDATE Fish Set Lawl = 1");

    var pt = Mock.Of<IProcessTask>(x => x.Path == scriptFile.FullName);
    var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "ExecuteSqlFileRuntimeTaskTests", true);

    var task = new ExecuteSqlFileRuntimeTask(pt, new RuntimeArgumentCollection(new IArgument[0], new StageArgs(LoadStage.AdjustRaw, db, dir)));
    task.Check(new ThrowImmediatelyCheckNotifier());

    IDataLoadJob job = Mock.Of<IDataLoadJob>();
    task.Run(job, new GracefulCancellationToken());

    // Script should have updated the row
    Assert.AreEqual(1, tbl.GetDataTable().Rows[0][0]);

    tbl.Drop();
}
/// <summary>
/// Fires FireMutilate for each regular table in the job that is either explicitly listed in
/// OnlyTables (when any are specified) or whose runtime name matches TableRegexPattern
/// (matched case-insensitively).
/// </summary>
/// <exception cref="Exception">Thrown when neither OnlyTables nor TableRegexPattern is configured.</exception>
public ExitCodeType Mutilate(IDataLoadJob job)
{
    bool useExplicitTableList = OnlyTables != null && OnlyTables.Any();

    // Fix: validate configuration up front.  Previously this throw lived inside the table loop,
    // so a misconfigured component silently returned Success when the job had no tables.
    if (!useExplicitTableList && TableRegexPattern == null)
        throw new Exception("You must specify either TableRegexPattern or OnlyTables");

    // Rebuild the pattern so matching is always case-insensitive regardless of how it was declared
    if (TableRegexPattern != null)
        TableRegexPattern = new Regex(TableRegexPattern.ToString(), RegexOptions.IgnoreCase);

    foreach (var tableInfo in job.RegularTablesToLoad)
    {
        if (useExplicitTableList)
        {
            if (OnlyTables.Contains(tableInfo))
                FireMutilate(tableInfo, job);
        }
        else if (TableRegexPattern.IsMatch(tableInfo.GetRuntimeName()))
        {
            FireMutilate(tableInfo, job);
        }
    }

    return ExitCodeType.Success;
}
/// <summary>
/// Runs the file-gathering stage: executes configured components if any exist, otherwise uses
/// (or warns about the absence of) files already present in ForLoading.  Always schedules the
/// ForLoading cleanup operation for disposal.
/// </summary>
public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    if (Skip(job))
        return ExitCodeType.Error;

    //This default is returned unless there is an explicit DataProvider or collection of runtime tasks to run which return a different result (see below)
    var result = ExitCodeType.Success;

    // Figure out where we are getting the source files from
    try
    {
        if (Components.Any())
            result = base.Run(job, cancellationToken);
        else if (job.LoadDirectory.ForLoading.EnumerateFileSystemInfos().Any())
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Using existing files in '" + job.LoadDirectory.ForLoading.FullName + "', there are no GetFiles processes or DataProviders configured"));
        else
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "There are no GetFiles tasks and there are no files in the ForLoading directory (" + job.LoadDirectory.ForLoading.FullName + ")"));
    }
    finally
    {
        // We can only clean up ForLoading after the job is finished, so give it the necessary disposal operation
        job.PushForDisposal(new DeleteForLoadingFilesOperation(job));
    }

    return result;
}
/// <summary>
/// Migrates each column set in turn, logging insert/update counts into a TableLoadInfo that is
/// closed and archived even when migration of that table fails.  Honours cancellation between tables.
/// </summary>
public void Execute(IDataLoadJob job, IEnumerable<MigrationColumnSet> toMigrate, IDataLoadInfo dataLoadInfo, GracefulCancellationToken cancellationToken)
{
    _dataLoadInfo = dataLoadInfo;

    // Column set for each table we are migrating
    foreach (var columnSet in toMigrate)
    {
        var inserts = 0;
        var updates = 0;
        var destinationName = columnSet.DestinationTable.GetFullyQualifiedName();

        var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo(
            "",
            destinationName,
            new[] { new DataSource(columnSet.SourceTable.GetFullyQualifiedName(), DateTime.Now) },
            0);

        try
        {
            MigrateTable(job, columnSet, dataLoadInfo.ID, cancellationToken, ref inserts, ref updates);
            OnTableMigrationCompleteHandler(destinationName, inserts, updates);

            tableLoadInfo.Inserts = inserts;
            tableLoadInfo.Updates = updates;
            tableLoadInfo.Notes = "Part of Transaction";
        }
        finally
        {
            // Always close the audit record, even if the migration threw
            tableLoadInfo.CloseAndArchive();
        }

        cancellationToken.ThrowIfCancellationRequested();
    }
}
/// <summary>
/// Archives the contents of ForLoading (minus ignored directories) into a zip named after the
/// data load ID in the ForArchiving directory.  Warns and succeeds if there is nothing to archive;
/// throws if the destination zip already exists.
/// </summary>
public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    if (Skip(job))
        return ExitCodeType.Success;

    var datasetID = job.DataLoadInfo.ID;
    var destFile = Path.Combine(job.LoadDirectory.ForArchiving.FullName, datasetID + ".zip");

    // If there is nothing in the forLoadingDirectory then
    // There may be a HiddenFromArchiver directory with data that may be processed by another component, but this component should *always* archive *something* even if it is just some metadata about the load (if, for example, imaging data is being loaded which is too large to archive)
    if (!FoundFilesOrDirsToArchive(job))
    {
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "There is nothing to archive: " + job.LoadDirectory.ForLoading.FullName + " is empty after completion of the load process and there is no hidden archive directory (" + HiddenFromArchiver + ")."));
        return ExitCodeType.Success;
    }

    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Archiving to " + destFile));

    if (File.Exists(destFile))
        throw new Exception("Cannot archive files, " + destFile + " already exists");

    // create directory for zipping, leaving out __hidden_from_archiver__
    var zipDir = job.LoadDirectory.ForLoading.CreateSubdirectory(TempArchiveDirName);

    MoveDirectories(job, zipDir);
    MoveFiles(job, zipDir);

    ZipFile.CreateFromDirectory(zipDir.FullName, destFile);

    return ExitCodeType.Success;
}
/// <summary>
/// Imports every ShareDefinition (*.sd) file found in the job's ForLoading directory into the
/// repository.  A SharingException aborts further imports but is reported only as a warning;
/// a summary count is always logged and the component always reports Success.
/// </summary>
public ExitCodeType Fetch(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    int imported = 0;

    try
    {
        var shareManager = new ShareManager(job.RepositoryLocator);

        foreach (var shareDefinitionFile in job.LoadDirectory.ForLoading.EnumerateFiles("*.sd"))
        {
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Found '" + shareDefinitionFile.Name + "'"));

            using (var stream = File.Open(shareDefinitionFile.FullName, FileMode.Open))
                shareManager.ImportSharedObject(stream);

            imported++;

            // Fixed typo in log message: "Succesfully" -> "Successfully"
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Imported '" + shareDefinitionFile.Name + "' Successfully"));
        }
    }
    catch (SharingException ex)
    {
        // Fixed typo in log message: "occured" -> "occurred"
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Error occurred importing ShareDefinitions", ex));
    }

    job.OnNotify(this, new NotifyEventArgs(imported == 0 ? ProgressEventType.Warning : ProgressEventType.Information, "Imported " + imported + " ShareDefinition files"));

    return ExitCodeType.Success;
}
/// <summary>
/// Announces the task and provider being used, then delegates the actual work to
/// <see cref="Provider"/>.Fetch and returns its result.
/// </summary>
public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to run Task '" + ProcessTask.Name + "'"));
    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to fetch data using class " + Provider.GetType().FullName));

    var result = Provider.Fetch(job, cancellationToken);
    return result;
}
/// <summary>
/// Deletes rows from the load-stage incarnation of <see cref="ColumnToResolveOn"/>'s table,
/// resolving on that table's primary key columns.
/// </summary>
public ExitCodeType Mutilate(IDataLoadJob job)
{
    var runtimeTableName = ColumnToResolveOn.TableInfo.GetRuntimeName(_loadStage, job.Configuration.DatabaseNamer);
    var table = _database.ExpectTable(runtimeTableName);

    var primaryKeys = ColumnToResolveOn.TableInfo.ColumnInfos.Where(ci => ci.IsPrimaryKey).ToArray();

    DeleteRows(table, primaryKeys, job);

    return ExitCodeType.Success;
}
/// <summary>
/// Converts the job to a scheduled job, computes its cache workload and extracts the
/// corresponding cached files for loading.
/// </summary>
public override ExitCodeType Fetch(IDataLoadJob dataLoadJob, GracefulCancellationToken cancellationToken)
{
    var scheduledJob = ConvertToScheduledJob(dataLoadJob);

    GetDataLoadWorkload(scheduledJob);
    ExtractJobs(scheduledJob);

    return ExitCodeType.Success;
}
/// <summary>
/// Moves every file in ForLoading into the zip staging directory.
/// The list is materialised first because we are moving files out of the directory being enumerated.
/// </summary>
private static void MoveFiles(IDataLoadJob job, DirectoryInfo zipDir)
{
    var files = job.LoadDirectory.ForLoading.EnumerateFiles().ToList();

    foreach (var file in files)
        file.MoveTo(Path.Combine(zipDir.FullName, file.Name));
}
/// <summary>
/// Moves every subdirectory of ForLoading (except those in DirsToIgnore) into the zip staging
/// directory.  The list is materialised first because we mutate the directory being enumerated.
/// </summary>
private static void MoveDirectories(IDataLoadJob job, DirectoryInfo zipDir)
{
    var directories = job.LoadDirectory.ForLoading
        .EnumerateDirectories()
        .Where(d => !DirsToIgnore.Contains(d.Name))
        .ToList();

    foreach (var directory in directories)
        directory.MoveTo(Path.Combine(zipDir.FullName, directory.Name));
}
/// <summary>
/// Evaluates <see cref="ConditionsToTerminateUnder"/> and returns
/// <see cref="ExitCodeToReturnIfConditionMet"/> when the condition holds (terminating the load),
/// otherwise Success so the load continues.  Throws on unrecognised conditions.
/// </summary>
public ExitCodeType Mutilate(IDataLoadJob job)
{
    if (ConditionsToTerminateUnder == PrematureLoadEndCondition.Always)
    {
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "ConditionsToTerminateUnder is " + ConditionsToTerminateUnder + " so terminating load with " + ExitCodeToReturnIfConditionMet));
        return ExitCodeToReturnIfConditionMet;
    }

    if (ConditionsToTerminateUnder == PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase)
    {
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to inspect what tables have rows in them in database " + _databaseInfo.GetRuntimeName()));

        foreach (var table in _databaseInfo.DiscoverTables(false))
        {
            int rowCount = table.GetRowCount();
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Found table " + table.GetRuntimeName() + " with row count " + rowCount));

            // Any non-empty table means the condition is not met - load continues
            if (rowCount > 0)
            {
                job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Found at least 1 record in 1 table so condition " + ConditionsToTerminateUnder + " is not met. \nTherefore returning Success so the load can continue normally."));
                return ExitCodeType.Success;
            }
        }

        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "No tables had any rows in them so returning " + ExitCodeToReturnIfConditionMet + " which should terminate the load here"));
        return ExitCodeToReturnIfConditionMet;
    }

    if (ConditionsToTerminateUnder == PrematureLoadEndCondition.NoFilesInForLoading)
    {
        var dataLoadJob = job as IDataLoadJob;
        if (dataLoadJob == null)
            throw new Exception("IDataLoadEventListener " + job + " was not an IDataLoadJob (very unexpected)");

        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to check ForLoading directory for files, the directory is:" + dataLoadJob.LoadDirectory.ForLoading.FullName));

        var files = dataLoadJob.LoadDirectory.ForLoading.GetFiles();

        if (!files.Any())
        {
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "No files in ForLoading so returning " + ExitCodeToReturnIfConditionMet + " which should terminate the load here"));
            return ExitCodeToReturnIfConditionMet;
        }

        // There were files present so the condition is not met
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Found " + files.Length + " files in ForLoading so not terminating (" + string.Join(",", files.Select(f => f.Name)) + ")"));
        return ExitCodeType.Success;
    }

    throw new Exception("Didn't know how to handle condition:" + ConditionsToTerminateUnder);
}
/// <summary>
/// Opens the given Excel workbook (.xls legacy binary or OOXML otherwise) and saves each
/// worksheet whose name matches the configured filter into ForLoading as a csv file,
/// optionally prefixing the filename with the workbook name.
/// </summary>
private void ProcessFile(FileInfo fileInfo, IDataLoadJob job)
{
    using (var stream = new FileStream(fileInfo.FullName, FileMode.Open))
    {
        // .xls is the old binary format; everything else is treated as xlsx/OOXML
        IWorkbook workbook = fileInfo.Extension == ".xls"
            ? (IWorkbook)new HSSFWorkbook(stream)
            : new XSSFWorkbook(stream);

        try
        {
            var source = new ExcelDataFlowSource();
            source.PreInitialize(new FlatFileToLoad(fileInfo), job);

            for (int i = 0; i < workbook.NumberOfSheets; i++)
            {
                var sheet = workbook.GetSheetAt(i);

                if (!IsWorksheetNameMatch(sheet.SheetName))
                {
                    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Ignoring worksheet:" + sheet.SheetName));
                    continue;
                }

                job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Started processing worksheet:" + sheet.SheetName));

                string newName = PrefixWithWorkbookName
                    ? Path.GetFileNameWithoutExtension(fileInfo.FullName) + "_" + sheet.SheetName
                    : sheet.SheetName;

                //make it sensible
                newName = new MicrosoftQuerySyntaxHelper().GetSensibleTableNameFromString(newName) + ".csv";

                string savePath = Path.Combine(job.LoadDirectory.ForLoading.FullName, newName);

                var data = source.GetAllData(sheet, job);
                data.SaveAsCsv(savePath);

                job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Saved worksheet as " + newName));
            }
        }
        finally
        {
            workbook.Close();
        }
    }
}
/// <summary>
/// Returns true (after warning the job) when this component is configured to be skipped.
/// </summary>
protected bool Skip(IDataLoadJob job)
{
    if (!SkipComponent)
        return false;

    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Skipped load component: " + Description));
    return true;
}
/// <summary>
/// Builds the SQL parameter declarations (start/end date) for a scheduled load based on the
/// job's DatesToRetrieve.  When the target database does not support embedded parameters the
/// dates are stashed in _minDateParam/_maxDateParam and an empty string is returned.
/// </summary>
/// <param name="job">Must be a <see cref="ScheduledDataLoadJob"/>.</param>
/// <param name="scheduleMismatch">True if the job's LoadProgress is not this component's Progress (caller should skip).</param>
/// <exception cref="NotSupportedException">Job is not a ScheduledDataLoadJob.</exception>
/// <exception cref="Exception">Min/max dates are inconsistent, or the load would fetch future data.</exception>
private string GetScheduleParameterDeclarations(IDataLoadJob job, out bool scheduleMismatch)
{
    var jobAsScheduledJob = job as ScheduledDataLoadJob;
    if (jobAsScheduledJob == null)
        throw new NotSupportedException("Job must be of type " + typeof(ScheduledDataLoadJob).Name + " because you have specified a LoadProgress");

    //if the currently scheduled job is not our Schedule then it is a mismatch and we should skip it
    scheduleMismatch = !jobAsScheduledJob.LoadProgress.Equals(Progress);

    DateTime min = jobAsScheduledJob.DatesToRetrieve.Min();
    DateTime max = jobAsScheduledJob.DatesToRetrieve.Max();

    //since it's a date time and fetch list is Dates then we should set the max to the last second of the day (23:59:59) but leave the min as the first second of the day (00:00:00). This allows for single day loads too
    if (max.Hour == 0 && max.Minute == 0 && max.Second == 0)
        max = max.AddHours(23).AddMinutes(59).AddSeconds(59);

    if (min >= max)
        throw new Exception("Problematic max and min dates(" + max + " and " + min + " respectively)");

    // Fix: validate against loading future data BEFORE the embedded-parameters early return.
    // Previously this check sat after that return, so it was silently skipped for databases
    // that do not support embedded parameters.
    if (min > DateTime.Now)
        throw new Exception(FutureLoadMessage + " (min is " + min + ")");

    var syntaxHelper = _remoteDatabase.Server.Helper.GetQuerySyntaxHelper();

    if (!syntaxHelper.SupportsEmbeddedParameters())
    {
        // No declarations possible - stash the values for later substitution instead
        _minDateParam = min;
        _maxDateParam = max;
        return "";
    }

    var declareStartDateParameter = syntaxHelper.GetParameterDeclaration(StartDateParameter, new DatabaseTypeRequest(typeof(DateTime)));
    var declareEndDateParameter = syntaxHelper.GetParameterDeclaration(EndDateParameter, new DatabaseTypeRequest(typeof(DateTime)));

    string startSql = declareStartDateParameter + Environment.NewLine;
    startSql += "SET " + StartDateParameter + " = '" + min.ToString("yyyy-MM-dd HH:mm:ss") + "';" + Environment.NewLine;

    string endSQL = declareEndDateParameter + Environment.NewLine;
    endSQL += "SET " + EndDateParameter + " = '" + max.ToString("yyyy-MM-dd HH:mm:ss") + "';" + Environment.NewLine;

    return startSql + endSQL + Environment.NewLine;
}
/// <summary>
/// Downcasts the job to a ScheduledDataLoadJob, throwing a descriptive error if it is not one.
/// </summary>
protected static ScheduledDataLoadJob ConvertToScheduledJob(IDataLoadJob dataLoadJob)
{
    if (dataLoadJob is ScheduledDataLoadJob scheduledJob)
        return scheduledJob;

    throw new Exception("CachedFileRetriever can only be used in conjunction with a ScheduledDataLoadJob");
}
/// <summary>
/// True when ForLoading contains any files, or any directories other than the ignored ones.
/// </summary>
private static bool FoundFilesOrDirsToArchive(IDataLoadJob job)
{
    var forLoading = job.LoadDirectory.ForLoading;

    //if there are any files
    if (forLoading.EnumerateFiles().Any())
        return true;

    //or any directories that are not directories we should be ignoring
    return forLoading.EnumerateDirectories().Any(d => !DirsToIgnore.Contains(d.Name));
}
/// <summary>
/// Computes the cache workload for the scheduled job, extracts the cached files for loading and
/// registers a disposal operation that deletes the consumed cache files after the load.
/// </summary>
public override ExitCodeType Fetch(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    var scheduledJob = ConvertToScheduledJob(job);
    var workload = GetDataLoadWorkload(scheduledJob);

    ExtractJobs(scheduledJob);

    // Clean up the consumed cache entries once the load completes
    job.PushForDisposal(new DeleteCachedFilesOperation(scheduledJob, workload));

    return ExitCodeType.Success;
}
/// <summary>
/// Populates ForLoading from the cached workload: either extracting archives or copying them
/// verbatim (preserving their path relative to the cache root).  If ForLoading already contains
/// files they must match the workload, in which case extraction is skipped with a warning.
/// </summary>
protected void ExtractJobs(IDataLoadJob dataLoadJob)
{
    // check to see if forLoading has anything in it and bail if it does
    if (dataLoadJob.LoadDirectory.ForLoading.EnumerateFileSystemInfos().Any())
    {
        // RDMPDEV-185
        // There are files in ForLoading, but do they match what we would expect to find? Need to make sure that they aren't from a different dataset and/or there is the expected number of files
        // We should already have a _workload
        if (_workload == null)
            throw new InvalidOperationException("The workload has not been initialised, don't know what files are to be retrieved from the cache");

        if (!FilesInForLoadingMatchWorkload(dataLoadJob.LoadDirectory))
            throw new InvalidOperationException("The files in ForLoading do not match what this job expects to be loading from the cache. Please delete the files in ForLoading before re-attempting the data load.");

        dataLoadJob.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "ForLoading already has files, skipping extraction"));
        return;
    }

    var layout = CreateCacheLayout((ScheduledDataLoadJob)dataLoadJob);

    //extract all the jobs into the forLoading directory
    foreach (KeyValuePair<DateTime, FileInfo> job in _workload)
    {
        if (job.Value == null)
            continue;

        if (ExtractFilesFromArchive)
        {
            var extractor = CreateExtractor(layout.ArchiveType);
            extractor.Extract(job, dataLoadJob.LoadDirectory.ForLoading, dataLoadJob);
            continue;
        }

        dataLoadJob.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Archive identified:" + job.Value.FullName));

        // just copy the archives across
        var relativePath = GetPathRelativeToCacheRoot(dataLoadJob.LoadDirectory.Cache, job.Value);
        var absolutePath = Path.Combine(dataLoadJob.LoadDirectory.ForLoading.FullName, relativePath);

        if (!Directory.Exists(absolutePath))
            Directory.CreateDirectory(absolutePath);

        job.Value.CopyTo(Path.Combine(absolutePath, job.Value.Name));
    }
}
/// <summary>
/// Migrates all regular tables, then all lookup tables, from RAW into STAGING.
/// </summary>
private void DoMigration(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    foreach (var regularTable in job.RegularTablesToLoad)
        MigrateRAWTableToStaging(job, regularTable, false, cancellationToken);

    foreach (var lookupTable in job.LookupTablesToLoad)
        MigrateRAWTableToStaging(job, lookupTable, true, cancellationToken);
}
/// <summary>
/// Verifies that ExecuteSqlFileRuntimeTask resolves {T:id}/{C:id} catalogue references through a
/// custom database namer: the table is renamed to simulate RAW and the script must still update it.
/// </summary>
public void ExecuteSqlFileRuntimeTask_ValidID_CustomNamer(DatabaseType dbType)
{
    var seed = new DataTable();
    seed.Columns.Add("Lawl");
    seed.Rows.Add(new object[] { 2 });

    var db = GetCleanedServer(dbType, true);
    var tbl = db.CreateTable("Fish", seed);

    var tableName = "AAAAAAA";

    TableInfo ti;
    ColumnInfo[] cols;
    Import(tbl, out ti, out cols);

    // Script references the table/column by catalogue ID rather than by name
    var scriptFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Bob.sql"));
    File.WriteAllText(scriptFile.FullName, @"UPDATE {T:" + ti.ID + "} Set {C:" + cols[0].ID + "} = 1");

    tbl.Rename(tableName);

    //we renamed the table to simulate RAW, confirm TableInfo doesn't think it exists
    Assert.IsFalse(ti.Discover(DataAccessContext.InternalDataProcessing).Exists());

    var pt = Mock.Of<IProcessTask>(x => x.Path == scriptFile.FullName);
    var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "ExecuteSqlFileRuntimeTaskTests", true);

    var task = new ExecuteSqlFileRuntimeTask(pt, new RuntimeArgumentCollection(new IArgument[0], new StageArgs(LoadStage.AdjustRaw, db, dir)));
    task.Check(new ThrowImmediatelyCheckNotifier());

    //create a namer that tells the user
    var namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, tableName);
    HICDatabaseConfiguration configuration = new HICDatabaseConfiguration(db.Server, namer);

    IDataLoadJob job = Mock.Of<IDataLoadJob>(x =>
        x.RegularTablesToLoad == new List<ITableInfo> { ti } &&
        x.LookupTablesToLoad == new List<ITableInfo>() &&
        x.Configuration == configuration);

    task.Run(job, new GracefulCancellationToken());

    Assert.AreEqual(1, tbl.GetDataTable().Rows[0][0]);

    tbl.Drop();
}
/// <summary>
/// Downloads the file at <see cref="UriToFile"/> into the ForLoading directory (spoofing a
/// Firefox user agent) and reports the download size/elapsed time as progress.
/// </summary>
public ExitCodeType Fetch(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    var stopwatch = new Stopwatch();

    var destinationFile = new FileInfo(Path.Combine(job.LoadDirectory.ForLoading.FullName, Path.GetFileName(UriToFile.LocalPath)));

    DownloadFileWhilstPretendingToBeFirefox(stopwatch, destinationFile, job);

    job.OnProgress(this, new ProgressEventArgs(destinationFile.FullName, new ProgressMeasurement((int)(destinationFile.Length / 1000), ProgressType.Kilobytes), stopwatch.Elapsed));

    return ExitCodeType.Success;
}
/// <summary>
/// Moves the first *.csv file found in the Cache directory into ForLoading as "1.csv".
/// Returns OperationNotRequired when the cache contains no csv files.
/// </summary>
public override ExitCodeType Fetch(IDataLoadJob dataLoadJob, GracefulCancellationToken cancellationToken)
{
    var loadDirectory = dataLoadJob.LoadDirectory;

    var cachedCsv = loadDirectory.Cache.EnumerateFiles("*.csv").FirstOrDefault();
    if (cachedCsv == null)
        return ExitCodeType.OperationNotRequired;

    File.Move(cachedCsv.FullName, Path.Combine(loadDirectory.ForLoading.FullName, "1.csv"));

    return ExitCodeType.Success;
}
/// <summary>
/// Copies every file in <see cref="DirectoryPath"/> matching <see cref="FilePattern"/> into
/// ForLoading (overwriting any existing file of the same name), logging each copy.
/// </summary>
public ExitCodeType Fetch(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    _files = new DirectoryInfo(DirectoryPath).GetFiles(FilePattern);

    foreach (FileInfo file in _files)
    {
        var destination = Path.Combine(job.LoadDirectory.ForLoading.FullName, file.Name);

        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Copying file " + file.FullName + " to directory " + destination));

        // overwrite: true so repeated runs do not fail on leftovers
        file.CopyTo(destination, true);
    }

    return ExitCodeType.Success;
}
/// <summary>
/// Migrates all STAGING tables (regular and lookup) into LIVE inside a single transaction:
/// commits and marks the load complete on success, abandons the transaction on cancellation,
/// and abandons then rethrows on any other error.
/// </summary>
public void Migrate(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    // Nothing to migrate if every source table is empty
    if (_sourceDbInfo.DiscoverTables(false).All(t => t.IsEmpty()))
        throw new Exception("The source database '" + _sourceDbInfo.GetRuntimeName() + "' on " + _sourceDbInfo.Server.Name + " is empty. There is nothing to migrate.");

    using (var destConnection = _destinationDbInfo.Server.BeginNewTransactedConnection())
    {
        try
        {
            // This will eventually be provided by factory/externally based on LoadMetadata (only one strategy for now)
            _migrationStrategy = new OverwriteMigrationStrategy(destConnection);
            _migrationStrategy.TableMigrationCompleteHandler += (name, inserts, updates) =>
                job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Migrate table " + name + " from STAGING to " + _destinationDbInfo.GetRuntimeName() + ": " + inserts + " inserts, " + updates + " updates"));

            //migrate all tables (both lookups and live tables in the same way)
            var columnsToMigrate = _migrationConfig.CreateMigrationColumnSetFromTableInfos(
                job.RegularTablesToLoad,
                job.LookupTablesToLoad,
                new StagingToLiveMigrationFieldProcessor(
                    _databaseConfiguration.UpdateButDoNotDiff,
                    _databaseConfiguration.IgnoreColumns,
                    job.GetAllColumns().Where(c => c.IgnoreInLoads).ToArray())
                {
                    NoBackupTrigger = job.LoadMetadata.IgnoreTrigger
                });

            // Migrate the data columns
            _migrationStrategy.Execute(job, columnsToMigrate, job.DataLoadInfo, cancellationToken);

            destConnection.ManagedTransaction.CommitAndCloseConnection();
            job.DataLoadInfo.CloseAndMarkComplete();
        }
        catch (OperationCanceledException)
        {
            // Cancellation is not an error - just roll back
            destConnection.ManagedTransaction.AbandonAndCloseConnection();
        }
        catch (Exception ex)
        {
            try
            {
                destConnection.ManagedTransaction.AbandonAndCloseConnection();
            }
            catch (Exception)
            {
                // Rollback itself failed - surface the original problem as the inner exception
                throw new Exception("Failed to rollback after exception, see inner exception for details of original problem", ex);
            }

            throw;
        }
    }
}
/// <summary>
/// Disposes the job's registered disposables and closes logging, reporting (and rethrowing)
/// any error raised during disposal.
/// </summary>
private void TryDispose(ExitCodeType exitCode, IDataLoadJob job)
{
    try
    {
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Disposing disposables..."));
        job.LoadCompletedSoDispose(exitCode, job);
        job.CloseLogging();
    }
    catch (Exception e)
    {
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Job " + job.JobID + " crashed again during disposing", e));
        throw;
    }
}