public void TestTemporalTable(bool ignoreWithGlobalPattern)
{
    var dbtype = FAnsi.DatabaseType.MicrosoftSQLServer;
    var db = GetCleanedServer(dbtype);

    using (var con = db.Server.GetConnection())
    {
        con.Open();
        db.Server.GetCommand(sql, con).ExecuteNonQuery();
    }

    var tbl = db.ExpectTable("Employee");

    var defaults = new ServerDefaults(CatalogueRepository);
    var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);

    var raw = db.Server.ExpectDatabase(db.GetRuntimeName() + "_RAW");
    if (raw.Exists())
    {
        raw.Drop();
    }

    //define a new load configuration
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoad");
    lmd.IgnoreTrigger = true;
    lmd.SaveToDatabase();

    ITableInfo ti = Import(tbl, lmd, logManager);

    var projectDirectory = SetupLoadDirectory(lmd);

    CreateCSVProcessTask(lmd, ti, "*.csv");

    //create a text file to load in which we update Frank's position/department (EmployeeID is the pk field) and insert a new record (Herbert)
    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"),
        @"EmployeeID,Name,Position,Department,Address,AnnualSalary
1,Frank,Boss,Department of F'Tang, 22 Innsmouth Way, 55000.5
2,Herbert,Super Boss,Department of F'Tang, 22 Innsmouth Way, 155000.5");

    //the checks will probably need to be run as ddl admin because they involve creating the _Archive table and trigger the first time
    //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations
    var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());

    if (ignoreWithGlobalPattern)
    {
        var regex = new StandardRegex(RepositoryLocator.CatalogueRepository)
        {
            ConceptName = StandardRegex.DataLoadEngineGlobalIgnorePattern,
            Regex = "^Valid((From)|(To))$"
        };
        regex.SaveToDatabase();
    }
    else
    {
        var col = ti.ColumnInfos.Single(c => c.GetRuntimeName().Equals("ValidFrom"));
        col.IgnoreInLoads = true;
        col.SaveToDatabase();

        col = ti.ColumnInfos.Single(c => c.GetRuntimeName().Equals("ValidTo"));
        col.IgnoreInLoads = true;
        col.SaveToDatabase();
    }

    var dbConfig = new HICDatabaseConfiguration(lmd, null);

    var loadFactory = new HICDataLoadFactory(
        lmd,
        dbConfig,
        new HICLoadConfigurationFlags(),
        CatalogueRepository,
        logManager
        );

    var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

    var exitCode = exe.Run(
        new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(), dbConfig),
        new GracefulCancellationToken());

    Assert.AreEqual(ExitCodeType.Success, exitCode);

    //Frank should have been updated to his new department and role
    Assert.AreEqual(2, tbl.GetRowCount());

    var result = tbl.GetDataTable();
    var frank = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
    Assert.AreEqual("Department of F'Tang", frank["Department"]);
    Assert.AreEqual("Boss", frank["Position"]);

    //post test cleanup
    foreach (var regex in RepositoryLocator.CatalogueRepository.GetAllObjects<StandardRegex>())
    {
        regex.DeleteInDatabase();
    }
}
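// Note: `sql` above is a fixture member defined elsewhere and not shown in this excerpt.
// A minimal sketch (an assumption, not the fixture's actual DDL) of the kind of SQL Server
// system-versioned temporal table it creates - matching the CSV columns and the
// ValidFrom/ValidTo handling tested above - might be:
private const string sql = @"
CREATE TABLE dbo.Employee
(
    EmployeeID int NOT NULL PRIMARY KEY CLUSTERED,
    Name nvarchar(100) NOT NULL,
    Position varchar(100) NOT NULL,
    Department varchar(100) NOT NULL,
    Address nvarchar(1024) NOT NULL,
    AnnualSalary decimal(10,2) NOT NULL,
    ValidFrom datetime2 GENERATED ALWAYS AS ROW START NOT NULL,
    ValidTo datetime2 GENERATED ALWAYS AS ROW END NOT NULL,
    PERIOD FOR SYSTEM_TIME (ValidFrom, ValidTo)
)
WITH (SYSTEM_VERSIONING = ON (HISTORY_TABLE = dbo.EmployeeHistory));";
// (The fixture's real script presumably also seeds the table, e.g. with an initial Frank row,
// so that the load above performs an update rather than a plain insert.)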
public void Load(DatabaseType databaseType, TestCase testCase)
{
    var defaults = new ServerDefaults(CatalogueRepository);
    var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);

    var db = GetCleanedServer(databaseType);

    var raw = db.Server.ExpectDatabase(db.GetRuntimeName() + "_RAW");
    if (raw.Exists())
    {
        raw.Drop();
    }

    var dt = new DataTable("MyTable");
    dt.Columns.Add("Name");
    dt.Columns.Add("DateOfBirth");
    dt.Columns.Add("FavouriteColour");
    dt.Rows.Add("Bob", "2001-01-01", "Pink");
    dt.Rows.Add("Frank", "2001-01-01", "Orange");

    var nameCol = new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 20), false) { IsPrimaryKey = true };

    if (testCase == TestCase.DodgyCollation)
    {
        if (databaseType == DatabaseType.MicrosoftSQLServer)
        {
            nameCol.Collation = "Latin1_General_CS_AS_KS_WS";
        }
        else if (databaseType == DatabaseType.MySql)
        {
            nameCol.Collation = "latin1_german1_ci";
        }
    }

    DiscoveredTable tbl;
    if (testCase == TestCase.WithNonPrimaryKeyIdentityColumn)
    {
        tbl = db.CreateTable("MyTable", new[]
        {
            new DatabaseColumnRequest("ID", new DatabaseTypeRequest(typeof(int)), false) { IsPrimaryKey = false, IsAutoIncrement = true },
            nameCol,
            new DatabaseColumnRequest("DateOfBirth", new DatabaseTypeRequest(typeof(DateTime)), false) { IsPrimaryKey = true },
            new DatabaseColumnRequest("FavouriteColour", new DatabaseTypeRequest(typeof(string)))
        });

        using (var blk = tbl.BeginBulkInsert())
            blk.Upload(dt);

        Assert.AreEqual(1, tbl.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("ID", StringComparison.CurrentCultureIgnoreCase)), "Table created did not contain ID column");
    }
    else if (testCase == TestCase.AllPrimaryKeys)
    {
        dt.PrimaryKey = dt.Columns.Cast<DataColumn>().ToArray();
        tbl = db.CreateTable("MyTable", dt, new[] { nameCol }); //upload the columns as they are
        Assert.IsTrue(tbl.DiscoverColumns().All(c => c.IsPrimaryKey));
    }
    else
    {
        tbl = db.CreateTable("MyTable", dt, new[]
        {
            nameCol,
            new DatabaseColumnRequest("DateOfBirth", new DatabaseTypeRequest(typeof(DateTime)), false) { IsPrimaryKey = true }
        });
    }

    Assert.AreEqual(2, tbl.GetRowCount());

    //define a new load configuration
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoad");

    if (testCase == TestCase.NoTrigger)
    {
        lmd.IgnoreTrigger = true;
        lmd.SaveToDatabase();
    }

    TableInfo ti = Import(tbl, lmd, logManager);

    var projectDirectory = SetupLoadDirectory(lmd);

    CreateCSVProcessTask(lmd, ti, "*.csv");

    //create a text file to load in which we update Frank's favourite colour (Name/DateOfBirth are the pk fields) and insert a new record (MrMurder)
    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"),
        @"Name,DateOfBirth,FavouriteColour
Frank,2001-01-01,Neon
MrMurder,2001-01-01,Yella");

    //the checks will probably need to be run as ddl admin because they involve creating the _Archive table and trigger the first time
    //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations
    var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());

    //create a reader
    if (testCase == TestCase.LowPrivilegeLoaderAccount)
    {
        SetupLowPrivilegeUserRightsFor(ti, TestLowPrivilegePermissions.Reader | TestLowPrivilegePermissions.Writer);
        SetupLowPrivilegeUserRightsFor(db.Server.ExpectDatabase("DLE_STAGING"), TestLowPrivilegePermissions.All);
    }

    Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID),
        $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
    Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom),
        $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");

    var dbConfig = new HICDatabaseConfiguration(lmd, testCase == TestCase.WithCustomTableNamer ? new CustomINameDatabasesAndTablesDuringLoads() : null);

    if (testCase == TestCase.WithCustomTableNamer)
    {
        new PreExecutionChecker(lmd, dbConfig).Check(new AcceptAllCheckNotifier()); //handles staging database creation etc
    }

    if (testCase == TestCase.WithDiffColumnIgnoreRegex)
    {
        dbConfig.UpdateButDoNotDiff = new Regex("^FavouriteColour"); //do not diff FavouriteColour
    }

    var loadFactory = new HICDataLoadFactory(
        lmd,
        dbConfig,
        new HICLoadConfigurationFlags(),
        CatalogueRepository,
        logManager
        );

    try
    {
        var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

        var exitCode = exe.Run(
            new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(), dbConfig),
            new GracefulCancellationToken());

        Assert.AreEqual(ExitCodeType.Success, exitCode);

        if (testCase == TestCase.AllPrimaryKeys)
        {
            Assert.AreEqual(4, tbl.GetRowCount()); //Bob, Frank, Frank (with also pk Neon) & MrMurder
            Assert.Pass();
        }

        if (testCase == TestCase.WithDiffColumnIgnoreRegex)
        {
            Assert.AreEqual(3, tbl.GetRowCount()); //Bob, Frank (original since the diff was skipped) & MrMurder

            //Frank should still like Orange because the FavouriteColour diff was skipped
            var frankOld = tbl.GetDataTable().Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
            Assert.AreEqual("Orange", frankOld["FavouriteColour"]);
            Assert.Pass();
        }

        //Frank should be updated to like Neon instead of Orange
        Assert.AreEqual(3, tbl.GetRowCount());

        var result = tbl.GetDataTable();
        var frank = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
        Assert.AreEqual("Neon", frank["FavouriteColour"]);

        if (testCase != TestCase.NoTrigger)
        {
            AssertHasDataLoadRunId(frank);
        }

        //MrMurder is a new person who likes Yella
        var mrmurder = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "MrMurder");
        Assert.AreEqual("Yella", mrmurder["FavouriteColour"]);
        Assert.AreEqual(new DateTime(2001, 01, 01), mrmurder["DateOfBirth"]);

        if (testCase != TestCase.NoTrigger)
        {
            AssertHasDataLoadRunId(mrmurder);
        }

        //Bob should be untouched (same values as before and no DataLoadRunID)
        var bob = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Bob");
        Assert.AreEqual("Pink", bob["FavouriteColour"]);
        Assert.AreEqual(new DateTime(2001, 01, 01), bob["DateOfBirth"]);

        if (testCase != TestCase.NoTrigger)
        {
            Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.DataLoadRunID]);

            //MySql puts a default of now() on the column and auto populates existing rows with the now() date while Sql Server leaves them as nulls
            if (databaseType == DatabaseType.MicrosoftSQLServer)
            {
                Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.ValidFrom]);
            }
        }

        Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID),
            $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
        Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom),
            $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
    }
    finally
    {
        Directory.Delete(lmd.LocationOfFlatFiles, true);

        foreach (Catalogue c in RepositoryLocator.CatalogueRepository.GetAllObjects<Catalogue>())
        {
            c.DeleteInDatabase();
        }

        foreach (TableInfo t in RepositoryLocator.CatalogueRepository.GetAllObjects<TableInfo>())
        {
            t.DeleteInDatabase();
        }

        foreach (LoadMetadata l in RepositoryLocator.CatalogueRepository.GetAllObjects<LoadMetadata>())
        {
            l.DeleteInDatabase();
        }
    }

    if (testCase == TestCase.WithCustomTableNamer)
    {
        var db2 = db.Server.ExpectDatabase("BB_STAGING");
        if (db2.Exists())
        {
            db2.Drop();
        }
    }
}
public int Run(IRDMPPlatformRepositoryServiceLocator locator, IDataLoadEventListener listener, ICheckNotifier checkNotifier, GracefulCancellationToken token)
{
    ILoadProgress loadProgress = locator.CatalogueRepository.GetObjectByID<LoadProgress>(_options.LoadProgress);
    ILoadMetadata loadMetadata = locator.CatalogueRepository.GetObjectByID<LoadMetadata>(_options.LoadMetadata);

    if (loadMetadata == null && loadProgress != null)
    {
        loadMetadata = loadProgress.LoadMetadata;
    }

    if (loadMetadata == null)
    {
        throw new ArgumentException("No Load Metadata specified");
    }

    if (loadProgress != null && loadProgress.LoadMetadata_ID != loadMetadata.ID)
    {
        throw new ArgumentException("The supplied LoadProgress does not belong to the supplied LoadMetadata load");
    }

    var databaseConfiguration = new HICDatabaseConfiguration(loadMetadata);
    var flags = new HICLoadConfigurationFlags();

    flags.ArchiveData = !_options.DoNotArchiveData;
    flags.DoLoadToStaging = !_options.StopAfterRAW;
    flags.DoMigrateFromStagingToLive = !_options.StopAfterSTAGING;

    var checkable = new CheckEntireDataLoadProcess(loadMetadata, databaseConfiguration, flags, locator.CatalogueRepository.MEF);

    switch (_options.Command)
    {
        case CommandLineActivity.run:

            var loggingServer = loadMetadata.GetDistinctLoggingDatabase();
            var logManager = new LogManager(loggingServer);

            // Create the pipeline to pass into the DataLoadProcess object
            var dataLoadFactory = new HICDataLoadFactory(loadMetadata, databaseConfiguration, flags, locator.CatalogueRepository, logManager);

            IDataLoadExecution execution = dataLoadFactory.Create(listener);

            IDataLoadProcess dataLoadProcess;

            if (loadMetadata.LoadProgresses.Any())
            {
                //Then the load is designed to run X days of source data at a time
                //Load Progress
                ILoadProgressSelectionStrategy whichLoadProgress = loadProgress != null
                    ? (ILoadProgressSelectionStrategy)new SingleLoadProgressSelectionStrategy(loadProgress)
                    : new AnyAvailableLoadProgressSelectionStrategy(loadMetadata);

                var jobDateFactory = new JobDateGenerationStrategyFactory(whichLoadProgress);

                dataLoadProcess = _options.Iterative
                    ? (IDataLoadProcess)new IterativeScheduledDataLoadProcess(locator, loadMetadata, checkable, execution, jobDateFactory, whichLoadProgress, _options.DaysToLoad, logManager, listener, databaseConfiguration)
                    : new SingleJobScheduledDataLoadProcess(locator, loadMetadata, checkable, execution, jobDateFactory, whichLoadProgress, _options.DaysToLoad, logManager, listener, databaseConfiguration);
            }
            else
            {
                //OnDemand
                dataLoadProcess = new DataLoadProcess(locator, loadMetadata, checkable, logManager, listener, execution, databaseConfiguration);
            }

            var exitCode = dataLoadProcess.Run(token);

            //return 0 for success or 'load not required', otherwise return the exit code (which will be non zero, i.e. an error)
            return exitCode == ExitCodeType.Success || exitCode == ExitCodeType.OperationNotRequired ? 0 : (int)exitCode;

        case CommandLineActivity.check:

            checkable.Check(checkNotifier);
            return 0;

        default:
            throw new ArgumentOutOfRangeException();
    }
}
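// _options is the parsed command line handed to this runner; its real type is defined
// elsewhere in the codebase. Only the members actually read in Run() are sketched here
// (the class name and property types are assumptions, the member names come from the code):
public class DleOptionsSketch
{
    public int LoadMetadata { get; set; }            // ID of the LoadMetadata to run/check
    public int LoadProgress { get; set; }            // ID of a specific LoadProgress (optional)
    public CommandLineActivity Command { get; set; } // run or check
    public bool DoNotArchiveData { get; set; }
    public bool StopAfterRAW { get; set; }
    public bool StopAfterSTAGING { get; set; }
    public bool Iterative { get; set; }
    public int DaysToLoad { get; set; }
}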
public void DLELoadTwoTables(DatabaseType databaseType)
{
    //setup the data tables
    var defaults = new ServerDefaults(CatalogueRepository);
    var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);

    var db = GetCleanedServer(databaseType);

    var dtParent = new DataTable();
    dtParent.Columns.Add("ID", typeof(int));
    dtParent.Columns.Add("Name");
    dtParent.Columns.Add("Height");
    dtParent.PrimaryKey = new[] { dtParent.Columns[0] };
    dtParent.Rows.Add("1", "Dave", "3.5");

    var dtChild = new DataTable();
    dtChild.Columns.Add("Parent_ID");
    dtChild.Columns.Add("ChildNumber");
    dtChild.Columns.Add("Name");
    dtChild.Columns.Add("DateOfBirth");
    dtChild.Columns.Add("Age");
    dtChild.Columns.Add("Height");

    dtChild.Rows.Add("1", "1", "Child1", "2001-01-01", "20", "3.5");
    dtChild.Rows.Add("1", "2", "Child2", "2002-01-01", "19", "3.4");

    dtChild.PrimaryKey = new[] { dtChild.Columns[0], dtChild.Columns[1] };

    //create the parent table based on the DataTable
    var parentTbl = db.CreateTable("Parent", dtParent);

    //go find the primary key column created
    var pkParentID = parentTbl.DiscoverColumn("ID");

    //forward declare this column as part of the pk (it will also be used to specify the foreign key)
    var fkParentID = new DatabaseColumnRequest("Parent_ID", "int") { IsPrimaryKey = true };

    var args = new CreateTableArgs(
        db,
        "Child",
        null,
        dtChild,
        false,
        new Dictionary<DatabaseColumnRequest, DiscoveredColumn>()
        {
            { fkParentID, pkParentID }
        },
        true);

    args.ExplicitColumnDefinitions = new[] { fkParentID };

    var childTbl = db.CreateTable(args);

    Assert.AreEqual(1, parentTbl.GetRowCount());
    Assert.AreEqual(2, childTbl.GetRowCount());

    //create a new load
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoading2");

    TableInfo childTableInfo = Import(childTbl, lmd, logManager);
    TableInfo parentTableInfo = Import(parentTbl, lmd, logManager);

    var projectDirectory = SetupLoadDirectory(lmd);

    CreateCSVProcessTask(lmd, parentTableInfo, "parent.csv");
    CreateCSVProcessTask(lmd, childTableInfo, "child.csv");

    //create text files to load in which we update Dave's height, rename Child1 to UpdC1 and insert new records (Man2 and NewC1)
    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "parent.csv"),
        @"ID,Name,Height
2,Man2,3.1
1,Dave,3.2");

    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "child.csv"),
        @"Parent_ID,ChildNumber,Name,DateOfBirth,Age,Height
1,1,UpdC1,2001-01-01,20,3.5
2,1,NewC1,2000-01-01,19,null");

    //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations
    var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());

    var config = new HICDatabaseConfiguration(lmd);

    var loadFactory = new HICDataLoadFactory(
        lmd,
        config,
        new HICLoadConfigurationFlags(),
        CatalogueRepository,
        logManager
        );

    try
    {
        var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

        var exitCode = exe.Run(
            new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(), config),
            new GracefulCancellationToken());

        Assert.AreEqual(ExitCodeType.Success, exitCode);

        //should now be 2 parents: the original (who was updated) plus 1 new one (Man2)
        Assert.AreEqual(2, parentTbl.GetRowCount());

        var result = parentTbl.GetDataTable();
        var dave = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Dave");
        Assert.AreEqual(3.2f, dave["Height"]); //should now be only 3.2 inches high
        AssertHasDataLoadRunId(dave);

        //should be 3 children: Child1 (updated to be called UpdC1), the untouched Child2 and the new NewC1
        Assert.AreEqual(3, childTbl.GetRowCount());

        result = childTbl.GetDataTable();

        var updC1 = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "UpdC1");
        Assert.AreEqual(1, updC1["Parent_ID"]);
        Assert.AreEqual(1, updC1["ChildNumber"]);
        AssertHasDataLoadRunId(updC1);

        var newC1 = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "NewC1");
        Assert.AreEqual(2, newC1["Parent_ID"]);
        Assert.AreEqual(1, newC1["ChildNumber"]);
        Assert.AreEqual(DBNull.Value, newC1["Height"]); //the "null" in the input file should be DBNull.Value in the final database
        AssertHasDataLoadRunId(newC1);
    }
    finally
    {
        Directory.Delete(lmd.LocationOfFlatFiles, true);

        foreach (Catalogue c in RepositoryLocator.CatalogueRepository.GetAllObjects<Catalogue>())
        {
            c.DeleteInDatabase();
        }

        foreach (TableInfo t in RepositoryLocator.CatalogueRepository.GetAllObjects<TableInfo>())
        {
            t.DeleteInDatabase();
        }

        foreach (LoadMetadata l in RepositoryLocator.CatalogueRepository.GetAllObjects<LoadMetadata>())
        {
            l.DeleteInDatabase();
        }
    }
}
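// AssertHasDataLoadRunId is a fixture helper not shown in this excerpt. Judging from how it is
// used above (loaded rows get it, untouched rows like Bob keep DBNull), a minimal sketch of the
// assumed behaviour is that it checks the trigger stamped the row with a run id:
private void AssertHasDataLoadRunId(DataRow row)
{
    //the row should carry the id of the data load run that inserted or updated it
    Assert.AreNotEqual(DBNull.Value, row[SpecialFieldNames.DataLoadRunID]);
}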
public override void Execute()
{
    if (DicomSourceType == null)
    {
        SetImpossible("You must specify a Type for DicomSourceType");
        throw new ImpossibleCommandException(this, ReasonCommandImpossible);
    }

    base.Execute();

    List<DiscoveredTable> tablesCreated = new List<DiscoveredTable>();

    //Create with template?
    if (Template != null)
    {
        foreach (ImageTableTemplate table in Template.Tables)
        {
            string tblName = GetNameWithPrefix(table.TableName);

            var tbl = _databaseToCreateInto.ExpectTable(tblName);
            var cmd = new ExecuteCommandCreateNewImagingDataset(_repositoryLocator, tbl, table);
            cmd.Execute();

            NewCataloguesCreated.Add(cmd.NewCatalogueCreated);
            tablesCreated.Add(tbl);
        }
    }
    else
    {
        throw new Exception("No Template provided");
    }

    //that's us done if we aren't creating a load
    if (!CreateLoad)
    {
        return;
    }

    string loadName = GetNameWithPrefixInBracketsIfAny("SMI Image Loading");

    NewLoadMetadata = new LoadMetadata(_catalogueRepository, loadName);

    //tell all the catalogues that they are part of this load and where to log under the same task
    foreach (Catalogue c in NewCataloguesCreated)
    {
        c.LoadMetadata_ID = NewLoadMetadata.ID;
        c.LoggingDataTask = loadName;
        c.LiveLoggingServer_ID = _loggingServer.ID;
        c.SaveToDatabase();
    }

    //create the logging task
    new Core.Logging.LogManager(_loggingServer).CreateNewLoggingTaskIfNotExists(loadName);

    var projDir = LoadDirectory.CreateDirectoryStructure(_projectDirectory, "ImageLoading", true);
    NewLoadMetadata.LocationOfFlatFiles = projDir.RootPath.FullName;
    NewLoadMetadata.SaveToDatabase();

    /////////////////////////////////////////////Attacher////////////////////////////

    //Create a pipeline for reading from Dicom files and writing to any destination component (which must be fixed)
    var pipe = new Pipeline(_catalogueRepository, GetNameWithPrefixInBracketsIfAny("Image Loading Pipe"));

    DicomSourcePipelineComponent = new PipelineComponent(_catalogueRepository, pipe, DicomSourceType, 0, DicomSourceType.Name);
    DicomSourcePipelineComponent.CreateArgumentsForClassIfNotExists(DicomSourceType);

    pipe.SourcePipelineComponent_ID = DicomSourcePipelineComponent.ID;
    pipe.SaveToDatabase();

    //Create the load process task that uses the pipe to load RAW tables with data from the dicom files
    var pt = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.Mounting);
    pt.Name = "Auto Routing Attacher";
    pt.ProcessTaskType = ProcessTaskType.Attacher;
    pt.Path = PersistentRaw ? typeof(AutoRoutingAttacherWithPersistentRaw).FullName : typeof(AutoRoutingAttacher).FullName;
    pt.Order = 1;
    pt.SaveToDatabase();

    var args = PersistentRaw
        ? pt.CreateArgumentsForClassIfNotExists<AutoRoutingAttacherWithPersistentRaw>()
        : pt.CreateArgumentsForClassIfNotExists<AutoRoutingAttacher>();

    SetArgument(args, "LoadPipeline", pipe);

    /////////////////////////////////////// Distinct tables on load /////////////////////////

    var distincter = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.AdjustRaw);
    var distincterArgs = distincter.CreateArgumentsForClassIfNotExists<Distincter>();

    distincter.Name = "Distincter";
    distincter.ProcessTaskType = ProcessTaskType.MutilateDataTable;
    distincter.Path = typeof(Distincter).FullName;
    distincter.Order = 2;
    distincter.SaveToDatabase();

    SetArgument(distincterArgs, "TableRegexPattern", ".*");

    /////////////////////////////////////////////////////////////////////////////////////

    if (CreateCoalescer)
    {
        var coalescer = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.AdjustRaw);
        coalescer.Name = "Coalescer";
        coalescer.ProcessTaskType = ProcessTaskType.MutilateDataTable;
        coalescer.Path = typeof(Coalescer).FullName;
        coalescer.Order = 3;
        coalescer.SaveToDatabase();

        StringBuilder regexPattern = new StringBuilder();

        foreach (var tbl in tablesCreated)
        {
            if (!tbl.DiscoverColumns().Any(c => c.GetRuntimeName().Equals("SOPInstanceUID", StringComparison.CurrentCultureIgnoreCase)))
            {
                regexPattern.Append("(" + tbl.GetRuntimeName() + ")|");
            }
        }

        var coalArgs = coalescer.CreateArgumentsForClassIfNotExists<Coalescer>();

        SetArgument(coalArgs, "TableRegexPattern", regexPattern.ToString().TrimEnd('|'));
        SetArgument(coalArgs, "CreateIndex", true);
    }

    ////////////////////////////////Load Ender (if no rows in load) ////////////////////////////

    var prematureLoadEnder = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.Mounting);
    prematureLoadEnder.Name = "Premature Load Ender";
    prematureLoadEnder.ProcessTaskType = ProcessTaskType.MutilateDataTable;
    prematureLoadEnder.Path = typeof(PrematureLoadEnder).FullName;
    prematureLoadEnder.Order = 4;
    prematureLoadEnder.SaveToDatabase();

    args = prematureLoadEnder.CreateArgumentsForClassIfNotExists<PrematureLoadEnder>();

    SetArgument(args, "ExitCodeToReturnIfConditionMet", ExitCodeType.OperationNotRequired);
    SetArgument(args, "ConditionsToTerminateUnder", PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase);

    ////////////////////////////////////////////////////////////////////////////////////////////////

    var checker = new CheckEntireDataLoadProcess(NewLoadMetadata, new HICDatabaseConfiguration(NewLoadMetadata), new HICLoadConfigurationFlags(), _catalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());
}
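// SetArgument is a private helper of this command that is not shown in this excerpt. A minimal
// sketch of the assumed behaviour (parameter types are assumptions): find the demanded argument
// by name among those created by CreateArgumentsForClassIfNotExists, set its value and save it.
private void SetArgument(IArgument[] args, string property, object value)
{
    //locate the argument that corresponds to the named [DemandsInitialization] property
    var arg = args.Single(a => a.Name.Equals(property));
    arg.SetValue(value);
    arg.SaveToDatabase();
}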