public void CacheProvider_NoPipeline()
{
    var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles);
    pt1.Path = typeof(TestCachedFileRetriever).FullName;
    pt1.ProcessTaskType = ProcessTaskType.DataProvider;
    pt1.Name = "Cache1";
    pt1.SaveToDatabase();

    _cp.CacheFillProgress = new DateTime(1999, 1, 1);
    _cp.Name = "MyTestCp";
    _cp.SaveToDatabase();

    pt1.CreateArgumentsForClassIfNotExists<TestCachedFileRetriever>();

    var projDir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", true);
    _lmd.LocationOfFlatFiles = projDir.RootPath.FullName;
    _lmd.SaveToDatabase();

    try
    {
        var ex = Assert.Throws<Exception>(() => _factory.Create(_lp, new ThrowImmediatelyDataLoadEventListener()));
        Assert.AreEqual("CacheProgress MyTestCp does not have a Pipeline configured on it", ex.Message);
    }
    finally
    {
        projDir.RootPath.Delete(true);
    }
}
public void Test_IgnoreTrigger_GetSet()
{
    var loadMetadata = new LoadMetadata(CatalogueRepository);

    try
    {
        //default
        Assert.IsFalse(loadMetadata.IgnoreTrigger);

        loadMetadata.SaveToDatabase();
        Assert.IsFalse(loadMetadata.IgnoreTrigger);

        loadMetadata.SaveToDatabase();
        loadMetadata.IgnoreTrigger = true;
        Assert.IsTrue(loadMetadata.IgnoreTrigger);

        loadMetadata.RevertToDatabaseState();
        Assert.IsFalse(loadMetadata.IgnoreTrigger);

        loadMetadata.IgnoreTrigger = true;
        Assert.IsTrue(loadMetadata.IgnoreTrigger);
        loadMetadata.SaveToDatabase();

        var lmd2 = RepositoryLocator.CatalogueRepository.GetObjectByID<LoadMetadata>(loadMetadata.ID);
        Assert.IsTrue(lmd2.IgnoreTrigger);
    }
    finally
    {
        loadMetadata.DeleteInDatabase();
    }
}
protected override void SetUp()
{
    base.SetUp();

    RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter));
    RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor));

    _lmd = new LoadMetadata(CatalogueRepository, "Ive got a lovely bunch o' coconuts");

    _LoadDirectory = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), @"EndToEndCacheTest", true);
    _lmd.LocationOfFlatFiles = _LoadDirectory.RootPath.FullName;
    _lmd.SaveToDatabase();

    Clear(_LoadDirectory);

    _cata = new Catalogue(CatalogueRepository, "EndToEndCacheTest");
    _cata.LoadMetadata_ID = _lmd.ID;
    _cata.SaveToDatabase();

    _lp = new LoadProgress(CatalogueRepository, _lmd);
    _cp = new CacheProgress(CatalogueRepository, _lp);

    _lp.OriginDate = new DateTime(2001, 1, 1);
    _lp.SaveToDatabase();

    _testPipeline = new TestDataPipelineAssembler("EndToEndCacheTestPipeline" + Guid.NewGuid(), CatalogueRepository);
    _testPipeline.ConfigureCacheProgressToUseThePipeline(_cp);

    _cp.CacheFillProgress = DateTime.Now.AddDays(-NumDaysToCache);
    _cp.SaveToDatabase();
}
public void CreateTask()
{
    _lmd = new LoadMetadata(CatalogueRepository);

    _dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "ProcessTaskCheckingTests"));
    _dir.Create();

    var hicdir = LoadDirectory.CreateDirectoryStructure(_dir, "ProjDir", true);
    _lmd.LocationOfFlatFiles = hicdir.RootPath.FullName;
    _lmd.SaveToDatabase();

    Catalogue c = new Catalogue(CatalogueRepository, "c");
    CatalogueItem ci = new CatalogueItem(CatalogueRepository, c, "ci");

    TableInfo t = new TableInfo(CatalogueRepository, "t");
    t.Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name;
    t.Database = "mydb";
    t.SaveToDatabase();

    ColumnInfo col = new ColumnInfo(CatalogueRepository, "col", "bit", t);
    ci.SetColumnInfo(col);

    c.LoadMetadata_ID = _lmd.ID;
    c.SaveToDatabase();

    _task = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles);
    _checker = new ProcessTaskChecks(_lmd);
}
private LoadDirectory SetupLoadDirectory(LoadMetadata lmd)
{
    var projectDirectory = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "MyLoadDir", true);

    lmd.LocationOfFlatFiles = projectDirectory.RootPath.FullName;
    lmd.SaveToDatabase();

    return projectDirectory;
}
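// Illustrative sketch only (not part of the original suite): how SetupLoadDirectory is typically
// consumed by the tests above - create the folder structure, point the LoadMetadata at it, do the
// work, then delete the root folder in a finally block (the same cleanup pattern used in
// CacheProvider_NoPipeline). The test name MyLoadTest and the ellipsis body are assumptions.
public void MyLoadTest()
{
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoad");
    var projectDirectory = SetupLoadDirectory(lmd);

    try
    {
        // ... drop files into projectDirectory.ForLoading and run the load against lmd ...
    }
    finally
    {
        projectDirectory.RootPath.Delete(true);
    }
}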
public override void Execute()
{
    base.Execute();

    var dialog = new ChooseLoadDirectoryUI(_loadMetadata);

    if (dialog.ShowDialog() == DialogResult.OK)
    {
        _loadMetadata.LocationOfFlatFiles = dialog.Result.RootPath.FullName;
        _loadMetadata.SaveToDatabase();
        Publish(_loadMetadata);
    }
}
public void CreateNewAndGetBackFromDatabase()
{
    var loadMetadata = new LoadMetadata(CatalogueRepository);

    try
    {
        loadMetadata.LocationOfFlatFiles = "C:\\temp";
        loadMetadata.SaveToDatabase();

        var loadMetadataWithIdAfterwards = CatalogueRepository.GetObjectByID<LoadMetadata>(loadMetadata.ID);
        Assert.AreEqual("C:\\temp", loadMetadataWithIdAfterwards.LocationOfFlatFiles);
    }
    finally
    {
        loadMetadata.DeleteInDatabase();
    }
}
public ExecuteCommandPacsFetch(IBasicActivateItems activator, string start, string end,
    string remoteAeUri, int remotePort, string remoteAeTitle,
    string localAeUri, int localPort, string localAeTitle,
    string outDir, int maxRetries) : base(activator)
{
    var startDate = DateTime.Parse(start);
    var endDate = DateTime.Parse(end);

    // Make something that kinda looks like a valid DLE load
    var memory = new MemoryCatalogueRepository();
    var lmd = new LoadMetadata(memory);

    var dir = Directory.CreateDirectory(outDir);
    var results = LoadDirectory.CreateDirectoryStructure(dir, "out", true);
    lmd.LocationOfFlatFiles = results.RootPath.FullName;
    lmd.SaveToDatabase();

    var lp = new LoadProgress(memory, lmd);
    var cp = new CacheProgress(memory, lp);

    //Create the source component only and a valid request range to fetch
    //(note the AE URIs must be given an explicit http:// scheme or the Uri will not parse)
    _source = new PACSSource
    {
        RemoteAEUri = new Uri("http://" + remoteAeUri),
        RemoteAEPort = remotePort,
        RemoteAETitle = remoteAeTitle,
        LocalAEUri = new Uri("http://" + localAeUri),
        LocalAEPort = localPort,
        LocalAETitle = localAeTitle,
        TransferTimeOutInSeconds = 50000,
        Modality = "ALL",
        MaxRetries = maxRetries
    };

    _request = new BackfillCacheFetchRequest(BasicActivator.RepositoryLocator.CatalogueRepository, startDate)
    {
        ChunkPeriod = endDate.Subtract(startDate),
        CacheProgress = cp
    };

    //Initialize it
    _source.PreInitialize(BasicActivator.RepositoryLocator.CatalogueRepository, new ThrowImmediatelyDataLoadEventListener { WriteToConsole = true });
    _source.PreInitialize(this, new ThrowImmediatelyDataLoadEventListener { WriteToConsole = true });
}
public void MEFCompatibleType_NoProjectDirectory()
{
    _lmd.LocationOfFlatFiles = null;
    _lmd.SaveToDatabase();

    _task.ProcessTaskType = ProcessTaskType.Attacher;
    _task.LoadStage = LoadStage.Mounting;
    _task.Path = typeof(AnySeparatorFileAttacher).FullName;
    _task.SaveToDatabase();
    _task.CreateArgumentsForClassIfNotExists<AnySeparatorFileAttacher>();

    var ex = Assert.Throws<Exception>(() => _checker.Check(new ThrowImmediatelyCheckNotifier() { ThrowOnWarning = true }));
    Assert.AreEqual(@"No Project Directory (LocationOfFlatFiles) has been configured on LoadMetadata " + _lmd.Name, ex.InnerException.Message);
}
public void Create(CatalogueRepository repository, DiscoveredDatabase database, ILoadDirectory directory)
{
    TableInfo = new TableInfo(repository, "TestData")
    {
        Server = database.Server.Name,
        Database = database.GetRuntimeName()
    };
    TableInfo.SaveToDatabase();

    if (!string.IsNullOrWhiteSpace(database.Server.ExplicitUsernameIfAny))
    {
        Credentials = new DataAccessCredentialsFactory(repository).Create(TableInfo, database.Server.ExplicitUsernameIfAny, database.Server.ExplicitPasswordIfAny, DataAccessContext.Any);
    }

    ColumnInfo = new ColumnInfo(repository, "Col1", "int", TableInfo)
    {
        IsPrimaryKey = true
    };
    ColumnInfo.SaveToDatabase();

    LoadMetadata = new LoadMetadata(repository, "HICLoadPipelineTests")
    {
        LocationOfFlatFiles = directory.RootPath.FullName
    };
    LoadMetadata.SaveToDatabase();

    Catalogue = new Catalogue(repository, "HICLoadPipelineTests")
    {
        LoggingDataTask = "Test",
        LoadMetadata_ID = LoadMetadata.ID
    };
    Catalogue.SaveToDatabase();

    var catalogueItem = new CatalogueItem(repository, Catalogue, "Test");
    catalogueItem.SetColumnInfo(ColumnInfo);

    SetupLoadProcessTasks(repository);
}
protected override void SetUp()
{
    base.SetUp();

    Database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);

    var rootFolder = new DirectoryInfo(TestContext.CurrentContext.TestDirectory);
    var subdir = rootFolder.CreateSubdirectory("TestsRequiringADle");
    LoadDirectory = LoadDirectory.CreateDirectoryStructure(rootFolder, subdir.FullName, true);

    Clear(LoadDirectory);

    LiveTable = CreateDataset<Demography>(Database, 500, 5000, new Random(190));
    LiveTable.CreatePrimaryKey(new DiscoveredColumn[]
    {
        LiveTable.DiscoverColumn("chi"),
        LiveTable.DiscoverColumn("dtCreated"),
        LiveTable.DiscoverColumn("hb_extract")
    });

    TestCatalogue = Import(LiveTable);
    RowsBefore = 5000;

    TestLoadMetadata = new LoadMetadata(CatalogueRepository, "Loading Test Catalogue");
    TestLoadMetadata.LocationOfFlatFiles = LoadDirectory.RootPath.FullName;
    TestLoadMetadata.SaveToDatabase();

    //make the load load the table
    TestCatalogue.LoadMetadata_ID = TestLoadMetadata.ID;
    TestCatalogue.SaveToDatabase();

    CreateFlatFileAttacher(TestLoadMetadata, "*.csv", TestCatalogue.GetTableInfoList(false).Single(), ",");

    //Get DleRunner to run pre load checks (includes trigger creation etc)
    var runner = new DleRunner(new DleOptions() { LoadMetadata = TestLoadMetadata.ID, Command = CommandLineActivity.check });
    runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new AcceptAllCheckNotifier(), new GracefulCancellationToken());
}
public override void Execute()
{
    base.Execute();

    if (_server == null)
    {
        if (SelectOne(_available, out ExternalDatabaseServer selected))
        {
            _server = selected;
        }
        else
        {
            return;
        }
    }

    _loadMetadata.OverrideRAWServer_ID = _server == null ? null : (int?)_server.ID;
    _loadMetadata.SaveToDatabase();
    Publish(_loadMetadata);
}
public override void Execute()
{
    base.Execute();

    var d = Dir;
    string newFolderName = null;

    // if called with an explicit full dir then that is where we create load folders
    // otherwise get them to pick something that exists and then name a new folder to create
    if (d == null)
    {
        d = BasicActivator.SelectDirectory("Directory to create in");

        if (d == null)
        {
            return;
        }

        if (!BasicActivator.TypeText("New Folder Name", "Name", 255, null, out newFolderName, false))
        {
            return;
        }
    }

    var loadDir = string.IsNullOrWhiteSpace(newFolderName)
        ? LoadDirectory.CreateDirectoryStructure(d.Parent, d.Name, true)
        : LoadDirectory.CreateDirectoryStructure(d, newFolderName, true);

    // if we have a load then update the path to this location we just created
    if (LoadMetadata != null)
    {
        LoadMetadata.LocationOfFlatFiles = loadDir.RootPath.FullName;
        LoadMetadata.SaveToDatabase();
        Publish(LoadMetadata);
    }
}
public void TestPayloadInjection()
{
    BulkTestsData b = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 10);
    b.SetupTestData();
    b.ImportAsCatalogue();

    var lmd = new LoadMetadata(CatalogueRepository, "Loading");
    lmd.LocationOfFlatFiles = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", true).RootPath.FullName;
    lmd.SaveToDatabase();

    CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestPayloadAttacher));

    b.catalogue.LoadMetadata_ID = lmd.ID;
    b.catalogue.LoggingDataTask = "TestPayloadInjection";
    b.catalogue.SaveToDatabase();

    var lm = new LogManager(new ServerDefaults(CatalogueRepository).GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID));
    lm.CreateNewLoggingTaskIfNotExists("TestPayloadInjection");

    var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.Mounting);
    pt.Path = typeof(TestPayloadAttacher).FullName;
    pt.ProcessTaskType = ProcessTaskType.Attacher;
    pt.SaveToDatabase();

    var config = new HICDatabaseConfiguration(DiscoveredDatabaseICanCreateRandomTablesIn.Server);
    var factory = new HICDataLoadFactory(lmd, config, new HICLoadConfigurationFlags(), CatalogueRepository, lm);
    IDataLoadExecution execution = factory.Create(new ThrowImmediatelyDataLoadEventListener());

    var procedure = new DataLoadProcess(RepositoryLocator, lmd, null, lm, new ThrowImmediatelyDataLoadEventListener(), execution, config);
    procedure.Run(new GracefulCancellationToken(), payload);

    Assert.IsTrue(PayloadTest.Success, "Expected IAttacher to detect Payload and set this property to true");
}
public override void Execute()
{
    base.Execute();

    if (_server == null)
    {
        var dialog = new SelectIMapsDirectlyToDatabaseTableDialog(_available, true, false);

        if (dialog.ShowDialog() == DialogResult.OK)
        {
            _server = dialog.Selected as ExternalDatabaseServer;
        }
        else
        {
            return;
        }
    }

    _loadMetadata.OverrideRAWServer_ID = _server == null ? null : (int?)_server.ID;
    _loadMetadata.SaveToDatabase();
    Publish(_loadMetadata);
}
public void TestDle_DodgyColumnNames(DatabaseType dbType)
{
    var db = GetCleanedServer(dbType);

    var tbl = db.CreateTable("Troll Select * Loll", new DatabaseColumnRequest[]
    {
        new DatabaseColumnRequest("group by", new DatabaseTypeRequest(typeof(string), 100)) { IsPrimaryKey = true },
        new DatabaseColumnRequest(",,,,", new DatabaseTypeRequest(typeof(string)))
    });

    CreateFileInForLoading("Troll.csv", new string[]
    {
        "group by,\",,,,\"",
        "fish,fishon"
    });

    var cata = Import(tbl);

    var lmd = new LoadMetadata(CatalogueRepository, nameof(TestDle_DodgyColumnNames));
    lmd.LocationOfFlatFiles = LoadDirectory.RootPath.FullName;
    lmd.SaveToDatabase();

    CreateFlatFileAttacher(lmd, "Troll.csv", cata.GetTableInfoList(false).Single());

    cata.LoadMetadata_ID = lmd.ID;
    cata.SaveToDatabase();

    Assert.AreEqual(0, tbl.GetRowCount());

    RunDLE(lmd, 30000, true);

    Assert.AreEqual(1, tbl.GetRowCount());
    Assert.AreEqual("fishon", tbl.GetDataTable().Rows[0][",,,,"]);
}
public void Load(DatabaseType databaseType, TestCase testCase)
{
    var defaults = new ServerDefaults(CatalogueRepository);
    var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);

    var db = GetCleanedServer(databaseType);

    var raw = db.Server.ExpectDatabase(db.GetRuntimeName() + "_RAW");
    if (raw.Exists())
    {
        raw.Drop();
    }

    var dt = new DataTable("MyTable");
    dt.Columns.Add("Name");
    dt.Columns.Add("DateOfBirth");
    dt.Columns.Add("FavouriteColour");
    dt.Rows.Add("Bob", "2001-01-01", "Pink");
    dt.Rows.Add("Frank", "2001-01-01", "Orange");

    var nameCol = new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 20), false) { IsPrimaryKey = true };

    if (testCase == TestCase.DodgyCollation)
    {
        if (databaseType == DatabaseType.MicrosoftSQLServer)
        {
            nameCol.Collation = "Latin1_General_CS_AS_KS_WS";
        }
        else if (databaseType == DatabaseType.MySql)
        {
            nameCol.Collation = "latin1_german1_ci";
        }
    }

    DiscoveredTable tbl;

    if (testCase == TestCase.WithNonPrimaryKeyIdentityColumn)
    {
        tbl = db.CreateTable("MyTable", new[]
        {
            new DatabaseColumnRequest("ID", new DatabaseTypeRequest(typeof(int)), false) { IsPrimaryKey = false, IsAutoIncrement = true },
            nameCol,
            new DatabaseColumnRequest("DateOfBirth", new DatabaseTypeRequest(typeof(DateTime)), false) { IsPrimaryKey = true },
            new DatabaseColumnRequest("FavouriteColour", new DatabaseTypeRequest(typeof(string)))
        });

        using (var blk = tbl.BeginBulkInsert())
            blk.Upload(dt);

        Assert.AreEqual(1, tbl.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("ID", StringComparison.CurrentCultureIgnoreCase)), "Table created did not contain ID column");
    }
    else if (testCase == TestCase.AllPrimaryKeys)
    {
        dt.PrimaryKey = dt.Columns.Cast<DataColumn>().ToArray();
        tbl = db.CreateTable("MyTable", dt, new[] { nameCol }); //upload the column as is
        Assert.IsTrue(tbl.DiscoverColumns().All(c => c.IsPrimaryKey));
    }
    else
    {
        tbl = db.CreateTable("MyTable", dt, new[]
        {
            nameCol,
            new DatabaseColumnRequest("DateOfBirth", new DatabaseTypeRequest(typeof(DateTime)), false) { IsPrimaryKey = true }
        });
    }

    Assert.AreEqual(2, tbl.GetRowCount());

    //define a new load configuration
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoad");

    if (testCase == TestCase.NoTrigger)
    {
        lmd.IgnoreTrigger = true;
        lmd.SaveToDatabase();
    }

    TableInfo ti = Import(tbl, lmd, logManager);

    var projectDirectory = SetupLoadDirectory(lmd);

    CreateCSVProcessTask(lmd, ti, "*.csv");

    //create a text file to load where we update Frank's favourite colour (it's a pk field) and we insert a new record (MrMurder)
    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"),
        @"Name,DateOfBirth,FavouriteColour
Frank,2001-01-01,Neon
MrMurder,2001-01-01,Yella");

    //the checks will probably need to be run as ddl admin because it involves creating _Archive table and trigger the first time
    //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations
    var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());

    //create a reader
    if (testCase == TestCase.LowPrivilegeLoaderAccount)
    {
        SetupLowPrivilegeUserRightsFor(ti, TestLowPrivilegePermissions.Reader | TestLowPrivilegePermissions.Writer);
        SetupLowPrivilegeUserRightsFor(db.Server.ExpectDatabase("DLE_STAGING"), TestLowPrivilegePermissions.All);
    }

    Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
    Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");

    var dbConfig = new HICDatabaseConfiguration(lmd, testCase == TestCase.WithCustomTableNamer ? new CustomINameDatabasesAndTablesDuringLoads() : null);

    if (testCase == TestCase.WithCustomTableNamer)
    {
        new PreExecutionChecker(lmd, dbConfig).Check(new AcceptAllCheckNotifier()); //handles staging database creation etc
    }

    if (testCase == TestCase.WithDiffColumnIgnoreRegex)
    {
        dbConfig.UpdateButDoNotDiff = new Regex("^FavouriteColour"); //do not diff FavouriteColour
    }

    var loadFactory = new HICDataLoadFactory(
        lmd,
        dbConfig,
        new HICLoadConfigurationFlags(),
        CatalogueRepository,
        logManager
        );

    try
    {
        var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

        var exitCode = exe.Run(
            new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(), dbConfig),
            new GracefulCancellationToken());

        Assert.AreEqual(ExitCodeType.Success, exitCode);

        if (testCase == TestCase.AllPrimaryKeys)
        {
            Assert.AreEqual(4, tbl.GetRowCount()); //Bob, Frank, Frank (with also pk Neon) & MrMurder
            Assert.Pass();
        }

        if (testCase == TestCase.WithDiffColumnIgnoreRegex)
        {
            Assert.AreEqual(3, tbl.GetRowCount()); //Bob, Frank (original since the diff was skipped) & MrMurder

            //frank should NOT have been updated to Neon because the FavouriteColour diff was skipped
            var frankOld = tbl.GetDataTable().Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
            Assert.AreEqual("Orange", frankOld["FavouriteColour"]);
            Assert.Pass();
        }

        //frank should be updated to like Neon instead of Orange
        Assert.AreEqual(3, tbl.GetRowCount());
        var result = tbl.GetDataTable();
        var frank = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
        Assert.AreEqual("Neon", frank["FavouriteColour"]);

        if (testCase != TestCase.NoTrigger)
        {
            AssertHasDataLoadRunId(frank);
        }

        //MrMurder is a new person who likes Yella
        var mrmurder = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "MrMurder");
        Assert.AreEqual("Yella", mrmurder["FavouriteColour"]);
        Assert.AreEqual(new DateTime(2001, 01, 01), mrmurder["DateOfBirth"]);

        if (testCase != TestCase.NoTrigger)
        {
            AssertHasDataLoadRunId(mrmurder);
        }

        //bob should be untouched (same values as before and no dataloadrunID)
        var bob = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Bob");
        Assert.AreEqual("Pink", bob["FavouriteColour"]);
        Assert.AreEqual(new DateTime(2001, 01, 01), bob["DateOfBirth"]);

        if (testCase != TestCase.NoTrigger)
        {
            Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.DataLoadRunID]);

            //MySql's default of now() on the column will auto populate all the column values with the now() date while Sql Server will leave them as nulls
            if (databaseType == DatabaseType.MicrosoftSQLServer)
            {
                Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.ValidFrom]);
            }
        }

        Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
        Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
    }
    finally
    {
        Directory.Delete(lmd.LocationOfFlatFiles, true);

        foreach (Catalogue c in RepositoryLocator.CatalogueRepository.GetAllObjects<Catalogue>())
        {
            c.DeleteInDatabase();
        }

        foreach (TableInfo t in RepositoryLocator.CatalogueRepository.GetAllObjects<TableInfo>())
        {
            t.DeleteInDatabase();
        }

        foreach (LoadMetadata l in RepositoryLocator.CatalogueRepository.GetAllObjects<LoadMetadata>())
        {
            l.DeleteInDatabase();
        }
    }

    if (testCase == TestCase.WithCustomTableNamer)
    {
        var db2 = db.Server.ExpectDatabase("BB_STAGING");
        if (db2.Exists())
        {
            db2.Drop();
        }
    }
}
protected override void SetUp()
{
    base.SetUp();

    Database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);

    var rootFolder = new DirectoryInfo(TestContext.CurrentContext.TestDirectory);
    var subdir = rootFolder.CreateSubdirectory("TestsRequiringADle");
    LoadDirectory = LoadDirectory.CreateDirectoryStructure(rootFolder, subdir.FullName, true);

    Clear(LoadDirectory);

    LiveTable = CreateDataset<Demography>(Database, 500, 5000, new Random(190));
    LiveTable.CreatePrimaryKey(new DiscoveredColumn[]
    {
        LiveTable.DiscoverColumn("chi"),
        LiveTable.DiscoverColumn("dtCreated"),
        LiveTable.DiscoverColumn("hb_extract")
    });

    TestCatalogue = Import(LiveTable);
    RowsBefore = 5000;

    TestLoadMetadata = new LoadMetadata(CatalogueRepository, "Loading Test Catalogue");
    TestLoadMetadata.LocationOfFlatFiles = LoadDirectory.RootPath.FullName;
    TestLoadMetadata.SaveToDatabase();

    //make the load load the table
    TestCatalogue.LoadMetadata_ID = TestLoadMetadata.ID;
    TestCatalogue.SaveToDatabase();

    var csvProcessTask = new ProcessTask(CatalogueRepository, TestLoadMetadata, LoadStage.Mounting);
    var args = csvProcessTask.CreateArgumentsForClassIfNotExists<AnySeparatorFileAttacher>();
    csvProcessTask.Path = typeof(AnySeparatorFileAttacher).FullName;
    csvProcessTask.ProcessTaskType = ProcessTaskType.Attacher;
    csvProcessTask.SaveToDatabase();

    var filePattern = args.Single(a => a.Name == "FilePattern");
    filePattern.SetValue("*.csv");
    filePattern.SaveToDatabase();

    var tableToLoad = args.Single(a => a.Name == "TableToLoad");
    tableToLoad.SetValue(TestCatalogue.GetTableInfoList(false).Single());
    tableToLoad.SaveToDatabase();

    var separator = args.Single(a => a.Name == "Separator");
    separator.SetValue(",");
    separator.SaveToDatabase();

    var ignoreDataLoadRunIDCol = args.Single(a => a.Name == "IgnoreColumns");
    ignoreDataLoadRunIDCol.SetValue("hic_dataLoadRunID");
    ignoreDataLoadRunIDCol.SaveToDatabase();

    //Get DleRunner to run pre load checks (includes trigger creation etc)
    var runner = new DleRunner(new DleOptions() { LoadMetadata = TestLoadMetadata.ID, Command = CommandLineActivity.check });
    runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new AcceptAllCheckNotifier(), new GracefulCancellationToken());
}
public void TestTemporalTable(bool ignoreWithGlobalPattern)
{
    var dbtype = FAnsi.DatabaseType.MicrosoftSQLServer;
    var db = GetCleanedServer(dbtype);

    using (var con = db.Server.GetConnection())
    {
        con.Open();
        db.Server.GetCommand(sql, con).ExecuteNonQuery();
    }

    var tbl = db.ExpectTable("Employee");

    var defaults = new ServerDefaults(CatalogueRepository);
    var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);

    var raw = db.Server.ExpectDatabase(db.GetRuntimeName() + "_RAW");
    if (raw.Exists())
    {
        raw.Drop();
    }

    //define a new load configuration
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoad");
    lmd.IgnoreTrigger = true;
    lmd.SaveToDatabase();

    ITableInfo ti = Import(tbl, lmd, logManager);

    var projectDirectory = SetupLoadDirectory(lmd);

    CreateCSVProcessTask(lmd, ti, "*.csv");

    //create a text file to load which gives Frank a new position/department and includes a second employee (Herbert)
    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"),
        @"EmployeeID,Name,Position,Department,Address,AnnualSalary
1,Frank,Boss,Department of F'Tang, 22 Innsmouth Way, 55000.5
2,Herbert,Super Boss,Department of F'Tang, 22 Innsmouth Way, 155000.5");

    //the checks will probably need to be run as ddl admin because it involves creating _Archive table and trigger the first time
    //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations
    var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());

    if (ignoreWithGlobalPattern)
    {
        var regex = new StandardRegex(RepositoryLocator.CatalogueRepository)
        {
            ConceptName = StandardRegex.DataLoadEngineGlobalIgnorePattern,
            Regex = "^Valid((From)|(To))$"
        };
        regex.SaveToDatabase();
    }
    else
    {
        var col = ti.ColumnInfos.Single(c => c.GetRuntimeName().Equals("ValidFrom"));
        col.IgnoreInLoads = true;
        col.SaveToDatabase();

        col = ti.ColumnInfos.Single(c => c.GetRuntimeName().Equals("ValidTo"));
        col.IgnoreInLoads = true;
        col.SaveToDatabase();
    }

    var dbConfig = new HICDatabaseConfiguration(lmd, null);

    var loadFactory = new HICDataLoadFactory(
        lmd,
        dbConfig,
        new HICLoadConfigurationFlags(),
        CatalogueRepository,
        logManager
        );

    var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

    var exitCode = exe.Run(
        new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(), dbConfig),
        new GracefulCancellationToken());

    Assert.AreEqual(ExitCodeType.Success, exitCode);

    //frank should be updated to his new department and role
    Assert.AreEqual(2, tbl.GetRowCount());
    var result = tbl.GetDataTable();
    var frank = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
    Assert.AreEqual("Department of F'Tang", frank["Department"]);
    Assert.AreEqual("Boss", frank["Position"]);

    //post test cleanup
    foreach (var regex in RepositoryLocator.CatalogueRepository.GetAllObjects<StandardRegex>())
    {
        regex.DeleteInDatabase();
    }
}
public override void Execute()
{
    if (DicomSourceType == null)
    {
        SetImpossible("You must specify a Type for DicomSourceType");
        throw new ImpossibleCommandException(this, ReasonCommandImpossible);
    }

    base.Execute();

    List<DiscoveredTable> tablesCreated = new List<DiscoveredTable>();

    //Create with template?
    if (Template != null)
    {
        foreach (ImageTableTemplate table in Template.Tables)
        {
            string tblName = GetNameWithPrefix(table.TableName);

            var tbl = _databaseToCreateInto.ExpectTable(tblName);
            var cmd = new ExecuteCommandCreateNewImagingDataset(_repositoryLocator, tbl, table);
            cmd.Execute();

            NewCataloguesCreated.Add(cmd.NewCatalogueCreated);
            tablesCreated.Add(tbl);
        }
    }
    else
    {
        throw new Exception("No Template provided");
    }

    //that's us done if we aren't creating a load
    if (!CreateLoad)
    {
        return;
    }

    string loadName = GetNameWithPrefixInBracketsIfAny("SMI Image Loading");

    NewLoadMetadata = new LoadMetadata(_catalogueRepository, loadName);

    //tell all the catalogues that they are part of this load and where to log under the same task
    foreach (Catalogue c in NewCataloguesCreated)
    {
        c.LoadMetadata_ID = NewLoadMetadata.ID;
        c.LoggingDataTask = loadName;
        c.LiveLoggingServer_ID = _loggingServer.ID;
        c.SaveToDatabase();
    }

    //create the logging task
    new Core.Logging.LogManager(_loggingServer).CreateNewLoggingTaskIfNotExists(loadName);

    var projDir = LoadDirectory.CreateDirectoryStructure(_projectDirectory, "ImageLoading", true);
    NewLoadMetadata.LocationOfFlatFiles = projDir.RootPath.FullName;
    NewLoadMetadata.SaveToDatabase();

    /////////////////////////////////////////////Attacher////////////////////////////

    //Create a pipeline for reading from Dicom files and writing to any destination component (which must be fixed)
    var pipe = new Pipeline(_catalogueRepository, GetNameWithPrefixInBracketsIfAny("Image Loading Pipe"));
    DicomSourcePipelineComponent = new PipelineComponent(_catalogueRepository, pipe, DicomSourceType, 0, DicomSourceType.Name);
    DicomSourcePipelineComponent.CreateArgumentsForClassIfNotExists(DicomSourceType);
    pipe.SourcePipelineComponent_ID = DicomSourcePipelineComponent.ID;
    pipe.SaveToDatabase();

    //Create the load process task that uses the pipe to load RAW tables with data from the dicom files
    var pt = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.Mounting);
    pt.Name = "Auto Routing Attacher";
    pt.ProcessTaskType = ProcessTaskType.Attacher;
    pt.Path = PersistentRaw ? typeof(AutoRoutingAttacherWithPersistentRaw).FullName : typeof(AutoRoutingAttacher).FullName;
    pt.Order = 1;
    pt.SaveToDatabase();

    var args = PersistentRaw
        ? pt.CreateArgumentsForClassIfNotExists<AutoRoutingAttacherWithPersistentRaw>()
        : pt.CreateArgumentsForClassIfNotExists<AutoRoutingAttacher>();

    SetArgument(args, "LoadPipeline", pipe);

    /////////////////////////////////////// Distinct tables on load /////////////////////////

    var distincter = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.AdjustRaw);
    var distincterArgs = distincter.CreateArgumentsForClassIfNotExists<Distincter>();

    distincter.Name = "Distincter";
    distincter.ProcessTaskType = ProcessTaskType.MutilateDataTable;
    distincter.Path = typeof(Distincter).FullName;
    distincter.Order = 2;
    distincter.SaveToDatabase();

    SetArgument(distincterArgs, "TableRegexPattern", ".*");

    /////////////////////////////////////////////////////////////////////////////////////

    if (CreateCoalescer)
    {
        var coalescer = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.AdjustRaw);
        coalescer.Name = "Coalescer";
        coalescer.ProcessTaskType = ProcessTaskType.MutilateDataTable;
        coalescer.Path = typeof(Coalescer).FullName;
        coalescer.Order = 3;
        coalescer.SaveToDatabase();

        StringBuilder regexPattern = new StringBuilder();

        foreach (var tbl in tablesCreated)
        {
            if (!tbl.DiscoverColumns().Any(c => c.GetRuntimeName().Equals("SOPInstanceUID", StringComparison.CurrentCultureIgnoreCase)))
            {
                regexPattern.Append("(" + tbl.GetRuntimeName() + ")|");
            }
        }

        var coalArgs = coalescer.CreateArgumentsForClassIfNotExists<Coalescer>();
        SetArgument(coalArgs, "TableRegexPattern", regexPattern.ToString().TrimEnd('|'));
        SetArgument(coalArgs, "CreateIndex", true);
    }

    ////////////////////////////////Load Ender (if no rows in load) ////////////////////////////

    var prematureLoadEnder = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.Mounting);
    prematureLoadEnder.Name = "Premature Load Ender";
    prematureLoadEnder.ProcessTaskType = ProcessTaskType.MutilateDataTable;
    prematureLoadEnder.Path = typeof(PrematureLoadEnder).FullName;
    prematureLoadEnder.Order = 4;
    prematureLoadEnder.SaveToDatabase();

    args = prematureLoadEnder.CreateArgumentsForClassIfNotExists<PrematureLoadEnder>();
    SetArgument(args, "ExitCodeToReturnIfConditionMet", ExitCodeType.OperationNotRequired);
    SetArgument(args, "ConditionsToTerminateUnder", PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase);

    ////////////////////////////////////////////////////////////////////////////////////////////////

    var checker = new CheckEntireDataLoadProcess(NewLoadMetadata, new HICDatabaseConfiguration(NewLoadMetadata), new HICLoadConfigurationFlags(), _catalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());
}