public void CreateNewExternalServerAndConfigureItAsDefault()
{
    ServerDefaults defaults = new ServerDefaults(CatalogueRepository);

    var databaseServer = new ExternalDatabaseServer(CatalogueRepository, "Deleteme", null);
    try
    {
        Assert.AreEqual("Deleteme", databaseServer.Name);
        databaseServer.Password = "******"; //automatically encrypts password

        Assert.AreNotEqual("nothing", databaseServer.Password);              //should not match what we just set it to
        Assert.AreEqual("nothing", databaseServer.GetDecryptedPassword());   //should match what we set it to because of explicit call to decrypt

        databaseServer.Server = "Bob";
        databaseServer.Database = "TEST";
        databaseServer.SaveToDatabase();

        Catalogue cata = new Catalogue(CatalogueRepository, "TestCatalogueFor_CreateNewExternalServerAndConfigureItAsDefault");
        cata.DeleteInDatabase();
    }
    finally
    {
        databaseServer.DeleteInDatabase();
    }
}
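The example above creates the server but never actually registers it as a default. A minimal sketch of that step, assuming the same SetDefault/GetDefaultFor API shown in the later examples (the PermissableDefaults value and the ID comparison are illustrative, not taken from the original test):

    //hypothetical follow-up: register the new server as the default RAW data load server, then read it back
    defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, databaseServer);
    var current = defaults.GetDefaultFor(PermissableDefaults.RAWDataLoadServer);
    Assert.AreEqual(databaseServer.ID, current.ID); //assumes the default resolves back to the same server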
public SmtpListener(IServiceScopeFactory scopeFactory, IOptions<ServerDefaults> serverDefaults)
{
    _scopeFactory = scopeFactory;
    _serverDefaults = serverDefaults.Value;

    var options = new OptionsBuilder()
        .ServerName("localhost")
        .Port(GetSmtpPorts())
        .AuthenticationRequired(false)
        .AllowUnsecureAuthentication(true)
        .UserAuthenticator(new AuthFactory())
        .MessageStore(new SmtpMessageStore())
        .Build();

    smtpServer = new SmtpServer.SmtpServer(options);
    smtpServer.SessionCreated += SmtpServer_SessionCreated;
    smtpServer.SessionCompleted += SmtpServer_SessionCompleted;
}
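For the IOptions&lt;ServerDefaults&gt; constructor parameter above to resolve, ServerDefaults has to be bound during service registration. A minimal sketch, assuming a "ServerDefaults" configuration section and singleton registration of SmtpListener (both are assumptions; the original registration code is not shown):

    //hypothetical startup registration (assumed section name and lifetime)
    services.Configure<ServerDefaults>(configuration.GetSection("ServerDefaults"));
    services.AddSingleton<SmtpListener>();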
public void TestSingleJob(bool overrideRAW, bool sendDodgyCredentials)
{
    if (sendDodgyCredentials && !overrideRAW)
    {
        throw new NotSupportedException("Cannot send dodgy credentials if you aren't overriding RAW");
    }

    ServerDefaults defaults = new ServerDefaults(CatalogueRepository);
    var oldDefault = defaults.GetDefaultFor(PermissableDefaults.RAWDataLoadServer);

    var testDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
    var testDir = Directory.CreateDirectory(testDirPath);
    var server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn;

    var catalogueEntities = new CatalogueEntities();
    var databaseHelper = new DatabaseHelper();
    ExternalDatabaseServer external = null;

    try
    {
        // Set up the dataset's project directory and add the CSV file to ForLoading
        var loadDirectory = LoadDirectory.CreateDirectoryStructure(testDir, "TestDataset");
        File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "1.csv"), "Col1\r\n1\r\n2\r\n3\r\n4");

        databaseHelper.SetUp(server);

        // Create the Catalogue entities for the dataset
        catalogueEntities.Create(CatalogueRepository, databaseHelper.DatabaseToLoad, loadDirectory);

        if (overrideRAW)
        {
            external = new ExternalDatabaseServer(CatalogueRepository, "RAW Server", null);
            external.SetProperties(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("master"));

            if (sendDodgyCredentials)
            {
                external.Username = "******";
                external.Password = "******";
            }

            external.SaveToDatabase();
            defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, external);
        }

        var options = new DleOptions();
        options.LoadMetadata = catalogueEntities.LoadMetadata.ID;
        options.Command = CommandLineActivity.check;

        //run checks (ignoring errors if we are sending dodgy credentials)
        new RunnerFactory().CreateRunner(new ThrowImmediatelyActivator(RepositoryLocator), options).Run(
            RepositoryLocator,
            new ThrowImmediatelyDataLoadEventListener(),
            sendDodgyCredentials ? (ICheckNotifier)new IgnoreAllErrorsCheckNotifier() : new AcceptAllCheckNotifier(),
            new GracefulCancellationToken());

        //run load
        options.Command = CommandLineActivity.run;
        var runner = new RunnerFactory().CreateRunner(new ThrowImmediatelyActivator(RepositoryLocator), options);

        if (sendDodgyCredentials)
        {
            var ex = Assert.Throws<Exception>(() => runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new AcceptAllCheckNotifier(), new GracefulCancellationToken()));
            Assert.IsTrue(ex.InnerException.Message.Contains("Login failed for user 'IveGotaLovely'"), "Error message did not contain expected text");
            return;
        }
        else
        {
            runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new AcceptAllCheckNotifier(), new GracefulCancellationToken());
        }

        var archiveFile = loadDirectory.ForArchiving.EnumerateFiles("*.zip").OrderByDescending(f => f.FullName).FirstOrDefault();
        Assert.NotNull(archiveFile, "Archive file has not been created by the load.");
        Assert.IsFalse(loadDirectory.ForLoading.EnumerateFileSystemInfos().Any());
    }
    finally
    {
        //reset the original RAW server
        defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, oldDefault);

        if (external != null)
        {
            external.DeleteInDatabase();
        }

        testDir.Delete(true);
        databaseHelper.Dispose();
        catalogueEntities.Dispose();
    }
}
public void TestTemporalTable(bool ignoreWithGlobalPattern)
{
    var dbtype = FAnsi.DatabaseType.MicrosoftSQLServer;
    var db = GetCleanedServer(dbtype);

    using (var con = db.Server.GetConnection())
    {
        con.Open();
        db.Server.GetCommand(sql, con).ExecuteNonQuery();
    }

    var tbl = db.ExpectTable("Employee");

    var defaults = new ServerDefaults(CatalogueRepository);
    var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);

    var raw = db.Server.ExpectDatabase(db.GetRuntimeName() + "_RAW");
    if (raw.Exists())
    {
        raw.Drop();
    }

    //define a new load configuration
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoad");
    lmd.IgnoreTrigger = true;
    lmd.SaveToDatabase();

    ITableInfo ti = Import(tbl, lmd, logManager);

    var projectDirectory = SetupLoadDirectory(lmd);

    CreateCSVProcessTask(lmd, ti, "*.csv");

    //create a text file to load where we update Frank's record and insert a new record (Herbert)
    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"),
        @"EmployeeID,Name,Position,Department,Address,AnnualSalary
1,Frank,Boss,Department of F'Tang, 22 Innsmouth Way, 55000.5
2,Herbert,Super Boss,Department of F'Tang, 22 Innsmouth Way, 155000.5");

    //the checks will probably need to be run as ddl admin because it involves creating _Archive table and trigger the first time

    //clean up RAW / STAGING etc and generally accept proposed cleanup operations
    var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());

    if (ignoreWithGlobalPattern)
    {
        var regex = new StandardRegex(RepositoryLocator.CatalogueRepository)
        {
            ConceptName = StandardRegex.DataLoadEngineGlobalIgnorePattern,
            Regex = "^Valid((From)|(To))$"
        };
        regex.SaveToDatabase();
    }
    else
    {
        var col = ti.ColumnInfos.Single(c => c.GetRuntimeName().Equals("ValidFrom"));
        col.IgnoreInLoads = true;
        col.SaveToDatabase();

        col = ti.ColumnInfos.Single(c => c.GetRuntimeName().Equals("ValidTo"));
        col.IgnoreInLoads = true;
        col.SaveToDatabase();
    }

    var dbConfig = new HICDatabaseConfiguration(lmd, null);

    var loadFactory = new HICDataLoadFactory(
        lmd,
        dbConfig,
        new HICLoadConfigurationFlags(),
        CatalogueRepository,
        logManager
        );

    var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

    var exitCode = exe.Run(
        new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(), dbConfig),
        new GracefulCancellationToken());

    Assert.AreEqual(ExitCodeType.Success, exitCode);

    //frank should be updated to his new department and role
    Assert.AreEqual(2, tbl.GetRowCount());
    var result = tbl.GetDataTable();
    var frank = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
    Assert.AreEqual("Department of F'Tang", frank["Department"]);
    Assert.AreEqual("Boss", frank["Position"]);

    //post test cleanup
    foreach (var regex in RepositoryLocator.CatalogueRepository.GetAllObjects<StandardRegex>())
    {
        regex.DeleteInDatabase();
    }
}
/// <inheritdoc/>
public LogManager GetDefaultLogManager()
{
    ServerDefaults defaults = new ServerDefaults(this);
    return new LogManager(defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID));
}
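Callers can then obtain a ready-made LogManager without touching ServerDefaults directly. A minimal usage sketch, assuming GetDefaultLogManager is exposed by the catalogue repository and reusing CreateNewLoggingTaskIfNotExists from the merge example below (both placements are assumptions):

    //hypothetical caller: resolve the default live logging server and make sure a logging task exists
    var logManager = CatalogueRepository.GetDefaultLogManager();
    logManager.CreateNewLoggingTaskIfNotExists("MyLoadTask");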
public void TestMerge(DatabaseType databaseType)
{
    //microsoft one gets called for free in test setup (see base class)
    if (databaseType != DatabaseType.MicrosoftSQLServer)
    {
        SetupFromTo(databaseType);
    }

    var dt = new DataTable();
    var colName = new DataColumn("Name", typeof(string));
    var colAge = new DataColumn("Age", typeof(int));
    dt.Columns.Add(colName);
    dt.Columns.Add(colAge);
    dt.Columns.Add("Postcode", typeof(string));

    //data already in live, awaiting update
    dt.Rows.Add(new object[] { "Dave", 18, "DD3 1AB" });
    dt.Rows.Add(new object[] { "Dave", 25, "DD1 1XS" });
    dt.Rows.Add(new object[] { "Mango", 32, DBNull.Value });
    dt.Rows.Add(new object[] { "Filli", 32, "DD3 78L" });
    dt.Rows.Add(new object[] { "Mandrake", 32, DBNull.Value });

    dt.PrimaryKey = new[] { colName, colAge };

    var toTbl = To.CreateTable("ToTable", dt);

    Assert.IsTrue(toTbl.DiscoverColumn("Name").IsPrimaryKey);
    Assert.IsTrue(toTbl.DiscoverColumn("Age").IsPrimaryKey);
    Assert.IsFalse(toTbl.DiscoverColumn("Postcode").IsPrimaryKey);

    dt.Rows.Clear();

    //new data being loaded
    dt.Rows.Add(new object[] { "Dave", 25, "DD1 1PS" });        //update: changes postcode to "DD1 1PS"
    dt.Rows.Add(new object[] { "Chutney", 32, DBNull.Value });  //new insert Chutney
    dt.Rows.Add(new object[] { "Mango", 32, DBNull.Value });    //ignored because already present in dataset
    dt.Rows.Add(new object[] { "Filli", 32, DBNull.Value });    //update from "DD3 78L" to null
    dt.Rows.Add(new object[] { "Mandrake", 32, "DD1 1PS" });    //update from null to "DD1 1PS"
    dt.Rows.Add(new object[] { "Mandrake", 31, "DD1 1PS" });    //insert because Age is unique (and part of pk)

    var fromTbl = From.CreateTable(DatabaseName + "_ToTable_STAGING", dt);

    //import the live table as a TableInfo
    TableInfo ti;
    ColumnInfo[] cis;
    var cata = Import(toTbl, out ti, out cis);

    //put the backup trigger on the live table (this will also create the needed hic_ columns etc)
    var triggerImplementer = new TriggerImplementerFactory(databaseType).Create(toTbl);
    triggerImplementer.CreateTrigger(new ThrowImmediatelyCheckNotifier());

    var configuration = new MigrationConfiguration(From, LoadBubble.Staging, LoadBubble.Live,
        new FixedStagingDatabaseNamer(toTbl.Database.GetRuntimeName(), fromTbl.Database.GetRuntimeName()));

    var lmd = new LoadMetadata(CatalogueRepository);
    cata.LoadMetadata_ID = lmd.ID;
    cata.SaveToDatabase();

    var migrationHost = new MigrationHost(From, To, configuration, new HICDatabaseConfiguration(lmd));

    //set up a logging task
    var logServer = new ServerDefaults(CatalogueRepository).GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);
    logManager.CreateNewLoggingTaskIfNotExists("CrossDatabaseMergeCommandTest");
    var dli = logManager.CreateDataLoadInfo("CrossDatabaseMergeCommandTest", "tests", "running test", "", true);

    var job = new ThrowImmediatelyDataLoadJob();
    job.DataLoadInfo = dli;
    job.RegularTablesToLoad = new List<ITableInfo>(new[] { ti });

    migrationHost.Migrate(job, new GracefulCancellationToken());

    var resultantDt = toTbl.GetDataTable();
    Assert.AreEqual(7, resultantDt.Rows.Count);

    AssertRowEquals(resultantDt, "Dave", 25, "DD1 1PS");
    AssertRowEquals(resultantDt, "Chutney", 32, DBNull.Value);
    AssertRowEquals(resultantDt, "Mango", 32, DBNull.Value);
    AssertRowEquals(resultantDt, "Filli", 32, DBNull.Value);
    AssertRowEquals(resultantDt, "Mandrake", 32, "DD1 1PS");
    AssertRowEquals(resultantDt, "Mandrake", 31, "DD1 1PS");
    AssertRowEquals(resultantDt, "Dave", 18, "DD3 1AB");

    var archival = logManager.GetArchivalDataLoadInfos("CrossDatabaseMergeCommandTest", new CancellationToken());
    var log = archival.First();

    Assert.AreEqual(dli.ID, log.ID);
    Assert.AreEqual(2, log.TableLoadInfos.Single().Inserts);
    Assert.AreEqual(3, log.TableLoadInfos.Single().Updates);
}
public void Load(DatabaseType databaseType, TestCase testCase)
{
    var defaults = new ServerDefaults(CatalogueRepository);
    var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);

    var db = GetCleanedServer(databaseType);

    var raw = db.Server.ExpectDatabase(db.GetRuntimeName() + "_RAW");
    if (raw.Exists())
    {
        raw.Drop();
    }

    var dt = new DataTable("MyTable");
    dt.Columns.Add("Name");
    dt.Columns.Add("DateOfBirth");
    dt.Columns.Add("FavouriteColour");
    dt.Rows.Add("Bob", "2001-01-01", "Pink");
    dt.Rows.Add("Frank", "2001-01-01", "Orange");

    var nameCol = new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 20), false) { IsPrimaryKey = true };

    if (testCase == TestCase.DodgyCollation)
    {
        if (databaseType == DatabaseType.MicrosoftSQLServer)
        {
            nameCol.Collation = "Latin1_General_CS_AS_KS_WS";
        }
        else if (databaseType == DatabaseType.MySql)
        {
            nameCol.Collation = "latin1_german1_ci";
        }
    }

    DiscoveredTable tbl;
    if (testCase == TestCase.WithNonPrimaryKeyIdentityColumn)
    {
        tbl = db.CreateTable("MyTable", new[]
        {
            new DatabaseColumnRequest("ID", new DatabaseTypeRequest(typeof(int)), false) { IsPrimaryKey = false, IsAutoIncrement = true },
            nameCol,
            new DatabaseColumnRequest("DateOfBirth", new DatabaseTypeRequest(typeof(DateTime)), false) { IsPrimaryKey = true },
            new DatabaseColumnRequest("FavouriteColour", new DatabaseTypeRequest(typeof(string))),
        });

        using (var blk = tbl.BeginBulkInsert())
            blk.Upload(dt);

        Assert.AreEqual(1, tbl.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("ID", StringComparison.CurrentCultureIgnoreCase)), "Table created did not contain ID column");
    }
    else if (testCase == TestCase.AllPrimaryKeys)
    {
        dt.PrimaryKey = dt.Columns.Cast<DataColumn>().ToArray();
        tbl = db.CreateTable("MyTable", dt, new[] { nameCol }); //upload the column as is
        Assert.IsTrue(tbl.DiscoverColumns().All(c => c.IsPrimaryKey));
    }
    else
    {
        tbl = db.CreateTable("MyTable", dt, new[]
        {
            nameCol,
            new DatabaseColumnRequest("DateOfBirth", new DatabaseTypeRequest(typeof(DateTime)), false) { IsPrimaryKey = true }
        });
    }

    Assert.AreEqual(2, tbl.GetRowCount());

    //define a new load configuration
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoad");

    if (testCase == TestCase.NoTrigger)
    {
        lmd.IgnoreTrigger = true;
        lmd.SaveToDatabase();
    }

    TableInfo ti = Import(tbl, lmd, logManager);

    var projectDirectory = SetupLoadDirectory(lmd);

    CreateCSVProcessTask(lmd, ti, "*.csv");

    //create a text file to load where we update Frank's favourite colour (it's a pk field) and we insert a new record (MrMurder)
    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"),
        @"Name,DateOfBirth,FavouriteColour
Frank,2001-01-01,Neon
MrMurder,2001-01-01,Yella");

    //the checks will probably need to be run as ddl admin because it involves creating _Archive table and trigger the first time

    //clean up RAW / STAGING etc and generally accept proposed cleanup operations
    var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());

    //create a reader
    if (testCase == TestCase.LowPrivilegeLoaderAccount)
    {
        SetupLowPrivilegeUserRightsFor(ti, TestLowPrivilegePermissions.Reader | TestLowPrivilegePermissions.Writer);
        SetupLowPrivilegeUserRightsFor(db.Server.ExpectDatabase("DLE_STAGING"), TestLowPrivilegePermissions.All);
    }

    Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
    Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");

    var dbConfig = new HICDatabaseConfiguration(lmd, testCase == TestCase.WithCustomTableNamer ? new CustomINameDatabasesAndTablesDuringLoads() : null);

    if (testCase == TestCase.WithCustomTableNamer)
    {
        new PreExecutionChecker(lmd, dbConfig).Check(new AcceptAllCheckNotifier()); //handles staging database creation etc
    }

    if (testCase == TestCase.WithDiffColumnIgnoreRegex)
    {
        dbConfig.UpdateButDoNotDiff = new Regex("^FavouriteColour"); //do not diff FavouriteColour
    }

    var loadFactory = new HICDataLoadFactory(
        lmd,
        dbConfig,
        new HICLoadConfigurationFlags(),
        CatalogueRepository,
        logManager
        );

    try
    {
        var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

        var exitCode = exe.Run(
            new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(), dbConfig),
            new GracefulCancellationToken());

        Assert.AreEqual(ExitCodeType.Success, exitCode);

        if (testCase == TestCase.AllPrimaryKeys)
        {
            Assert.AreEqual(4, tbl.GetRowCount()); //Bob, Frank, Frank (with also pk Neon) & MrMurder
            Assert.Pass();
        }

        if (testCase == TestCase.WithDiffColumnIgnoreRegex)
        {
            Assert.AreEqual(3, tbl.GetRowCount()); //Bob, Frank (original since the diff was skipped), & MrMurder

            //frank should still like Orange because the diff on FavouriteColour was skipped
            Assert.AreEqual(3, tbl.GetRowCount());
            var frankOld = tbl.GetDataTable().Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
            Assert.AreEqual("Orange", frankOld["FavouriteColour"]);
            Assert.Pass();
        }

        //frank should be updated to like Neon instead of Orange
        Assert.AreEqual(3, tbl.GetRowCount());
        var result = tbl.GetDataTable();
        var frank = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Frank");
        Assert.AreEqual("Neon", frank["FavouriteColour"]);

        if (testCase != TestCase.NoTrigger)
        {
            AssertHasDataLoadRunId(frank);
        }

        //MrMurder is a new person who likes Yella
        var mrmurder = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "MrMurder");
        Assert.AreEqual("Yella", mrmurder["FavouriteColour"]);
        Assert.AreEqual(new DateTime(2001, 01, 01), mrmurder["DateOfBirth"]);

        if (testCase != TestCase.NoTrigger)
        {
            AssertHasDataLoadRunId(mrmurder);
        }

        //bob should be untouched (same values as before and no dataloadrunID)
        var bob = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Bob");
        Assert.AreEqual("Pink", bob["FavouriteColour"]);
        Assert.AreEqual(new DateTime(2001, 01, 01), bob["DateOfBirth"]);

        if (testCase != TestCase.NoTrigger)
        {
            Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.DataLoadRunID]);

            //adding a default of now() in MySql will auto populate all the column values with the now() date while Sql Server will leave them as nulls
            if (databaseType == DatabaseType.MicrosoftSQLServer)
            {
                Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.ValidFrom]);
            }
        }

        Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
        Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}");
    }
    finally
    {
        Directory.Delete(lmd.LocationOfFlatFiles, true);

        foreach (Catalogue c in RepositoryLocator.CatalogueRepository.GetAllObjects<Catalogue>())
        {
            c.DeleteInDatabase();
        }

        foreach (TableInfo t in RepositoryLocator.CatalogueRepository.GetAllObjects<TableInfo>())
        {
            t.DeleteInDatabase();
        }

        foreach (LoadMetadata l in RepositoryLocator.CatalogueRepository.GetAllObjects<LoadMetadata>())
        {
            l.DeleteInDatabase();
        }
    }

    if (testCase == TestCase.WithCustomTableNamer)
    {
        var db2 = db.Server.ExpectDatabase("BB_STAGING");
        if (db2.Exists())
        {
            db2.Drop();
        }
    }
}
public void DLELoadTwoTables(DatabaseType databaseType)
{
    //setup the data tables
    var defaults = new ServerDefaults(CatalogueRepository);
    var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);

    var db = GetCleanedServer(databaseType);

    var dtParent = new DataTable();
    dtParent.Columns.Add("ID", typeof(int));
    dtParent.Columns.Add("Name");
    dtParent.Columns.Add("Height");
    dtParent.PrimaryKey = new[] { dtParent.Columns[0] };
    dtParent.Rows.Add("1", "Dave", "3.5");

    var dtChild = new DataTable();
    dtChild.Columns.Add("Parent_ID");
    dtChild.Columns.Add("ChildNumber");
    dtChild.Columns.Add("Name");
    dtChild.Columns.Add("DateOfBirth");
    dtChild.Columns.Add("Age");
    dtChild.Columns.Add("Height");

    dtChild.Rows.Add("1", "1", "Child1", "2001-01-01", "20", "3.5");
    dtChild.Rows.Add("1", "2", "Child2", "2002-01-01", "19", "3.4");

    dtChild.PrimaryKey = new[] { dtChild.Columns[0], dtChild.Columns[1] };

    //create the parent table based on the DataTable
    var parentTbl = db.CreateTable("Parent", dtParent);

    //go find the primary key column created
    var pkParentID = parentTbl.DiscoverColumn("ID");

    //forward declare this column as part of pk (will be used to specify the foreign key)
    var fkParentID = new DatabaseColumnRequest("Parent_ID", "int") { IsPrimaryKey = true };

    var args = new CreateTableArgs(
        db,
        "Child",
        null,
        dtChild,
        false,
        new Dictionary<DatabaseColumnRequest, DiscoveredColumn>()
        {
            { fkParentID, pkParentID }
        },
        true);
    args.ExplicitColumnDefinitions = new[] { fkParentID };

    var childTbl = db.CreateTable(args);

    Assert.AreEqual(1, parentTbl.GetRowCount());
    Assert.AreEqual(2, childTbl.GetRowCount());

    //create a new load
    var lmd = new LoadMetadata(CatalogueRepository, "MyLoading2");

    TableInfo childTableInfo = Import(childTbl, lmd, logManager);
    TableInfo parentTableInfo = Import(parentTbl, lmd, logManager);

    var projectDirectory = SetupLoadDirectory(lmd);

    CreateCSVProcessTask(lmd, parentTableInfo, "parent.csv");
    CreateCSVProcessTask(lmd, childTableInfo, "child.csv");

    //create text files to load where we update Dave (the parent), insert Man2, and update/insert child records
    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "parent.csv"),
        @"ID,Name,Height
2,Man2,3.1
1,Dave,3.2");

    File.WriteAllText(
        Path.Combine(projectDirectory.ForLoading.FullName, "child.csv"),
        @"Parent_ID,ChildNumber,Name,DateOfBirth,Age,Height
1,1,UpdC1,2001-01-01,20,3.5
2,1,NewC1,2000-01-01,19,null");

    //clean up RAW / STAGING etc and generally accept proposed cleanup operations
    var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF);
    checker.Check(new AcceptAllCheckNotifier());

    var config = new HICDatabaseConfiguration(lmd);

    var loadFactory = new HICDataLoadFactory(
        lmd,
        config,
        new HICLoadConfigurationFlags(),
        CatalogueRepository,
        logManager
        );

    try
    {
        var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

        var exitCode = exe.Run(
            new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(), config),
            new GracefulCancellationToken());

        Assert.AreEqual(ExitCodeType.Success, exitCode);

        //should now be 2 parents: the original (who was updated) plus 1 new one (Man2)
        Assert.AreEqual(2, parentTbl.GetRowCount());
        var result = parentTbl.GetDataTable();
        var dave = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "Dave");
        Assert.AreEqual(3.2f, dave["Height"]); //should now be only 3.2 inches high
        AssertHasDataLoadRunId(dave);

        //should be 3 children (Child1 who gets updated to be called UpdC1) and NewC1
        Assert.AreEqual(3, childTbl.GetRowCount());
        result = childTbl.GetDataTable();

        var updC1 = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "UpdC1");
        Assert.AreEqual(1, updC1["Parent_ID"]);
        Assert.AreEqual(1, updC1["ChildNumber"]);
        AssertHasDataLoadRunId(updC1);

        var newC1 = result.Rows.Cast<DataRow>().Single(r => (string)r["Name"] == "NewC1");
        Assert.AreEqual(2, newC1["Parent_ID"]);
        Assert.AreEqual(1, newC1["ChildNumber"]);
        Assert.AreEqual(DBNull.Value, newC1["Height"]); //the "null" in the input file should be DBNull.Value in the final database
        AssertHasDataLoadRunId(newC1);
    }
    finally
    {
        Directory.Delete(lmd.LocationOfFlatFiles, true);

        foreach (Catalogue c in RepositoryLocator.CatalogueRepository.GetAllObjects<Catalogue>())
        {
            c.DeleteInDatabase();
        }

        foreach (TableInfo t in RepositoryLocator.CatalogueRepository.GetAllObjects<TableInfo>())
        {
            t.DeleteInDatabase();
        }

        foreach (LoadMetadata l in RepositoryLocator.CatalogueRepository.GetAllObjects<LoadMetadata>())
        {
            l.DeleteInDatabase();
        }
    }
}