public void SetupBulkTestData()
{
    // Remove remnants of any previous run before creating fresh data
    Console.WriteLine("Cleaning up remnants");
    Cleanup();

    Console.WriteLine("Setting up bulk test data");
    _bulkData = new BulkTestsData(RepositoryLocator.CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn);
    _bulkData.SetupTestData();

    // Import the bulk table as TableInfo/ColumnInfo metadata in the Catalogue
    Console.WriteLine("Importing to Catalogue");
    var table = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(BulkTestsData.BulkDataTable);
    var importer = new TableInfoImporter(CatalogueRepository, table);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);

    Console.WriteLine("Imported TableInfo " + tableInfoCreated);
    Console.WriteLine("Imported ColumnInfos " + string.Join(",", columnInfosCreated.Select(c => c.GetRuntimeName())));

    Assert.NotNull(tableInfoCreated);

    var chi = columnInfosCreated.Single(c => c.GetRuntimeName().Equals("chi"));
    Console.WriteLine("CHI is primary key? (expecting true):" + chi.IsPrimaryKey);
    Assert.IsTrue(chi.IsPrimaryKey);

    // Drop the name columns (surname then forename, as before) so tests
    // downstream do not operate on them
    foreach (var nameColumn in new[] { "surname", "forename" })
        tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals(nameColumn)).DeleteInDatabase();

    tableInfoCreated.ClearAllInjections();
}
private BulkTestsData SetupTestData(out ColumnInfo l2ColumnInfo)
{
    // Create the bulk test table and import it as a Catalogue
    var testData = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn);
    testData.SetupTestData();
    testData.ImportAsCatalogue();

    // Validation rule under test: previous_address_L1 must not match previous_address_L2
    var validator = new Validator();
    var itemValidator = new ItemValidator("previous_address_L1");

    l2ColumnInfo = testData.columnInfos.Single(c => c.GetRuntimeName().Equals("previous_address_L2"));

    // Secondary constraint referencing the L2 column; inverted logic means the
    // values are required to differ rather than match
    var referentialConstraint = new ReferentialIntegrityConstraint(CatalogueRepository)
    {
        InvertLogic = true,
        OtherColumnInfo = l2ColumnInfo
    };
    itemValidator.SecondaryConstraints.Add(referentialConstraint);

    // Normally there would be one ItemValidator per validated column but this
    // test only needs the one
    validator.ItemValidators.Add(itemValidator);

    testData.catalogue.ValidatorXML = validator.SaveToXml();
    testData.catalogue.SaveToDatabase();

    return testData;
}
protected override void OneTimeSetUp()
{
    base.OneTimeSetUp();

    Console.WriteLine("Setting SetUp bulk test data");
    _bulkData = new BulkTestsData(RepositoryLocator.CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer));
    _bulkData.SetupTestData();

    // Import the bulk table as TableInfo/ColumnInfo metadata in the Catalogue
    Console.WriteLine("Importing to Catalogue");
    var importer = new TableInfoImporter(CatalogueRepository, _bulkData.Table);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);

    Console.WriteLine("Imported TableInfo " + tableInfoCreated);
    Console.WriteLine("Imported ColumnInfos " + string.Join(",", columnInfosCreated.Select(c => c.GetRuntimeName())));

    Assert.NotNull(tableInfoCreated);

    var chi = columnInfosCreated.Single(c => c.GetRuntimeName().Equals("chi"));
    Console.WriteLine("CHI is primary key? (expecting true):" + chi.IsPrimaryKey);
    Assert.IsTrue(chi.IsPrimaryKey);

    // Drop the name columns (surname then forename, as before) so tests
    // downstream do not operate on them
    foreach (var nameColumn in new[] { "surname", "forename" })
        tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals(nameColumn)).DeleteInDatabase();

    tableInfoCreated.ClearAllInjections();
}
protected override void OneTimeSetUp()
{
    base.OneTimeSetUp();

    // Fresh bulk dataset in a cleaned SQL Server database for this fixture
    var database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);
    _bulkTestData = new BulkTestsData(CatalogueRepository, database);
    _bulkTestData.SetupTestData();
}
public void PlanManagementTest()
{
    // Target database into which the ANO version would be forward engineered
    var dbName = TestDatabaseNames.GetConsistentName("PlanManagementTests");
    var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);
    db.Create(true);

    var bulk = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);
    bulk.SetupTestData();
    bulk.ImportAsCatalogue();

    var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);
    planManager.TargetDatabase = db;

    // With no operations configured yet the plan should check clean
    Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    // A table with the same name already existing in the endpoint database is a problem
    db.CreateTable(bulk.tableInfo.GetRuntimeName(), new DatabaseColumnRequest[] { new DatabaseColumnRequest("fish", "varchar(100)") });
    Assert.Throws<Exception>(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    // Dropping the clashing table restores a passing check
    db.ExpectTable(bulk.tableInfo.GetRuntimeName()).Drop();
    Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    // Configure test migration rules; the plan should still check clean
    CreateMigrationRules(planManager, bulk);
    Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    // Dropping the primary key column must be rejected at Check time
    var chi = bulk.GetColumnInfo("chi");
    Assert.Throws<Exception>(() =>
    {
        planManager.GetPlanForColumnInfo(chi).Plan = Plan.Drop;
        planManager.GetPlanForColumnInfo(chi).Check(new ThrowImmediatelyCheckNotifier());
    }, "Should not be able to drop primary key column");

    db.Drop();
}
/// <summary>
/// Builds the shared test fixture state: a 100-row bulk dataset imported as a
/// Catalogue, three identical "chi"-dimension aggregates and a cohort
/// identification configuration with an EXCEPT root container and a UNION
/// sub-container.
/// </summary>
/// <param name="repository">Catalogue repository in which all objects are created.</param>
public void SetupTestData(ICatalogueRepository repository)
{
    // Start from a clean slate
    BlitzMainDataTables();

    Database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);

    testData = new BulkTestsData(repository, Database, 100);
    testData.SetupTestData();
    testData.ImportAsCatalogue();

    // The three aggregates were previously created with identical copy-pasted
    // code; factored into one helper. Each aggregate has a null CountSQL and a
    // single dimension on the "chi" extraction information.
    Func<string, AggregateConfiguration> createChiAggregate = name =>
    {
        var aggregate = new AggregateConfiguration(repository, testData.catalogue, name);
        aggregate.CountSQL = null;
        aggregate.SaveToDatabase();
        new AggregateDimension(repository, testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate);
        return aggregate;
    };

    aggregate1 = createChiAggregate("UnitTestAggregate1");
    aggregate2 = createChiAggregate("UnitTestAggregate2");
    aggregate3 = createChiAggregate("UnitTestAggregate3");

    cohortIdentificationConfiguration = new CohortIdentificationConfiguration(repository, "UnitTestIdentification");

    rootcontainer = new CohortAggregateContainer(repository, SetOperation.EXCEPT);
    container1 = new CohortAggregateContainer(repository, SetOperation.UNION);

    cohortIdentificationConfiguration.RootCohortAggregateContainer_ID = rootcontainer.ID;
    cohortIdentificationConfiguration.SaveToDatabase();

    // Rename the aggregates to match the cic naming convention
    cohortIdentificationConfiguration.EnsureNamingConvention(aggregate1);
    cohortIdentificationConfiguration.EnsureNamingConvention(aggregate2);
    cohortIdentificationConfiguration.EnsureNamingConvention(aggregate3);
}
public void DeleteAReferencedValidationXML()
{
    ColumnInfo l2ColumnInfo;
    var testData = SetupTestData(out l2ColumnInfo);

    try
    {
        Validator.LocatorForXMLDeserialization = RepositoryLocator;
        var worked = Validator.LoadFromXml(testData.catalogue.ValidatorXML);

        // The XML stores the ID of the referenced column, not its name, because a
        // data access portal is needed to reach the column's contents which may
        // (and normally would) live in a different table
        Assert.IsFalse(testData.catalogue.ValidatorXML.Contains("previous_address_L2"));
        Assert.IsTrue(testData.catalogue.ValidatorXML.Contains(l2ColumnInfo.ID.ToString()));
        Assert.IsTrue(testData.catalogue.ValidatorXML.Contains("previous_address_L1"));

        // The dependency finder should spot the reference and forbid the delete
        var finder = new ValidationXMLObscureDependencyFinder(RepositoryLocator);
        Assert.Throws<ValidationXmlDependencyException>(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo));
        Assert.AreEqual(0, finder.BlackList.Count);

        // Corrupt the validation XML so it can no longer be parsed
        testData.catalogue.ValidatorXML = testData.catalogue.ValidatorXML.Insert(100, "I've got a lovely bunch of coconuts!");
        testData.catalogue.SaveToDatabase();

        // Column is now deleteable, but only because the catalogue landed on the blacklist
        Assert.DoesNotThrow(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo));
        Assert.AreEqual(1, finder.BlackList.Count);

        // Clear the XML entirely
        testData.catalogue.ValidatorXML = "";
        testData.catalogue.SaveToDatabase();

        // Deleteable because nothing references it any more
        Assert.DoesNotThrow(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo));
    }
    finally
    {
        testData.DeleteCatalogue();
    }
}
/// <summary>
/// Builds the shared test fixture state: a 100-row bulk dataset imported as a
/// Catalogue, three identical "chi"-dimension aggregates and a cohort
/// identification configuration with an EXCEPT root container and a UNION
/// sub-container.
/// </summary>
/// <param name="repository">Catalogue repository in which all objects are created.</param>
public void SetupTestData(ICatalogueRepository repository)
{
    testData = new BulkTestsData(repository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);
    testData.SetupTestData();
    testData.ImportAsCatalogue();

    // The three aggregates were previously created with identical copy-pasted
    // code; factored into one helper. Each aggregate has a null CountSQL and a
    // single dimension on the "chi" extraction information.
    Func<string, AggregateConfiguration> createChiAggregate = name =>
    {
        var aggregate = new AggregateConfiguration(repository, testData.catalogue, name);
        aggregate.CountSQL = null;
        aggregate.SaveToDatabase();
        new AggregateDimension(repository, testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate);
        return aggregate;
    };

    aggregate1 = createChiAggregate("UnitTestAggregate1");
    aggregate2 = createChiAggregate("UnitTestAggregate2");
    aggregate3 = createChiAggregate("UnitTestAggregate3");

    cohortIdentificationConfiguration = new CohortIdentificationConfiguration(repository, "UnitTestIdentification");

    rootcontainer = new CohortAggregateContainer(repository, SetOperation.EXCEPT);
    container1 = new CohortAggregateContainer(repository, SetOperation.UNION);

    cohortIdentificationConfiguration.RootCohortAggregateContainer_ID = rootcontainer.ID;
    cohortIdentificationConfiguration.SaveToDatabase();

    // Rename the aggregates to match the cic naming convention
    cohortIdentificationConfiguration.EnsureNamingConvention(aggregate1);
    cohortIdentificationConfiguration.EnsureNamingConvention(aggregate2);
    cohortIdentificationConfiguration.EnsureNamingConvention(aggregate3);
}
public void CreateANOVersionTest()
{
    // Target database the ANO catalogue will be engineered into
    var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest");
    var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);
    db.Create(true);

    var bulk = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);
    bulk.SetupTestData();
    bulk.ImportAsCatalogue();

    var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);
    planManager.TargetDatabase = db;

    // Configure test migration rules; they must pass checks before execution
    CreateMigrationRules(planManager, bulk);
    Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);
    engine.Execute();

    // The engineered catalogue should exist under the \ano folder
    var anoCatalogue = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Folder.Path.StartsWith("\\ano"));
    Assert.IsTrue(anoCatalogue.Exists());

    db.Drop();

    // Every exported object should have a matching import record
    var exports = CatalogueRepository.GetAllObjects<ObjectExport>().Count();
    var imports = CatalogueRepository.GetAllObjects<ObjectImport>().Count();
    Assert.AreEqual(exports, imports);
    Assert.IsTrue(exports > 0);
}
public void TestPayloadInjection()
{
    // Small bulk dataset imported as the catalogue to be loaded
    var b = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 10);
    b.SetupTestData();
    b.ImportAsCatalogue();

    var lmd = new LoadMetadata(CatalogueRepository, "Loading");
    lmd.LocationOfFlatFiles = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", true).RootPath.FullName;
    lmd.SaveToDatabase();

    CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestPayloadAttacher));

    b.catalogue.LoadMetadata_ID = lmd.ID;
    b.catalogue.LoggingDataTask = "TestPayloadInjection";
    b.catalogue.SaveToDatabase();

    var logManager = new LogManager(new ServerDefaults(CatalogueRepository).GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID));
    logManager.CreateNewLoggingTaskIfNotExists("TestPayloadInjection");

    // Mounting-stage attacher that records whether it received the payload
    var processTask = new ProcessTask(CatalogueRepository, lmd, LoadStage.Mounting);
    processTask.Path = typeof(TestPayloadAttacher).FullName;
    processTask.ProcessTaskType = ProcessTaskType.Attacher;
    processTask.SaveToDatabase();

    var config = new HICDatabaseConfiguration(DiscoveredDatabaseICanCreateRandomTablesIn.Server);
    var factory = new HICDataLoadFactory(lmd, config, new HICLoadConfigurationFlags(), CatalogueRepository, logManager);
    IDataLoadExecution execution = factory.Create(new ThrowImmediatelyDataLoadEventListener());

    var loadProcess = new DataLoadProcess(RepositoryLocator, lmd, null, logManager, new ThrowImmediatelyDataLoadEventListener(), execution, config);
    loadProcess.Run(new GracefulCancellationToken(), payload);

    Assert.IsTrue(PayloadTest.Success, "Expected IAttacher to detect Payload and set this property to true");
}
public void ValidateBulkTestData(bool testCancellingValiationEarly)
{
    const int numberOfRecordsToGenerate = 10000;
    var startTime = DateTime.Now;

    var testData = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, numberOfRecordsToGenerate);
    testData.SetupTestData();
    testData.ImportAsCatalogue();

    var dqeRepository = new DQERepository(CatalogueRepository);

    // There shouldn't be any lingering results in the database.
    // NOTE(review): this checks the _catalogue field while the rest of the test
    // uses testData.catalogue — confirm this is intentional
    Assert.IsNull(dqeRepository.GetMostRecentEvaluationFor(_catalogue));

    // Apply validation rules and nominate the time periodicity column
    testData.catalogue.ValidatorXML = bulkTestDataValidation;
    var toBeTimePeriodicityCol = testData.catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName().Equals("dtCreated"));
    testData.catalogue.TimeCoverage_ExtractionInformation_ID = toBeTimePeriodicityCol.ID;

    // Run the validation
    var report = new CatalogueConstraintReport(testData.catalogue, SpecialFieldNames.DataLoadRunID);
    report.Check(new ThrowImmediatelyCheckNotifier());

    var source = new CancellationTokenSource();
    if (testCancellingValiationEarly)
        source.Cancel();

    var listener = new ToMemoryDataLoadEventListener(false);
    report.GenerateReport(testData.catalogue, listener, source.Token);

    if (testCancellingValiationEarly)
    {
        // A pre-cancelled token should yield exactly one OperationCanceledException message
        Assert.IsTrue(listener.EventsReceivedBySender[report].Count(m => m.Exception is OperationCanceledException) == 1);
        testData.Destroy();
        testData.DeleteCatalogue();
        return;
    }

    // All messages must have null exceptions
    Assert.IsTrue(listener.EventsReceivedBySender[report].All(m => m.Exception == null));

    // Get the results now
    var results = dqeRepository.GetMostRecentEvaluationFor(testData.catalogue);
    Assert.IsNotNull(results);

    // The sum of all consequences across all data load run ids should be the record count
    Assert.AreEqual(10000, results.RowStates.Sum(r => r.Missing + r.Invalid + r.Wrong + r.Correct));

    // Expect at least 5 data load run ids (typically around 12 — see BulkTestData;
    // fewer is theoretically possible but statistically negligible) and plenty of
    // column results too
    Assert.GreaterOrEqual(results.RowStates.Count(), 5);
    Assert.GreaterOrEqual(results.ColumnStates.Count(), 5);

    // Did it log?
    var logManager = new LogManager(CatalogueRepository.GetServerDefaults().GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID));
    var log = logManager.GetArchivalDataLoadInfos("DQE").FirstOrDefault();
    Assert.IsNotNull(log);
    Assert.GreaterOrEqual(log.StartTime, startTime);
    Assert.AreEqual(0, log.Errors.Count);
    Assert.AreEqual(numberOfRecordsToGenerate, log.TableLoadInfos.Single().Inserts);

    testData.Destroy();
    testData.DeleteCatalogue();
}
private void CreateMigrationRules(ForwardEngineerANOCataloguePlanManager planManager, BulkTestsData bulk)
{
    // chi: anonymise via an ANOTable (9 digits + 1 character, suffix "C")
    var chiColumn = bulk.GetColumnInfo("chi");

    var anoChiTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCHI", "C")
    {
        NumberOfIntegersToUseInAnonymousRepresentation = 9,
        NumberOfCharactersToUseInAnonymousRepresentation = 1
    };
    anoChiTable.SaveToDatabase();
    anoChiTable.PushToANOServerAsNewTable(chiColumn.Data_type, new ThrowImmediatelyCheckNotifier());

    planManager.GetPlanForColumnInfo(chiColumn).Plan = Plan.ANO;
    planManager.GetPlanForColumnInfo(chiColumn).ANOTable = anoChiTable;

    // date_of_birth: dilute by rounding to the middle of the quarter
    var dateOfBirthColumn = bulk.GetColumnInfo("date_of_birth");
    planManager.GetPlanForColumnInfo(dateOfBirthColumn).Plan = Plan.Dilute;
    planManager.GetPlanForColumnInfo(dateOfBirthColumn).Dilution = new RoundDateToMiddleOfQuarter();

    // current_postcode: dilute by removing the rightmost 3 characters
    var postcodeColumn = bulk.GetColumnInfo("current_postcode");
    planManager.GetPlanForColumnInfo(postcodeColumn).Plan = Plan.Dilute;
    planManager.GetPlanForColumnInfo(postcodeColumn).Dilution = new ExcludeRight3OfUKPostcodes();
}
/// <summary>
/// End-to-end test of forward engineering an ANO version of a catalogue that has
/// a composite-key Lookup, transformed/aliased columns and a supplemental
/// description column. Verifies the ANO catalogue preserves ExtractionInformation
/// order/flags, refactors transform SQL to the new table names and recreates the
/// Lookup (including its composite join) rather than reusing the originals.
/// </summary>
public void CreateANOVersionTest_LookupsAndExtractionInformations()
{
    var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest");
    var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);
    db.Create(true);

    BulkTestsData bulk = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);
    bulk.SetupTestData();
    bulk.ImportAsCatalogue();

    // Create a lookup table on the server with a composite (Code, hb_Code) primary key
    var lookupTbl = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("z_sexLookup", new[]
    {
        new DatabaseColumnRequest("Code", "varchar(1)") { IsPrimaryKey = true },
        new DatabaseColumnRequest("hb_Code", "varchar(1)") { IsPrimaryKey = true },
        new DatabaseColumnRequest("Description", "varchar(100)")
    });

    // Import a reference to the lookup table
    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, lookupTbl);
    ColumnInfo[] lookupColumnInfos;
    TableInfo lookupTableInfo;
    importer.DoImport(out lookupTableInfo, out lookupColumnInfos);

    // Columns involved in the Lookup reference
    var ciSex = bulk.catalogue.CatalogueItems.Single(c => c.Name == "sex");
    var ciHb = bulk.catalogue.CatalogueItems.Single(c => c.Name == "hb_extract");

    var eiChi = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "chi");
    eiChi.IsExtractionIdentifier = true;
    eiChi.SaveToDatabase();

    var eiCentury = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "century");
    eiCentury.HashOnDataRelease = true;
    eiCentury.ExtractionCategory = ExtractionCategory.Internal;
    eiCentury.SaveToDatabase();

    // Add a transform (scalar function + alias) on the postcode column
    var eiPostcode = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "current_postcode");
    eiPostcode.SelectSQL = string.Format("LEFT(10,{0}.[current_postcode])", eiPostcode.ColumnInfo.TableInfo.Name);
    eiPostcode.Alias = "MyMutilatedColumn";
    eiPostcode.SaveToDatabase();

    // Add a combo transform concatenating forename and surname
    var ciComboCol = new CatalogueItem(CatalogueRepository, bulk.catalogue, "ComboColumn");
    var colForename = bulk.columnInfos.Single(c => c.GetRuntimeName() == "forename");
    var colSurname = bulk.columnInfos.Single(c => c.GetRuntimeName() == "surname");
    var eiComboCol = new ExtractionInformation(CatalogueRepository, ciComboCol, colForename, colForename + " + ' ' + " + colSurname);
    eiComboCol.Alias = "ComboColumn";
    eiComboCol.SaveToDatabase();

    var eiDataLoadRunId = bulk.extractionInformations.Single(ei => ei.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID));
    eiDataLoadRunId.DeleteInDatabase();

    var lookup = new Lookup(CatalogueRepository, lookupColumnInfos[2], ciSex.ColumnInfo, lookupColumnInfos[0], ExtractionJoinType.Left, null);

    // Make it harder: assume the sex code varies per healthboard, so the join to
    // the lookup requires both fields sex and hb_extract
    var compositeLookup = new LookupCompositeJoinInfo(CatalogueRepository, lookup, ciHb.ColumnInfo, lookupColumnInfos[1]);

    // Insert a Sex_Desc field into the original Catalogue directly after sex
    int orderToInsertDescriptionFieldAt = ciSex.ExtractionInformation.Order;

    // Bump everyone below the insertion point down 1
    foreach (var toBumpDown in bulk.catalogue.CatalogueItems.Select(ci => ci.ExtractionInformation).Where(e => e != null && e.Order > orderToInsertDescriptionFieldAt))
    {
        toBumpDown.Order++;
        toBumpDown.SaveToDatabase();
    }

    var ciDescription = new CatalogueItem(CatalogueRepository, bulk.catalogue, "Sex_Desc");
    var eiDescription = new ExtractionInformation(CatalogueRepository, ciDescription, lookupColumnInfos[2], lookupColumnInfos[2].Name);
    eiDescription.Alias = "Sex_Desc";
    eiDescription.Order = orderToInsertDescriptionFieldAt + 1;
    eiDescription.ExtractionCategory = ExtractionCategory.Supplemental;
    eiDescription.SaveToDatabase();

    bulk.catalogue.ClearAllInjections();

    // Sanity check: the source catalogue builds valid SQL
    QueryBuilder qb = new QueryBuilder(null, null);
    qb.AddColumnRange(bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any));
    Console.WriteLine(qb.SQL);

    // There should be 2 tables involved in the query [z_sexLookup] and [BulkData]
    Assert.AreEqual(2, qb.TablesUsedInQuery.Count);

    // The query builder should have identified the lookup
    Assert.AreEqual(lookup, qb.GetDistinctRequiredLookups().Single());

    ////////////////////////////////////////////////////////////////////// The Actual Bit Being Tested //////////////////////////////////////////////////////////////////////
    var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);
    planManager.TargetDatabase = db;

    // Setup test rules for migrator; they must pass checks before execution
    CreateMigrationRules(planManager, bulk);
    Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);
    engine.Execute();
    ////////////////////////////////////////////////////////////////////// End The Actual Bit Being Tested //////////////////////////////////////////////////////////////////

    var anoCatalogue = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Folder.Path.StartsWith("\\ano"));
    Assert.IsTrue(anoCatalogue.Exists());

    // The new Catalogue should have the same ExtractionInformations (count, order,
    // runtime names modulo the ANO prefix, and all flags)
    var eiSource = bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();
    var eiDestination = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();

    Assert.AreEqual(eiSource.Length, eiDestination.Length, "Both the new and the ANO catalogue should have the same number of ExtractionInformations (extractable columns)");

    for (int i = 0; i < eiSource.Length; i++)
    {
        Assert.AreEqual(eiSource[i].Order, eiDestination[i].Order, "ExtractionInformations in the source and destination Catalogue should have the same order");
        Assert.AreEqual(eiSource[i].GetRuntimeName(), eiDestination[i].GetRuntimeName().Replace("ANO", ""), "ExtractionInformations in the source and destination Catalogue should have the same names (excluding ANO prefix)");
        Assert.AreEqual(eiSource[i].ExtractionCategory, eiDestination[i].ExtractionCategory, "Old / New ANO ExtractionInformations did not match on ExtractionCategory");
        Assert.AreEqual(eiSource[i].IsExtractionIdentifier, eiDestination[i].IsExtractionIdentifier, "Old / New ANO ExtractionInformations did not match on IsExtractionIdentifier");
        Assert.AreEqual(eiSource[i].HashOnDataRelease, eiDestination[i].HashOnDataRelease, "Old / New ANO ExtractionInformations did not match on HashOnDataRelease");
        Assert.AreEqual(eiSource[i].IsPrimaryKey, eiDestination[i].IsPrimaryKey, "Old / New ANO ExtractionInformations did not match on IsPrimaryKey");
    }

    // The ANO catalogue should also build valid SQL
    QueryBuilder qbdestination = new QueryBuilder(null, null);
    qbdestination.AddColumnRange(anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));
    Console.WriteLine(qbdestination.SQL);

    // The transform on postcode should have been refactored to the new table name
    // while preserving the scalar function LEFT(...)
    var anoEiPostcode = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("MyMutilatedColumn"));
    Assert.AreEqual(string.Format("LEFT(10,{0}.[current_postcode])", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiPostcode.SelectSQL);

    // The combo transform should likewise have been refactored to the new table name
    // (comment previously copy-pasted from the postcode check above)
    var anoEiComboCol = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("ComboColumn"));
    Assert.AreEqual(string.Format("{0}.[forename] + ' ' + {0}.[surname]", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiComboCol.SelectSQL);

    // There should be 2 tables involved in the query [z_sexLookup] and [BulkData]
    Assert.AreEqual(2, qbdestination.TablesUsedInQuery.Count);

    // The query builder should have identified the lookup, and it should be the new one not the old one
    Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Count(), "New query builder for ano catalogue did not correctly identify that there was a Lookup");
    Assert.AreNotEqual(lookup, qbdestination.GetDistinctRequiredLookups().Single(), "New query builder for ano catalogue identified the OLD Lookup!");

    Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins().Count(), "The new Lookup did not have the composite join key (sex/hb_extract)");

    // BUG FIX: this previously compared compositeLookup against the IEnumerable
    // returned by GetSupplementalJoins() — which can never be equal, making the
    // assertion vacuous. Compare against the single supplemental join instead.
    Assert.AreNotEqual(compositeLookup, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins().Single(), "New query builder for ano catalogue identified the OLD LookupCompositeJoinInfo!");

    db.Drop();

    // Every exported object should have a matching import record
    var exports = CatalogueRepository.GetAllObjects<ObjectExport>().Count();
    var imports = CatalogueRepository.GetAllObjects<ObjectImport>().Count();

    Assert.AreEqual(exports, imports);
    Assert.IsTrue(exports > 0);
}
public void BulkTestsSetUp()
{
    // Create and populate the shared bulk test dataset used by this fixture
    _bulkTests = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn);
    _bulkTests.SetupTestData();
}