private void SetupJoin()
{
    // Build a one-row table joined to the main catalogue on "Name".
    var joinData = new DataTable();
    joinData.Columns.Add("Name");
    joinData.Columns.Add("Description");
    joinData.Rows.Add(new object[] { "Dave", "Is a maniac" });

    var joinTable = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable(
        "SimpleJoin",
        joinData,
        new[]
        {
            new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50)) { IsPrimaryKey = true }
        });

    var joinCatalogue = Import(joinTable);

    // Foreign key side comes from the pre-existing catalogue, primary key side from the new table.
    var fkExtractionInformation = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any)
        .Single(e => e.GetRuntimeName() == "Name");
    var pkColumn = joinCatalogue.GetTableInfoList(false).Single()
        .ColumnInfos.Single(c => c.GetRuntimeName() == "Name");

    new JoinInfo(CatalogueRepository, fkExtractionInformation.ColumnInfo, pkColumn, ExtractionJoinType.Left, null);

    // Expose the joined table's Name column a second time under the alias "Name_2".
    var catalogueItem = new CatalogueItem(CatalogueRepository, _catalogue, "Name_2");
    var extractionInformation = new ExtractionInformation(CatalogueRepository, catalogueItem, pkColumn, pkColumn.Name)
    {
        Alias = "Name_2"
    };
    extractionInformation.SaveToDatabase();
}
public void SetupBulkTestData()
{
    Console.WriteLine("Cleaning up remnants");
    Cleanup();

    Console.WriteLine("Setting up bulk test data");
    _bulkData = new BulkTestsData(RepositoryLocator.CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn);
    _bulkData.SetupTestData();

    Console.WriteLine("Importing to Catalogue");
    var bulkTable = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(BulkTestsData.BulkDataTable);
    var importer = new TableInfoImporter(CatalogueRepository, bulkTable);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);

    Console.WriteLine("Imported TableInfo " + tableInfoCreated);
    Console.WriteLine("Imported ColumnInfos " + string.Join(",", columnInfosCreated.Select(c => c.GetRuntimeName())));

    Assert.NotNull(tableInfoCreated);

    // chi should have been detected as a primary key during import
    var chiColumn = columnInfosCreated.Single(c => c.GetRuntimeName().Equals("chi"));
    Console.WriteLine("CHI is primary key? (expecting true):" + chiColumn.IsPrimaryKey);
    Assert.IsTrue(chiColumn.IsPrimaryKey);

    // remove the name columns from the imported metadata before the tests run
    tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals("surname")).DeleteInDatabase();
    tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals("forename")).DeleteInDatabase();

    tableInfoCreated.ClearAllInjections();
}
public void Setup()
{
    // Start from a clean slate: drop any leftover copy of the test table.
    var table = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("ReferentialIntegrityConstraintTests");
    if (table.Exists())
        table.Drop();

    var server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;
    using (var con = server.GetConnection())
    {
        con.Open();
        server.GetCommand("CREATE TABLE ReferentialIntegrityConstraintTests(MyValue int)", con).ExecuteNonQuery();
        server.GetCommand("INSERT INTO ReferentialIntegrityConstraintTests (MyValue) VALUES (5)", con).ExecuteNonQuery();
    }

    // Import the table metadata and point the constraint at its single column.
    var importer = new TableInfoImporter(CatalogueRepository, table);
    importer.DoImport(out _tableInfo, out _columnInfo);

    _constraint = new ReferentialIntegrityConstraint(CatalogueRepository);
    _constraint.OtherColumnInfo = _columnInfo.Single();
}
public void CreateANOVersionTest_IntIdentity()
{
    var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest");

    // Set up the anonymisation database (destination)
    var destinationDb = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);
    destinationDb.Create(true);

    // Create the source table in the scratch database: an identity pk plus a Name column
    var sourceTable = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("MyTable", new[]
    {
        new DatabaseColumnRequest("id", "int identity(1,1)", false) { IsPrimaryKey = true },
        new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 10), false)
    });

    TableInfo ti;
    ColumnInfo[] cols;
    var cata = Import(sourceTable, out ti, out cols);

    var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata);
    planManager.TargetDatabase = destinationDb;

    // Migration rule under test: drop the Name column entirely
    var nameCol = cols.Single(c => c.GetRuntimeName().Equals("Name"));
    planManager.Plans[nameCol].Plan = Plan.Drop;

    // rules should pass checks
    planManager.Check(new ThrowImmediatelyCheckNotifier());

    var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);
    engine.Execute();

    var anoCatalogue = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Folder.Path.StartsWith("\\ano"));
    Assert.IsTrue(anoCatalogue.Exists());

    // only the id column should survive (Name was dropped by the plan)
    Assert.AreEqual(1, anoCatalogue.CatalogueItems.Length);
    var idColInAnoDatabase = anoCatalogue.CatalogueItems[0].ColumnInfo;
    Assert.AreEqual("int", idColInAnoDatabase.Data_type);

    destinationDb.Drop();

    // every exported object should have a matching import record
    var exports = CatalogueRepository.GetAllObjects<ObjectExport>().Count();
    var imports = CatalogueRepository.GetAllObjects<ObjectImport>().Count();
    Assert.AreEqual(exports, imports);
    Assert.IsTrue(exports > 0);
}
// Creates a table valued function GetTopXRandom on the test server and imports it as a
// Catalogue whose single extractable column (chi) is marked as the extraction identifier.
// The function loops @numberOfRecords times, each iteration inserting one randomly ordered
// row from {cohortDatabaseName}..Cohort (so duplicates are possible across iterations).
private void CreateTvfCatalogue(string cohortDatabaseName)
{
    var svr = DiscoveredDatabaseICanCreateRandomTablesIn.Server;
    using (var con = svr.GetConnection())
    {
        con.Open();

        //create the newID view (newid() cannot be called directly from inside a function, so wrap it in a view)
        svr.GetCommand("create view getNewID as select newid() as new_id", con).ExecuteNonQuery();

        var sql = string.Format(@"create function GetTopXRandom (@numberOfRecords int) RETURNS @retTable TABLE ( chi varchar(10), definitionID int ) AS BEGIN while(@numberOfRecords >0) begin insert into @retTable select top 1 chi,cohortDefinition_id from {0}..Cohort order by (select new_id from getNewID) set @numberOfRecords = @numberOfRecords - 1 end return end ", cohortDatabaseName);

        svr.GetCommand(sql, con).ExecuteNonQuery();
    }

    // import the TVF and forward engineer a Catalogue from it
    var tblvf = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTableValuedFunction("GetTopXRandom");

    var importer = new TableValuedFunctionImporter(CatalogueRepository, tblvf);

    TableInfo tbl;
    ColumnInfo[] cols;
    importer.DoImport(out tbl, out cols);

    var engineer = new ForwardEngineerCatalogue(tbl, cols, true);

    Catalogue cata;
    CatalogueItem[] cis;
    ExtractionInformation[] eis;
    engineer.ExecuteForwardEngineering(out cata, out cis, out eis);

    // chi is the first extractable column; make it the extraction identifier
    Assert.AreEqual("chi", eis[0].GetRuntimeName());
    eis[0].IsExtractionIdentifier = true;
    eis[0].SaveToDatabase();

    _tvfCatalogue = cata;
    _tvfTableInfo = tbl;
}
public void TearDown()
{
    // Remove the test table (if the test got far enough to create it) and the server reference.
    var aliasTable = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("AliasHandlerTests");
    if (aliasTable.Exists())
        aliasTable.Drop();

    _server.DeleteInDatabase();
}
protected DiscoveredTable CreateDataset <T>(int people, int rows, Random r, out PersonCollection peopleGenerated) where T : IDataGenerator
{
    // Generate a synthetic population, then have the requested generator build rows for it.
    var generator = new DataGeneratorFactory().Create <T>(r);

    peopleGenerated = new PersonCollection();
    peopleGenerated.GeneratePeople(people, r);

    var dataTable = generator.GetDataTable(peopleGenerated, rows);

    // Upload under the generator type's name into the scratch database.
    return DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable(typeof(T).Name, dataTable, null, false, this);
}
public void TestPreExecutionChecker_TablesDontExist()
{
    // Point a load at a table that does not exist; pre-execution checks should fail fast.
    var missingTable = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("Imaginary");
    Assert.IsFalse(missingTable.Exists());

    var lmd = RdmpMockFactory.Mock_LoadMetadataLoadingTable(missingTable);
    var checker = new PreExecutionChecker(lmd, new HICDatabaseConfiguration(DiscoveredDatabaseICanCreateRandomTablesIn.Server));

    var ex = Assert.Throws <Exception>(() => checker.Check(new ThrowImmediatelyCheckNotifier()));
    StringAssert.IsMatch("Table '.*Imaginary.*' does not exist", ex.Message);
}
public void OverwriteMigrationStrategy_NoPrimaryKey()
{
    // Two tables with a single non-pk column; constructing the MigrationColumnSet
    // should throw because migration requires at least one declared primary key.
    var from = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("Bob", new[] { new DatabaseColumnRequest("Field", "int") });
    var to = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("Frank", new[] { new DatabaseColumnRequest("Field", "int") });

    var migrationFieldProcessor = Mock.Of <IMigrationFieldProcessor>();

    // NOTE: the original test also created an IManagedConnection mock, an IDataLoadJob mock
    // and an OverwriteMigrationStrategy, but none of them were used by the assertion below,
    // so that dead setup has been removed.
    var ex = Assert.Throws <Exception>(() => new MigrationColumnSet(from, to, migrationFieldProcessor));
    Assert.AreEqual("There are no primary keys declared in table Bob", ex.Message);
}
public void CreateDataset()
{
    _server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;

    using (var con = _server.GetConnection())
    {
        con.Open();
        _server.GetCommand("CREATE TABLE " + TABLE_NAME + "(Name varchar(10), Address varchar(500))", con).ExecuteNonQuery();
    }

    // Import the freshly created table. Fix: use TABLE_NAME here rather than a hardcoded
    // duplicate of the constant's value ("TableInfoSynchronizerTests") so the CREATE and
    // the ExpectTable can never drift apart.
    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(TABLE_NAME);
    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);
}
public void Drop()
{
    var table = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("ReferentialIntegrityConstraintTests");
    if (table.Exists())
        table.Drop();

    // Capture any credentials before the TableInfo is deleted, then clean those up too.
    var credentials = (DataAccessCredentials)_tableInfo.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing);

    _tableInfo.DeleteInDatabase();

    if (credentials != null)
        credentials.DeleteInDatabase();
}
public void TestCheckUpdateTrigger()
{
    // set up a test database with a handmade trigger on the table
    const string tableName = "TestTable";
    var databaseName = DiscoveredDatabaseICanCreateRandomTablesIn.GetRuntimeName();

    var table = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable(tableName, new[]
    {
        new DatabaseColumnRequest("Id", "int"),
    });

    var server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;
    using (var con = server.GetConnection())
    {
        con.Open();
        server.GetCommand(
            "CREATE TRIGGER dbo.[TestTable_OnUpdate] ON [dbo].[" + tableName + "] AFTER DELETE AS RAISERROR('MESSAGE',16,10)",
            con).ExecuteNonQuery();
    }

    var dbInfo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName);

    var triggerImplementer = new TriggerImplementerFactory(dbInfo.Server.DatabaseType).Create(table);

    // the trigger starts out enabled
    var isEnabled = triggerImplementer.GetTriggerStatus();
    Assert.AreEqual(TriggerStatus.Enabled, isEnabled);

    // disable the trigger and test correct reporting
    using (var con = new SqlConnection(dbInfo.Server.Builder.ConnectionString))
    {
        con.Open();
        new SqlCommand(
            "USE [" + databaseName + "]; DISABLE TRIGGER TestTable_OnUpdate ON [" + databaseName + "]..[" + tableName + "]",
            con).ExecuteNonQuery();
    }

    isEnabled = triggerImplementer.GetTriggerStatus();
    Assert.AreEqual(TriggerStatus.Disabled, isEnabled);
}
public void GetRowCountWhenNoIndexes()
{
    var server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;
    var table = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("GetRowCountWhenNoIndexes");

    Assert.AreEqual("GetRowCountWhenNoIndexes", table.GetRuntimeName());

    using (var con = server.GetConnection())
    {
        con.Open();

        // heap table (no indexes) containing exactly one row
        server.GetCommand("CREATE TABLE " + table.GetRuntimeName() + " (age int, name varchar(5))", con).ExecuteNonQuery();

        var insert = server.GetCommand("INSERT INTO " + table.GetRuntimeName() + " VALUES (1,'Fish')", con);
        Assert.AreEqual(1, insert.ExecuteNonQuery());

        // row count must still be reported correctly without index statistics
        Assert.AreEqual(1, table.GetRowCount());
    }
}
private void SetupCatalogueConfigurationEtc()
{
    // One-row dataset keyed on the first private identifier that was allocated
    var dt = new DataTable();
    dt.Columns.Add("PrivateID");
    dt.Columns.Add("Name");
    dt.Columns.Add("DateOfBirth");
    dt.Rows.Add(new object[] { _cohortKeysGenerated.Keys.First(), "Dave", "2001-01-01" });

    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("TestTable", dt, new[]
    {
        new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50))
    });

    CatalogueItem[] cataItems;
    _catalogue = Import(tbl, out _tableInfo, out _columnInfos, out cataItems, out _extractionInformations);

    // Mark PrivateID as the extraction identifier (the linkage column)
    var privateID = _extractionInformations.First(e => e.GetRuntimeName().Equals("PrivateID"));
    privateID.IsExtractionIdentifier = true;
    privateID.SaveToDatabase();
}
public void Test_CatalogueItems_NonExtractedPrimaryKey_LookupsOnly_IsRespected()
{
    var request = SetupExtractDatasetCommand("LookupsOnly_IsRespected", new string[] { }, pkColumnInfos: new[] { "DateOfBirth" }, withLookup: true);

    var source = new ExecutePkSynthesizerDatasetExtractionSource();
    source.PreInitialize(request, new ThrowImmediatelyDataLoadEventListener());

    var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());

    Assert.That(chunk.PrimaryKey, Is.Not.Null);

    // the "desc" column is added to the existing ones + the SynthPk
    Assert.That(chunk.Columns.Cast <DataColumn>().ToList(), Has.Count.EqualTo(_columnInfos.Count() + 2));

    // a single synthesized column stands in as the primary key
    Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1));
    Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("SynthesizedPk"));

    var firstValue = chunk.Rows[0]["SynthesizedPk"].ToString();
    Assert.IsTrue(reghex.IsMatch(firstValue));

    DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("SimpleLookup").Drop();
}
public void DropTables()
{
    var credentials = (DataAccessCredentials)tableInfoCreated.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing);

    // if credentials were created, we should be their only user
    if (credentials != null)
        Assert.AreEqual(1, credentials.GetAllTableInfosThatUseThis().Count());

    // delete the TableInfo, then any credentials TableInfoImport created alongside it
    tableInfoCreated.DeleteInDatabase();

    if (credentials != null)
        credentials.DeleteInDatabase();

    DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(TABLE_NAME).Drop();
}
private void SetupLookupTable()
{
    // One-row code/description table to act as a lookup for "Name"
    var lookupData = new DataTable();
    lookupData.Columns.Add("Name");
    lookupData.Columns.Add("Description");
    lookupData.Rows.Add(new object[] { "Dave", "Is a maniac" });

    var lookupTable = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("SimpleLookup", lookupData, new[]
    {
        new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50))
    });

    var lookupCata = Import(lookupTable);

    // fk comes from the main catalogue, pk and description from the newly imported lookup
    var fkEi = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName() == "Name");

    var lookupColumns = lookupCata.GetTableInfoList(false).Single().ColumnInfos;
    var pk = lookupColumns.Single(c => c.GetRuntimeName() == "Name");
    var descLine1 = lookupColumns.Single(c => c.GetRuntimeName() == "Description");

    new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk, null, true).Execute();
}
public void GetRowCount_Views()
{
    var server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;

    using (var con = server.GetConnection())
    {
        con.Open();

        // base table with a single row
        server.GetCommand("CREATE TABLE GetRowCount_Views (age int, name varchar(5))", con).ExecuteNonQuery();

        var insert = server.GetCommand("INSERT INTO GetRowCount_Views VALUES (1,'Fish')", con);
        Assert.AreEqual(1, insert.ExecuteNonQuery());

        // a view over the table should report the same row count as the table itself
        server.GetCommand("CREATE VIEW v_GetRowCount_Views as select * from GetRowCount_Views", con).ExecuteNonQuery();

        Assert.AreEqual(1, DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("v_GetRowCount_Views").GetRowCount());
    }
}
public void PipelineTest()
{
    // Source: read a single dicom file from the test data directory
    var source = new DicomFileCollectionSource();
    source.FilenameField = "RelativeFileArchiveURI";

    var file = new FlatFileToLoad(new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData\IM-0001-0013.dcm")));
    source.PreInitialize(new FlatFileToLoadDicomFileWorklist(file), new ThrowImmediatelyDataLoadEventListener());

    var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());

    // Destination: upload the resulting DataTable into the scratch database
    var destination = new DataTableUploadDestination();
    destination.PreInitialize(DiscoveredDatabaseICanCreateRandomTablesIn, new ThrowImmediatelyDataLoadEventListener());
    destination.AllowResizingColumnsAtUploadTime = true;
    destination.ProcessPipelineData(chunk, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
    destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);

    // the upload must have produced a real table; clean it up afterwards
    var finalTable = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(destination.TargetTableName);
    Assert.IsTrue(finalTable.Exists());
    finalTable.Drop();
}
public void SynchronizationTests_ColumnDropped(bool acceptChanges)
{
    Assert.AreEqual(TABLE_NAME, tableInfoCreated.GetRuntimeName());

    // Drop the Address column out from under the recorded metadata
    var table = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(TABLE_NAME);
    table.DropColumn(table.DiscoverColumn("Address"));

    var synchronizer = new TableInfoSynchronizer(tableInfoCreated);

    if (acceptChanges)
    {
        // accepting changes should bring the metadata back in line with the live table
        Assert.AreEqual(true, synchronizer.Synchronize(new AcceptAllCheckNotifier()));
        Assert.AreEqual(1, tableInfoCreated.ColumnInfos.Length); // should only be 1 remaining
    }
    else
    {
        var ex = Assert.Throws <Exception>(() => synchronizer.Synchronize(new ThrowImmediatelyCheckNotifier()));
        Assert.AreEqual("The ColumnInfo Address no longer appears in the live table.", ex.Message);
    }
}
private void CreateANormalCatalogue()
{
    var svr = DiscoveredDatabaseICanCreateRandomTablesIn.Server;

    // a plain (non table-valued-function) table with three chi values
    using (var con = svr.GetConnection())
    {
        con.Open();
        svr.GetCommand("CREATE TABLE NonTVFTable ( chi varchar(10))", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0101010101')", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0202020202')", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0303030303')", con).ExecuteNonQuery();
    }

    var importer = new TableInfoImporter(CatalogueRepository, svr.Name,
        DiscoveredDatabaseICanCreateRandomTablesIn.GetRuntimeName(), "NonTVFTable", DatabaseType.MicrosoftSQLServer);

    TableInfo tbl;
    ColumnInfo[] cols;
    importer.DoImport(out tbl, out cols);

    var engineer = new ForwardEngineerCatalogue(tbl, cols, true);

    Catalogue cata;
    CatalogueItem[] cis;
    ExtractionInformation[] eis;
    engineer.ExecuteForwardEngineering(out cata, out cis, out eis);

    // chi is the only column; make it the extraction identifier
    _nonTvfExtractionIdentifier = eis.Single();
    _nonTvfExtractionIdentifier.IsExtractionIdentifier = true;
    _nonTvfExtractionIdentifier.SaveToDatabase();

    _nonTvfCatalogue = cata;
    _nonTvfTableInfo = tbl;
}
// Creates the [ANOMigration] test table (composite pk AdmissionDate+Condition1+CHI),
// seeds it with fixed admission records, imports it as _tableInfo/_columnInfos and
// configures the "ANOCondition" ANOTable (3 chars + 2 ints, suffix C) pushed to the
// ANO server as varchar(4), ready for migration tests.
public void SetupExampleTable()
{
    // drop any leftover copy of the table from a previous run
    var remnantTable = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("ANOMigration");
    if (remnantTable.Exists())
    {
        remnantTable.Drop();
    }

    DeleteANOEndpoint();

    // delete any leftover ANOTable metadata from a previous run
    ANOTable remnantANO = CatalogueRepository.GetAllObjects <ANOTable>().SingleOrDefault(a => a.TableName.Equals("ANOCondition"));
    if (remnantANO != null)
    {
        remnantANO.DeleteInDatabase();
    }

    //cleanup remnant TableInfos pointing at the test table
    foreach (var remnant in CatalogueRepository.GetAllObjects <TableInfo>().Where(t => t.GetRuntimeName().Equals(TableName)))
    {
        remnant.DeleteInDatabase();
    }

    // fixed DDL + seed data (binary datetime casts keep the values exactly reproducible)
    const string sql = @" CREATE TABLE [ANOMigration]( [AdmissionDate] [datetime] NOT NULL, [DischargeDate] [datetime] NOT NULL, [Condition1] [varchar](4) NOT NULL, [Condition2] [varchar](4) NULL, [Condition3] [varchar](4) NULL, [Condition4] [varchar](4) NULL, [CHI] [varchar](10) NOT NULL CONSTRAINT [PK_ANOMigration] PRIMARY KEY CLUSTERED ( [AdmissionDate] ASC, [Condition1] ASC, [CHI] ASC )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY] ) ON [PRIMARY] INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000001B300000000 AS DateTime), CAST(0x000001B600000000 AS DateTime), N'Z61', N'Z29', NULL, N'Z11', N'0809003082') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000021D00000000 AS DateTime), CAST(0x0000022600000000 AS DateTime), N'P024', N'Q230', NULL,N'Z11', N'1610007810') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000032900000000 AS DateTime), CAST(0x0000032A00000000 AS DateTime), N'L73', NULL, NULL, NULL, N'2407011022') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000004EA00000000 AS DateTime), 
CAST(0x000004EA00000000 AS DateTime), N'Y523', N'Z29', NULL, NULL, N'1104015472') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000060300000000 AS DateTime), CAST(0x0000060800000000 AS DateTime), N'F721', N'B871', NULL, NULL, N'0203025927') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000065300000000 AS DateTime), CAST(0x0000065700000000 AS DateTime), N'Z914', N'J398', NULL, NULL, N'2702024715') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000070100000000 AS DateTime), CAST(0x0000070800000000 AS DateTime), N'N009', N'V698', NULL, NULL, N'1610007810') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000077000000000 AS DateTime), CAST(0x0000077200000000 AS DateTime), N'E44', N'J050', N'Q560', NULL, N'1610007810') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000007E800000000 AS DateTime), CAST(0x000007EA00000000 AS DateTime), N'Q824', NULL, NULL, NULL, N'1110029231') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000087700000000 AS DateTime), CAST(0x0000087F00000000 AS DateTime), N'T020', NULL, NULL, NULL, N'2110021261') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000088A00000000 AS DateTime), CAST(0x0000089300000000 AS DateTime), N'G009', NULL, NULL, NULL, N'0706013071') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000008CA00000000 AS DateTime), 
CAST(0x000008D100000000 AS DateTime), N'T47', N'H311', N'O037', NULL, N'1204057592')";

    var server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;
    using (var con = server.GetConnection())
    {
        con.Open();
        server.GetCommand(sql, con).ExecuteNonQuery();
    }

    // import metadata for the newly created table
    var table = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(TableName);

    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, table);
    importer.DoImport(out _tableInfo, out _columnInfos);

    //Configure the structure of the ANO transform we want - identifiers should have 3 characters and 2 ints and end with _C
    _anoConditionTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCondition", "C");
    _anoConditionTable.NumberOfCharactersToUseInAnonymousRepresentation = 3;
    _anoConditionTable.NumberOfIntegersToUseInAnonymousRepresentation = 2;
    _anoConditionTable.SaveToDatabase();

    // create the physical mapping table on the ANO server (input identifiers are varchar(4))
    _anoConditionTable.PushToANOServerAsNewTable("varchar(4)", new ThrowImmediatelyCheckNotifier());
}
// Tests extracting a dataset where the project-specific (custom) catalogue is referenced
// only through a filter: the filter's WHERE clause forces a join to the custom table but
// no custom columns appear in the extracted csv (header stays ReleaseID,Name,DateOfBirth).
public void Extract_ProjectSpecificCatalogue_FilterReference()
{
    //make the catalogue a custom catalogue for this project
    CustomExtractableDataSet.Project_ID = _project.ID;
    CustomExtractableDataSet.SaveToDatabase();

    var pipe = SetupPipeline();
    pipe.Name = "Extract_ProjectSpecificCatalogue_FilterReference Pipe";
    pipe.SaveToDatabase();

    // root filter container holding a single filter that references the custom table's column
    var rootContainer = new FilterContainer(DataExportRepository);
    _selectedDataSet.RootFilterContainer_ID = rootContainer.ID;
    _selectedDataSet.SaveToDatabase();

    var filter = new DeployedExtractionFilter(DataExportRepository, "monkeys only", rootContainer);
    filter.WhereSQL = "SuperSecretThing = 'monkeys can all secretly fly'";
    filter.SaveToDatabase();
    rootContainer.AddChild(filter);

    //get rid of any lingering joins
    foreach (JoinInfo j in CatalogueRepository.GetAllObjects <JoinInfo>())
    {
        j.DeleteInDatabase();
    }

    //add the ability to join the two tables in the query
    var idCol = _extractableDataSet.Catalogue.GetAllExtractionInformation(ExtractionCategory.Core).Single(c => c.IsExtractionIdentifier).ColumnInfo;
    var otherIdCol = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific).Single(e => e.GetRuntimeName().Equals("PrivateID")).ColumnInfo;
    new JoinInfo(CatalogueRepository, idCol, otherIdCol, ExtractionJoinType.Left, null);

    // force the custom table into the query even though no column from it is selected
    new SelectedDataSetsForcedJoin(DataExportRepository, _selectedDataSet, CustomTableInfo);

    //generate a new request (this will include the newly created column)
    _request = new ExtractDatasetCommand(_configuration, new ExtractableDatasetBundle(_extractableDataSet));

    // reset the source table contents to two known rows
    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("TestTable");
    tbl.Truncate();

    using (var blk = tbl.BeginBulkInsert())
    {
        var dt = new DataTable();
        dt.Columns.Add("PrivateID");
        dt.Columns.Add("Name");
        dt.Columns.Add("DateOfBirth");
        dt.Rows.Add(new object[] { "Priv_12345", "Bob", "2001-01-01" });
        dt.Rows.Add(new object[] { "Priv_wtf11", "Frank", "2001-10-29" });
        blk.Upload(dt);
    }

    ExtractionPipelineUseCase useCase;
    IExecuteDatasetExtractionDestination results;
    Execute(out useCase, out results);

    var mainDataTableCsv = results.DirectoryPopulated.GetFiles().Single(f => f.Name.Equals("TestTable.csv"));
    Assert.IsNotNull(mainDataTableCsv);

    // only the row matching the filter is released, with PrivateID replaced by ReleaseID
    var lines = File.ReadAllLines(mainDataTableCsv.FullName);

    Assert.AreEqual("ReleaseID,Name,DateOfBirth", lines[0]);
    Assert.AreEqual("Pub_54321,Bob,2001-01-01", lines[1]);
    Assert.AreEqual(2, lines.Length);
}
// Tests extracting a dataset with a project-specific (custom) catalogue column
// ("SuperSecretThing") appended to the extraction: the output csv gains the custom
// column, its values joined in via PrivateID for each released row.
public void Extract_ProjectSpecificCatalogue_AppendedColumn()
{
    //make the catalogue a custom catalogue for this project
    CustomExtractableDataSet.Project_ID = _project.ID;
    CustomExtractableDataSet.SaveToDatabase();

    var pipe = SetupPipeline();
    pipe.Name = "Extract_ProjectSpecificCatalogue_AppendedColumn Pipe";
    pipe.SaveToDatabase();

    // add the custom column to the extraction configuration
    var extraColumn = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific).Single(e => e.GetRuntimeName().Equals("SuperSecretThing"));
    var asExtractable = new ExtractableColumn(DataExportRepository, _extractableDataSet, _configuration, extraColumn, 10, extraColumn.SelectSQL);

    //get rid of any lingering joins
    foreach (JoinInfo j in CatalogueRepository.GetAllObjects <JoinInfo>())
    {
        j.DeleteInDatabase();
    }

    //add the ability to join the two tables in the query
    var idCol = _extractableDataSet.Catalogue.GetAllExtractionInformation(ExtractionCategory.Core).Single(c => c.IsExtractionIdentifier).ColumnInfo;
    var otherIdCol = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific).Single(e => e.GetRuntimeName().Equals("PrivateID")).ColumnInfo;
    new JoinInfo(CatalogueRepository, idCol, otherIdCol, ExtractionJoinType.Left, null);

    //generate a new request (this will include the newly created column)
    _request = new ExtractDatasetCommand(_configuration, new ExtractableDatasetBundle(_extractableDataSet));

    // reset the source table contents to two known rows
    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("TestTable");
    tbl.Truncate();

    using (var blk = tbl.BeginBulkInsert())
    {
        var dt = new DataTable();
        dt.Columns.Add("PrivateID");
        dt.Columns.Add("Name");
        dt.Columns.Add("DateOfBirth");
        dt.Rows.Add(new object[] { "Priv_12345", "Bob", "2001-01-01" });
        dt.Rows.Add(new object[] { "Priv_wtf11", "Frank", "2001-10-29" });
        blk.Upload(dt);
    }

    ExtractionPipelineUseCase useCase;
    IExecuteDatasetExtractionDestination results;
    Execute(out useCase, out results);

    var mainDataTableCsv = results.DirectoryPopulated.GetFiles().Single(f => f.Name.Equals("TestTable.csv"));
    Assert.IsNotNull(mainDataTableCsv);
    Assert.AreEqual("TestTable.csv", mainDataTableCsv.Name);

    // both rows released, each carrying its SuperSecretThing value from the custom table
    var lines = File.ReadAllLines(mainDataTableCsv.FullName);

    Assert.AreEqual("ReleaseID,Name,DateOfBirth,SuperSecretThing", lines[0]);

    var bobLine = lines.Single(l => l.StartsWith("Pub_54321,Bob"));
    var frankLine = lines.Single(l => l.StartsWith("Pub_11ftw,Frank"));

    Assert.AreEqual("Pub_54321,Bob,2001-01-01,monkeys can all secretly fly", bobLine);
    Assert.AreEqual("Pub_11ftw,Frank,2001-10-29,the wizard of OZ was a man behind a machine", frankLine);

    // clean up the appended column so it does not affect other tests
    asExtractable.DeleteInDatabase();
}
// End-to-end test of ForwardEngineerANOCatalogue over a catalogue featuring: a composite
// Lookup (sex + hb_extract -> Description), a hashed-on-release Internal-category column,
// a scalar transform (LEFT), a two-column concatenation transform, and a deleted
// DataLoadRunID special field. Verifies the ANO version preserves ExtractionInformation
// order/category/flags, refactors the transform SQL to the new table names and recreates
// the Lookup (including its composite join) rather than reusing the originals.
public void CreateANOVersionTest_LookupsAndExtractionInformations()
{
    var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest");

    var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);
    db.Create(true);

    BulkTestsData bulk = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);
    bulk.SetupTestData();
    bulk.ImportAsCatalogue();

    //Create a lookup table on the server
    var lookupTbl = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("z_sexLookup", new[]
    {
        new DatabaseColumnRequest("Code", "varchar(1)") { IsPrimaryKey = true },
        new DatabaseColumnRequest("hb_Code", "varchar(1)") { IsPrimaryKey = true },
        new DatabaseColumnRequest("Description", "varchar(100)")
    });

    //import a reference to the table
    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, lookupTbl);
    ColumnInfo[] lookupColumnInfos;
    TableInfo lookupTableInfo;
    importer.DoImport(out lookupTableInfo, out lookupColumnInfos);

    //Create a Lookup reference
    var ciSex = bulk.catalogue.CatalogueItems.Single(c => c.Name == "sex");
    var ciHb = bulk.catalogue.CatalogueItems.Single(c => c.Name == "hb_extract");

    var eiChi = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "chi");
    eiChi.IsExtractionIdentifier = true;
    eiChi.SaveToDatabase();

    // century: hashed on release and demoted to Internal category
    var eiCentury = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "century");
    eiCentury.HashOnDataRelease = true;
    eiCentury.ExtractionCategory = ExtractionCategory.Internal;
    eiCentury.SaveToDatabase();

    //add a transform
    var eiPostcode = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "current_postcode");
    eiPostcode.SelectSQL = string.Format("LEFT(10,{0}.[current_postcode])", eiPostcode.ColumnInfo.TableInfo.Name);
    eiPostcode.Alias = "MyMutilatedColumn";
    eiPostcode.SaveToDatabase();

    //add a combo transform (forename + surname concatenated into a single extractable column)
    var ciComboCol = new CatalogueItem(CatalogueRepository, bulk.catalogue, "ComboColumn");
    var colForename = bulk.columnInfos.Single(c => c.GetRuntimeName() == "forename");
    var colSurname = bulk.columnInfos.Single(c => c.GetRuntimeName() == "surname");

    // NOTE(review): relies on ColumnInfo.ToString() producing the column's qualified name — confirm
    var eiComboCol = new ExtractionInformation(CatalogueRepository, ciComboCol, colForename, colForename + " + ' ' + " + colSurname);
    eiComboCol.Alias = "ComboColumn";
    eiComboCol.SaveToDatabase();

    // remove the DataLoadRunID special field from the extractable columns
    var eiDataLoadRunId = bulk.extractionInformations.Single(ei => ei.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID));
    eiDataLoadRunId.DeleteInDatabase();

    var lookup = new Lookup(CatalogueRepository, lookupColumnInfos[2], ciSex.ColumnInfo, lookupColumnInfos[0], ExtractionJoinType.Left, null);

    //now lets make it worse, lets assume the sex code changes per healthboard therefore the join to the lookup requires both fields sex and hb_extract
    var compositeLookup = new LookupCompositeJoinInfo(CatalogueRepository, lookup, ciHb.ColumnInfo, lookupColumnInfos[1]);

    //now lets make the _Desc field in the original Catalogue
    int orderToInsertDescriptionFieldAt = ciSex.ExtractionInformation.Order;

    //bump everyone down 1 to make room for the description column directly after sex
    foreach (var toBumpDown in bulk.catalogue.CatalogueItems.Select(ci => ci.ExtractionInformation).Where(e => e != null && e.Order > orderToInsertDescriptionFieldAt))
    {
        toBumpDown.Order++;
        toBumpDown.SaveToDatabase();
    }

    var ciDescription = new CatalogueItem(CatalogueRepository, bulk.catalogue, "Sex_Desc");
    var eiDescription = new ExtractionInformation(CatalogueRepository, ciDescription, lookupColumnInfos[2], lookupColumnInfos[2].Name);
    eiDescription.Alias = "Sex_Desc";
    eiDescription.Order = orderToInsertDescriptionFieldAt + 1;
    eiDescription.ExtractionCategory = ExtractionCategory.Supplemental;
    eiDescription.SaveToDatabase();

    bulk.catalogue.ClearAllInjections();

    //check the source catalogue's query builds correctly before migrating
    QueryBuilder qb = new QueryBuilder(null, null);
    qb.AddColumnRange(bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    //The query builder should be able to succesfully create SQL
    Console.WriteLine(qb.SQL);

    //there should be 2 tables involved in the query [z_sexLookup] and [BulkData]
    Assert.AreEqual(2, qb.TablesUsedInQuery.Count);

    //the query builder should have identified the lookup
    Assert.AreEqual(lookup, qb.GetDistinctRequiredLookups().Single());

    //////////////////////////////////////////////////////////////////////////////////////The Actual Bit Being Tested////////////////////////////////////////////////////
    var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);
    planManager.TargetDatabase = db;

    //setup test rules for migrator
    CreateMigrationRules(planManager, bulk);

    //rules should pass checks
    Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);
    engine.Execute();
    //////////////////////////////////////////////////////////////////////////////////////End The Actual Bit Being Tested////////////////////////////////////////////////////

    var anoCatalogue = CatalogueRepository.GetAllObjects <Catalogue>().Single(c => c.Folder.Path.StartsWith("\\ano"));
    Assert.IsTrue(anoCatalogue.Exists());

    //The new Catalogue should have the same number of ExtractionInformations
    var eiSource = bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();
    var eiDestination = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();

    Assert.AreEqual(eiSource.Length, eiDestination.Length, "Both the new and the ANO catalogue should have the same number of ExtractionInformations (extractable columns)");

    // columns should correspond pairwise on order, name (modulo ANO prefix) and flags
    for (int i = 0; i < eiSource.Length; i++)
    {
        Assert.AreEqual(eiSource[i].Order, eiDestination[i].Order, "ExtractionInformations in the source and destination Catalogue should have the same order");
        Assert.AreEqual(eiSource[i].GetRuntimeName(), eiDestination[i].GetRuntimeName().Replace("ANO", ""), "ExtractionInformations in the source and destination Catalogue should have the same names (excluding ANO prefix)");
        Assert.AreEqual(eiSource[i].ExtractionCategory, eiDestination[i].ExtractionCategory, "Old / New ANO ExtractionInformations did not match on ExtractionCategory");
        Assert.AreEqual(eiSource[i].IsExtractionIdentifier, eiDestination[i].IsExtractionIdentifier, "Old / New ANO ExtractionInformations did not match on IsExtractionIdentifier");
        Assert.AreEqual(eiSource[i].HashOnDataRelease, eiDestination[i].HashOnDataRelease, "Old / New ANO ExtractionInformations did not match on HashOnDataRelease");
        Assert.AreEqual(eiSource[i].IsPrimaryKey, eiDestination[i].IsPrimaryKey, "Old / New ANO ExtractionInformations did not match on IsPrimaryKey");
    }

    //check the new ANO catalogue's query also builds
    QueryBuilder qbdestination = new QueryBuilder(null, null);
    qbdestination.AddColumnRange(anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    //The query builder should be able to succesfully create SQL
    Console.WriteLine(qbdestination.SQL);

    var anoEiPostcode = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("MyMutilatedColumn"));

    //The transform on postcode should have been refactored to the new table name and preserve the scalar function LEFT...
    Assert.AreEqual(string.Format("LEFT(10,{0}.[current_postcode])", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiPostcode.SelectSQL);

    var anoEiComboCol = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("ComboColumn"));

    //The combo transform should have been refactored to the new table name while preserving the concatenation
    Assert.AreEqual(string.Format("{0}.[forename] + ' ' + {0}.[surname]", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiComboCol.SelectSQL);

    //there should be 2 tables involved in the query [z_sexLookup] and [BulkData]
    Assert.AreEqual(2, qbdestination.TablesUsedInQuery.Count);

    //the query builder should have identified the lookup but it should be the new one not the old one
    Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Count(), "New query builder for ano catalogue did not correctly identify that there was a Lookup");
    Assert.AreNotEqual(lookup, qbdestination.GetDistinctRequiredLookups().Single(), "New query builder for ano catalogue identified the OLD Lookup!");

    Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins().Count(), "The new Lookup did not have the composite join key (sex/hb_extract)");
    Assert.AreNotEqual(compositeLookup, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins(), "New query builder for ano catalogue identified the OLD LookupCompositeJoinInfo!");

    db.Drop();

    //every exported object should have a corresponding import record
    var exports = CatalogueRepository.GetAllObjects <ObjectExport>().Count();
    var imports = CatalogueRepository.GetAllObjects <ObjectImport>().Count();

    Assert.AreEqual(exports, imports);
    Assert.IsTrue(exports > 0);
}
/// <summary>
/// Uploads a one-row test table, imports it into the Catalogue and returns an
/// ExtractDatasetCommand configured against the test cohort.
/// </summary>
/// <param name="testTableName">Name of the database table to create.</param>
/// <param name="pkExtractionColumns">Runtime names of ExtractionInformations to flag as IsPrimaryKey.</param>
/// <param name="pkColumnInfos">Optional column names to mark as the DataTable primary key before upload.</param>
/// <param name="withLookup">True to also create the lookup test fixture (SetupLookupTable).</param>
/// <param name="withJoin">True to also create the join test fixture (SetupJoin).</param>
private ExtractDatasetCommand SetupExtractDatasetCommand(string testTableName, string[] pkExtractionColumns, string[] pkColumnInfos = null, bool withLookup = false, bool withJoin = false)
{
    //single record dataset; the private identifier value comes from the generated cohort keys
    var sourceData = new DataTable();
    sourceData.Columns.Add("PrivateID");
    sourceData.Columns.Add("Name");
    sourceData.Columns.Add("DateOfBirth");

    if (pkColumnInfos != null)
        sourceData.PrimaryKey = sourceData.Columns.Cast<DataColumn>().Where(col => pkColumnInfos.Contains(col.ColumnName)).ToArray();

    sourceData.Rows.Add(new object[] { _cohortKeysGenerated.Keys.First(), "Dave", "2001-01-01" });

    //upload to the scratch database, forcing Name to be varchar(50)
    var uploadedTable = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable(testTableName, sourceData,
        new[] { new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50)) });

    TableInfo tableInfo;
    ColumnInfo[] columnInfos;
    CatalogueItem[] cataItems;
    ExtractionInformation[] extractionInformations;

    _catalogue = Import(uploadedTable, out tableInfo, out columnInfos, out cataItems, out extractionInformations);

    //PrivateID is the linkage identifier used for cohort extraction
    ExtractionInformation identifierColumn = extractionInformations.First(e => e.GetRuntimeName().Equals("PrivateID"));
    identifierColumn.IsExtractionIdentifier = true;
    identifierColumn.SaveToDatabase();

    if (withLookup)
        SetupLookupTable();

    if (withJoin)
        SetupJoin();

    _catalogue.ClearAllInjections();

    //re-fetch: SetupLookupTable/SetupJoin may have added further extractable columns
    extractionInformations = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any);

    foreach (var pkName in pkExtractionColumns)
    {
        ExtractionInformation pkColumn = extractionInformations.First(e => e.GetRuntimeName().Equals(pkName));
        pkColumn.IsPrimaryKey = true;
        pkColumn.SaveToDatabase();
    }

    ExtractionConfiguration configuration;
    IExtractableDataSet extractableDataSet;
    Project project;

    SetupDataExport(testTableName, _catalogue, out configuration, out extractableDataSet, out project);

    configuration.Cohort_ID = _extractableCohort.ID;
    configuration.SaveToDatabase();

    return new ExtractDatasetCommand(configuration, new ExtractableDatasetBundle(extractableDataSet));
}
/// <summary>
/// Creates two related tables (Tests / Results) joined on TestId, imports both into the
/// Catalogue, sets up an ANOTable for anonymising TestId and builds a 'Combo Catalogue'
/// whose columns are drawn from both tables.  Also creates the destination database used
/// by the migration tests.
/// </summary>
public void SetupExampleTables()
{
    //Tests has a single column PK (TestId); Results has a composite PK (TestId + Measure)
    //and a foreign key back to Tests.  The GO separators mean this must be run as a batch.
    string sql = @"CREATE TABLE [dbo].[Tests](
 [chi] [varchar](10) NULL,
 [Date] [datetime] NULL,
 [hb_extract] [varchar](1) NULL,
 [TestId] [int] NOT NULL,
 CONSTRAINT [PK_Tests] PRIMARY KEY CLUSTERED
(
 [TestId] ASC
)
)
GO
CREATE TABLE [dbo].[Results](
 [TestId] [int] NOT NULL,
 [Measure] [varchar](10) NOT NULL,
 [Value] [int] NULL,
 CONSTRAINT [PK_Results] PRIMARY KEY CLUSTERED
(
 [TestId] ASC,
 [Measure] ASC
)
)
GO
ALTER TABLE [dbo].[Results] WITH CHECK ADD CONSTRAINT [FK_Results_Tests] FOREIGN KEY([TestId])
REFERENCES [dbo].[Tests] ([TestId])
GO";

    var server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;
    using (var con = server.GetConnection())
    {
        con.Open();
        //presumably splits the script on GO lines and executes each batch - TODO confirm against UsefulStuff
        UsefulStuff.ExecuteBatchNonQuery(sql, con);
    }

    //import both physical tables into the Catalogue database as TableInfos/ColumnInfos
    var importer1 = new TableInfoImporter(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("Tests"));
    var importer2 = new TableInfoImporter(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("Results"));

    importer1.DoImport(out t1, out c1);
    importer2.DoImport(out t2, out c2);

    //forward engineer a Catalogue (with extractable columns) for each imported table
    var engineer1 = new ForwardEngineerCatalogue(t1, c1, true);
    var engineer2 = new ForwardEngineerCatalogue(t2, c2, true);

    engineer1.ExecuteForwardEngineering(out cata1, out cataItems1, out eis1);
    engineer2.ExecuteForwardEngineering(out cata2, out cataItems2, out eis2);

    //declare how the two tables join: Tests.TestId LEFT JOIN Results.TestId
    new JoinInfo(CatalogueRepository,
        c1.Single(e => e.GetRuntimeName().Equals("TestId")),
        c2.Single(e => e.GetRuntimeName().Equals("TestId")),
        ExtractionJoinType.Left, null);

    //ANO configuration for anonymising TestId (10 characters in the anonymous representation)
    _anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOTes", "T");
    _anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 10;
    _anoTable.SaveToDatabase();
    _anoTable.PushToANOServerAsNewTable("int", new ThrowImmediatelyCheckNotifier());

    //a Catalogue whose columns span both tables
    _comboCata = new Catalogue(CatalogueRepository, "Combo Catalogue");

    //pk
    var ciTestId = new CatalogueItem(CatalogueRepository, _comboCata, "TestId");
    var colTestId = c1.Single(c => c.GetRuntimeName().Equals("TestId"));
    ciTestId.ColumnInfo_ID = colTestId.ID;
    ciTestId.SaveToDatabase();
    var eiTestId = new ExtractionInformation(CatalogueRepository, ciTestId, colTestId, colTestId.Name);

    //Measure
    var ciMeasure = new CatalogueItem(CatalogueRepository, _comboCata, "Measuree");
    var colMeasure = c2.Single(c => c.GetRuntimeName().Equals("Measure"));
    ciMeasure.ColumnInfo_ID = colMeasure.ID;
    ciMeasure.SaveToDatabase();
    var eiMeasure = new ExtractionInformation(CatalogueRepository, ciMeasure, colMeasure, colMeasure.Name);

    //Date
    var ciDate = new CatalogueItem(CatalogueRepository, _comboCata, "Dat");
    var colDate = c1.Single(c => c.GetRuntimeName().Equals("Date"));
    ciDate.ColumnInfo_ID = colDate.ID;
    ciDate.SaveToDatabase();
    var eiDate = new ExtractionInformation(CatalogueRepository, ciDate, colDate, colDate.Name);

    //destination database that the anonymised tables will be migrated into
    var destDatabaseName = TestDatabaseNames.GetConsistentName("ANOMigrationTwoTableTests");
    _destinationDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(destDatabaseName);
    _destinationDatabase.Create(true);
}
/// <summary>
/// End to end test of table valued function (TVF) support: creates a cohort database and a
/// TVF Catalogue, runs five sub-tests (query building, aggregates, cohort identification
/// configuration and data export) then tears everything down.  Teardown order matters:
/// dependents (parameters, aggregates) are deleted before the objects that own them.
/// </summary>
public void EndToEndTest()
{
    var cohortDatabaseNameWillBe = TestDatabaseNames.GetConsistentName("TbvCohort");
    _discoveredCohortDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseNameWillBe);

    //cleanup - drop any remnant cohort database from a previous run
    if (_discoveredCohortDatabase.Exists())
        _discoveredCohortDatabase.Drop();

    //create a normal catalogue
    CreateANormalCatalogue();

    //create a cohort database using wizard
    CreateNewCohortDatabaseWizard cohortDatabaseWizard = new CreateNewCohortDatabaseWizard(_discoveredCohortDatabase, CatalogueRepository, DataExportRepository, false);

    _externalCohortTable = cohortDatabaseWizard.CreateDatabase(
        new PrivateIdentifierPrototype(_nonTvfExtractionIdentifier)
        , new ThrowImmediatelyCheckNotifier());

    //create a table valued function
    CreateTvfCatalogue(cohortDatabaseNameWillBe);

    //Test 1
    TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException();

    PopulateCohortDatabaseWithRecordsFromNonTvfCatalogue();

    //Test 2
    TestWithParameterValueThatRowsAreReturned();

    //Test 3
    TestUsingTvfForAggregates();

    //Test 4
    TestAddingTvfToCIC();

    //Test 5
    TestDataExportOfTvf();

    //tear down - exactly one ExtractableCohort is expected to exist at this point
    DataExportRepository.GetAllObjects<ExtractableCohort>().Single().DeleteInDatabase();
    _externalCohortTable.DeleteInDatabase();

    //drop the physical table and TVF created by the sub-tests
    DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("NonTVFTable").Drop();
    DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTableValuedFunction("GetTopXRandom").Drop();

    //delete global parameter
    ((AnyTableSqlParameter)_aggregate.GetAllParameters().Single()).DeleteInDatabase();
    //delete aggregate
    _aggregate.DeleteInDatabase();

    ((AnyTableSqlParameter)_cicAggregate.GetAllParameters().Single()).DeleteInDatabase();
    //delete aggregate
    _cicAggregate.DeleteInDatabase();

    //get rid of the cohort identification configuration
    _cic.DeleteInDatabase();
    _pipe.DeleteInDatabase();

    //get rid of the cohort database
    _discoveredCohortDatabase.Drop();

    //finally remove the Catalogues/TableInfos for both the normal and TVF datasets
    _nonTvfCatalogue.DeleteInDatabase();
    _nonTvfTableInfo.DeleteInDatabase();

    _tvfCatalogue.DeleteInDatabase();
    _tvfTableInfo.DeleteInDatabase();
}
/// <summary>
/// Tests ExecuteCommandCreateLookup: creates a main dataset and a lookup table, runs the
/// command for the given scenario and confirms the effect (or non-effect) on the main
/// Catalogue's extraction SQL, then deletes everything it created.
/// </summary>
/// <param name="testCase">Which lookup creation scenario to run.</param>
public void TestLookupCommand(LookupTestCase testCase)
{
    //main dataset whose SendingLocation code column will gain a lookup description
    DataTable dt = new DataTable();
    dt.Columns.Add("ID");
    dt.Columns.Add("SendingLocation");
    dt.Columns.Add("DischargeLocation");
    dt.Columns.Add("Country");

    var maintbl = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("MainDataset", dt);
    var mainCata = Import(maintbl);

    //lookup table mapping LocationCode to address/description columns
    DataTable dtLookup = new DataTable();
    dtLookup.Columns.Add("LocationCode");
    dtLookup.Columns.Add("Line1");
    dtLookup.Columns.Add("Line2");
    dtLookup.Columns.Add("Postcode");
    dtLookup.Columns.Add("Country");

    var lookuptbl = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("Lookup", dtLookup);
    var lookupCata = Import(lookuptbl);

    ExtractionInformation fkEi = mainCata.GetAllExtractionInformation(ExtractionCategory.Any).Single(n => n.GetRuntimeName() == "SendingLocation");

    //fetch the lookup Catalogue's single TableInfo once instead of re-querying it per column
    //(also removed the previously unused fk/descLine2 locals which were dead repository reads)
    var lookupTableInfo = lookupCata.GetTableInfoList(false).Single();
    ColumnInfo pk = lookupTableInfo.ColumnInfos.Single(n => n.GetRuntimeName() == "LocationCode");
    ColumnInfo descLine1 = lookupTableInfo.ColumnInfos.Single(n => n.GetRuntimeName() == "Line1");

    ExecuteCommandCreateLookup cmd = null;

    var sqlBefore = GetSql(mainCata);

    switch (testCase)
    {
        case LookupTestCase.SingleKeySingleDescriptionNoVirtualColumn:
            cmd = new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk, null, false);
            cmd.Execute();

            //sql should not have changed because we didn't create an new ExtractionInformation virtual column
            Assert.AreEqual(sqlBefore, GetSql(mainCata));
            break;
        case LookupTestCase.SingleKeySingleDescription:
            cmd = new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk, null, true);
            cmd.Execute();

            //should have the lookup join and the virtual column _Desc
            var sqlAfter = GetSql(mainCata);
            Assert.IsTrue(sqlAfter.Contains("JOIN"));
            Assert.IsTrue(sqlAfter.Contains("SendingLocation_Desc"));
            break;
        default:
            throw new ArgumentOutOfRangeException("testCase");
    }

    //teardown: delete dependents (Lookups, composite joins) before the TableInfos/Catalogues they reference
    foreach (var d in CatalogueRepository.GetAllObjects<Lookup>())
        d.DeleteInDatabase();

    foreach (var d in CatalogueRepository.GetAllObjects<LookupCompositeJoinInfo>())
        d.DeleteInDatabase();

    foreach (var d in CatalogueRepository.GetAllObjects<TableInfo>())
        d.DeleteInDatabase();

    foreach (var d in CatalogueRepository.GetAllObjects<Catalogue>())
        d.DeleteInDatabase();

    maintbl.Drop();
    lookuptbl.Drop();
}
/// <summary>
/// Runs the KVPAttacher against one or two key-value-pair CSV files and confirms the
/// expected number of rows end up in the destination table KVPTestTable.
/// </summary>
/// <param name="testCase">Controls which input files are copied in and whether the destination table has a Person column.</param>
public void KVPAttacherTest_Attach(KVPAttacherTestCase testCase)
{
    bool hasPk = testCase != KVPAttacherTestCase.OneFileWithoutPrimaryKey;
    var attacher = new KVPAttacher();
    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("KVPTestTable");

    var workingDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory);
    var parentDir = workingDir.CreateSubdirectory("KVPAttacherTestProjectDirectory");
    var projectDir = LoadDirectory.CreateDirectoryStructure(parentDir, "KVPAttacherTest", true);

    string filepk = "kvpTestFilePK.csv";
    string filepk2 = "kvpTestFilePK2.csv";
    string fileNoPk = "kvpTestFile_NoPK.csv";

    //copy whichever input file(s) this test case needs into the load directory
    if (testCase == KVPAttacherTestCase.OneFileWithPrimaryKey || testCase == KVPAttacherTestCase.TwoFilesWithPrimaryKey)
        CopyToBin(projectDir, filepk);

    if (testCase == KVPAttacherTestCase.TwoFilesWithPrimaryKey)
        CopyToBin(projectDir, filepk2);

    if (testCase == KVPAttacherTestCase.OneFileWithoutPrimaryKey)
        CopyToBin(projectDir, fileNoPk);

    //drop any remnant destination table from a previous run
    if (tbl.Exists())
        tbl.Drop();

    //Create destination data table on server (where the data will ultimately end up)
    using (var con = (SqlConnection)tbl.Database.Server.GetConnection())
    {
        con.Open();
        string sql = hasPk
            ? "CREATE TABLE KVPTestTable (Person varchar(100), Test varchar(50), Result int)"
            : "CREATE TABLE KVPTestTable (Test varchar(50), Result int)";

        //SqlCommand is IDisposable - dispose it instead of leaking the command object
        using (var cmd = new SqlCommand(sql, con))
            cmd.ExecuteNonQuery();
    }

    //delete any pipeline left over from a previous (crashed) test run
    var remnantPipeline = CatalogueRepository.GetAllObjects<Pipeline>("WHERE Name='KVPAttacherTestPipeline'").SingleOrDefault();

    if (remnantPipeline != null)
        remnantPipeline.DeleteInDatabase();

    //Setup the Pipeline
    var p = new Pipeline(CatalogueRepository, "KVPAttacherTestPipeline");

    //With a CSV source
    var flatFileLoad = new PipelineComponent(CatalogueRepository, p, typeof(DelimitedFlatFileDataFlowSource), 0, "Data Flow Source");

    //followed by a Transpose that turns columns to rows (see how the test file grows right with new records instead of down, this is common in KVP input files but not always)
    var transpose = new PipelineComponent(CatalogueRepository, p, typeof(Transposer), 1, "Transposer");

    var saneHeaders = transpose.CreateArgumentsForClassIfNotExists(typeof(Transposer)).Single(a => a.Name.Equals("MakeHeaderNamesSane"));
    saneHeaders.SetValue(false);
    saneHeaders.SaveToDatabase();

    //set the source separator to comma
    flatFileLoad.CreateArgumentsForClassIfNotExists(typeof(DelimitedFlatFileDataFlowSource));
    var arg = flatFileLoad.PipelineComponentArguments.Single(a => a.Name.Equals("Separator"));
    arg.SetValue(",");
    arg.SaveToDatabase();

    arg = flatFileLoad.PipelineComponentArguments.Single(a => a.Name.Equals("MakeHeaderNamesSane"));
    arg.SetValue(false);
    arg.SaveToDatabase();

    p.SourcePipelineComponent_ID = flatFileLoad.ID;
    p.SaveToDatabase();

    try
    {
        attacher.PipelineForReadingFromFlatFile = p;
        attacher.TableName = "KVPTestTable";

        switch (testCase)
        {
            case KVPAttacherTestCase.OneFileWithPrimaryKey:
                attacher.FilePattern = filepk;
                break;
            case KVPAttacherTestCase.OneFileWithoutPrimaryKey:
                attacher.FilePattern = fileNoPk;
                break;
            case KVPAttacherTestCase.TwoFilesWithPrimaryKey:
                attacher.FilePattern = "kvpTestFilePK*.*";
                break;
            default:
                throw new ArgumentOutOfRangeException("testCase");
        }

        if (hasPk)
            attacher.PrimaryKeyColumns = "Person";

        attacher.TargetDataTableKeyColumnName = "Test";
        attacher.TargetDataTableValueColumnName = "Result";

        attacher.Initialize(projectDir, DiscoveredDatabaseICanCreateRandomTablesIn);

        attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken());

        //test file contains 291 values belonging to 3 different people
        int expectedRows = 291;

        //if we loaded two files (or should have done) then add the number of values in that file (54)
        if (testCase == KVPAttacherTestCase.TwoFilesWithPrimaryKey)
            expectedRows += 54;

        Assert.AreEqual(expectedRows, tbl.GetRowCount());
    }
    finally
    {
        //always remove the pipeline and the destination table, even if the attach failed
        p.DeleteInDatabase();
        tbl.Drop();
    }
}