protected override void OneTimeSetUp()
{
    base.OneTimeSetUp();

    Console.WriteLine("Setting SetUp bulk test data");
    _bulkData = new BulkTestsData(RepositoryLocator.CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer));
    _bulkData.SetupTestData();

    Console.WriteLine("Importing to Catalogue");
    var bulkTable = _bulkData.Table;
    var importer = new TableInfoImporter(CatalogueRepository, bulkTable);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);

    Console.WriteLine("Imported TableInfo " + tableInfoCreated);
    Console.WriteLine("Imported ColumnInfos " + string.Join(",", columnInfosCreated.Select(c => c.GetRuntimeName())));

    Assert.NotNull(tableInfoCreated);

    // The importer should have picked up chi as the primary key of the bulk table
    var chiColumn = columnInfosCreated.Single(c => c.GetRuntimeName().Equals("chi"));
    Console.WriteLine("CHI is primary key? (expecting true):" + chiColumn.IsPrimaryKey);
    Assert.IsTrue(chiColumn.IsPrimaryKey);

    // Remove the name columns from the imported metadata so tests run against a reduced schema
    foreach (var unwanted in new[] { "surname", "forename" })
        tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals(unwanted)).DeleteInDatabase();

    tableInfoCreated.ClearAllInjections();
}
public void SetupBulkTestData()
{
    Console.WriteLine("Cleaning up remnants");
    Cleanup();

    Console.WriteLine("Setting up bulk test data");
    _bulkData = new BulkTestsData(RepositoryLocator.CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn);
    _bulkData.SetupTestData();

    Console.WriteLine("Importing to Catalogue");
    var bulkTable = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(BulkTestsData.BulkDataTable);
    var importer = new TableInfoImporter(CatalogueRepository, bulkTable);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);

    Console.WriteLine("Imported TableInfo " + tableInfoCreated);
    Console.WriteLine("Imported ColumnInfos " + string.Join(",", columnInfosCreated.Select(c => c.GetRuntimeName())));

    Assert.NotNull(tableInfoCreated);

    // chi should have been identified as the primary key during import
    var chiColumn = columnInfosCreated.Single(c => c.GetRuntimeName().Equals("chi"));
    Console.WriteLine("CHI is primary key? (expecting true):" + chiColumn.IsPrimaryKey);
    Assert.IsTrue(chiColumn.IsPrimaryKey);

    // Strip out the name columns so tests operate on a reduced schema
    foreach (var columnName in new[] { "surname", "forename" })
        tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals(columnName)).DeleteInDatabase();

    tableInfoCreated.ClearAllInjections();
}
// Imports the given table and wraps it in a throwaway Catalogue ("DeleteMe"),
// pushing every object created onto the cleanup stack so the test can tear it all down.
private Catalogue SetupATestCatalogue(SqlConnectionStringBuilder builder, string database, string table)
{
    //create a new catalogue for test data (in the test data catalogue)
    var cat = new Catalogue(CatalogueRepository, "DeleteMe");
    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, builder.DataSource, database, table, DatabaseType.MicrosoftSQLServer, builder.UserID, builder.Password);
    importer.DoImport(out var tableInfo, out var columnInfos);

    toCleanUp.Push(cat);

    //push the credentials if there are any
    var creds = (DataAccessCredentials)tableInfo.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing);
    if (creds != null)
    {
        toCleanUp.Push(creds);
    }

    //and the TableInfo
    toCleanUp.Push(tableInfo);

    //for each imported column create a matching CatalogueItem in the new Catalogue
    foreach (ColumnInfo col in columnInfos)
    {
        //create it with the runtime portion of the column name (qualifier/brackets stripped)
        var cataItem = new CatalogueItem(CatalogueRepository, cat, col.Name.Substring(col.Name.LastIndexOf(".") + 1).Trim('[', ']', '`'));
        toCleanUp.Push(cataItem);

        cataItem.SetColumnInfo(col);
        toCleanUp.Push(col);
    }

    return cat;
}
// Imports the table as a TableInfo, forward engineers a Catalogue for it and
// wires the Catalogue to the given load configuration and logging task.
private TableInfo Import(DiscoveredTable tbl, LoadMetadata lmd, LogManager logManager)
{
    // Ensure the logging task exists before pointing the Catalogue at it
    logManager.CreateNewLoggingTaskIfNotExists(lmd.Name);

    //import TableInfos (inline out declarations, consistent with the rest of the file)
    var importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out TableInfo ti, out ColumnInfo[] cis);

    //create Catalogue (CatalogueItems/ExtractionInformations are not needed here)
    var forwardEngineer = new ForwardEngineerCatalogue(ti, cis, true);
    forwardEngineer.ExecuteForwardEngineering(out Catalogue cata, out _, out _);

    //make the catalogue use the load configuration
    cata.LoadMetadata_ID = lmd.ID;
    cata.LoggingDataTask = lmd.Name;
    Assert.IsNotNull(cata.LiveLoggingServer_ID); //catalogue should have one of these because of system defaults
    cata.SaveToDatabase();

    return ti;
}
// Rebuilds the Importer to match the currently selected table (TVFs need a
// dedicated importer) and toggles the import button accordingly.
private void AdjustImporter()
{
    var cataRepo = Activator.RepositoryLocator.CatalogueRepository;

    try
    {
        DiscoveredTable tbl = serverDatabaseTableSelector1.GetDiscoveredTable();

        // Nothing selected yet - disable import until the user picks a table
        if (tbl == null)
        {
            btnImport.Enabled = false;
            return;
        }

        // Table valued functions require their own importer; plain tables use TableInfoImporter
        // (previous comment here said "isn't a table valued function" which inverted the condition)
        if (tbl is DiscoveredTableValuedFunction tvf)
            Importer = new TableValuedFunctionImporter(cataRepo, tvf, (DataAccessContext)ddContext.SelectedValue);
        else
            Importer = new TableInfoImporter(cataRepo, tbl, (DataAccessContext)ddContext.SelectedValue);

        btnImport.Enabled = true;
    }
    catch (Exception exception)
    {
        ExceptionViewer.Show(exception);
    }
}
// Creates a simple non-TVF table, imports it and forward engineers a Catalogue
// whose single column (chi) is flagged as the extraction identifier.
private void CreateANormalCatalogue()
{
    var server = _database.Server;

    using (var con = server.GetConnection())
    {
        con.Open();
        server.GetCommand("CREATE TABLE NonTVFTable ( chi varchar(10))", con).ExecuteNonQuery();
        server.GetCommand("INSERT INTO NonTVFTable VALUES ('0101010101')", con).ExecuteNonQuery();
        server.GetCommand("INSERT INTO NonTVFTable VALUES ('0202020202')", con).ExecuteNonQuery();
        server.GetCommand("INSERT INTO NonTVFTable VALUES ('0303030303')", con).ExecuteNonQuery();
    }

    var importer = new TableInfoImporter(CatalogueRepository, server.Name, _database.GetRuntimeName(), "NonTVFTable", DatabaseType.MicrosoftSQLServer, _database.Server.ExplicitUsernameIfAny, _database.Server.ExplicitPasswordIfAny);
    importer.DoImport(out var importedTable, out var importedColumns);

    var engineer = new ForwardEngineerCatalogue(importedTable, importedColumns, true);
    engineer.ExecuteForwardEngineering(out var catalogue, out var items, out var extractionInfos);

    // Only one column exists so Single() gives us chi; mark it as the identifier
    _nonTvfExtractionIdentifier = extractionInfos.Single();
    _nonTvfExtractionIdentifier.IsExtractionIdentifier = true;
    _nonTvfExtractionIdentifier.SaveToDatabase();

    _nonTvfCatalogue = catalogue;
    _nonTvfTableInfo = importedTable;
}
// Creates a one column test table (MyValue) with a single row and imports it,
// then builds a ReferentialIntegrityConstraint pointing at the imported column.
protected override void OneTimeSetUp()
{
    base.OneTimeSetUp();

    // Hoisted: the original resolved GetCleanedServer(...) twice for the same database
    var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);

    var tbl = db.ExpectTable("ReferentialIntegrityConstraintTests");
    if (tbl.Exists())
    {
        tbl.Drop();
    }

    var server = db.Server;
    using (var con = server.GetConnection())
    {
        con.Open();
        server.GetCommand("CREATE TABLE ReferentialIntegrityConstraintTests(MyValue int)", con).ExecuteNonQuery();
        server.GetCommand("INSERT INTO ReferentialIntegrityConstraintTests (MyValue) VALUES (5)", con).ExecuteNonQuery();
    }

    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out _tableInfo, out _columnInfo);

    _constraint = new ReferentialIntegrityConstraint(CatalogueRepository);
    _constraint.OtherColumnInfo = _columnInfo.Single(); // table has exactly one column
}
// Imports the named table, marks pkName as the primary key and forward
// engineers it either into a brand new Catalogue or the existing _catalogue.
private ITableInfo AddTableToCatalogue(string databaseName, string tableName, string pkName, out ColumnInfo[] ciList, bool createCatalogue = false)
{
    var expectedTable = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName).ExpectTable(tableName);
    var resultsImporter = new TableInfoImporter(CatalogueRepository, expectedTable);
    resultsImporter.DoImport(out var ti, out ciList);

    // Flag the requested column as the primary key
    var pkColumn = ciList.Single(info => info.GetRuntimeName().Equals(pkName));
    pkColumn.IsPrimaryKey = true;
    pkColumn.SaveToDatabase();

    var forwardEngineer = new ForwardEngineerCatalogue(ti, ciList);

    if (createCatalogue)
        forwardEngineer.ExecuteForwardEngineering(out _catalogue, out _, out _);
    else
        forwardEngineer.ExecuteForwardEngineering(_catalogue);

    return ti;
}
// Imports an existing table (preselected or chosen interactively) and walks the
// user through Catalogue creation; offers to delete the TableInfo on cancel.
public override void Execute()
{
    base.Execute();

    var tbl = _importTable ?? BasicActivator.SelectTable(false, "Table to import");
    if (tbl == null)
        return;

    var importer = new TableInfoImporter(BasicActivator.RepositoryLocator.CatalogueRepository, tbl);
    importer.DoImport(out var ti, out _);

    var c = BasicActivator.CreateAndConfigureCatalogue(ti, null, "Existing table", ProjectSpecific, TargetFolder);

    // Catalogue creation abandoned - offer to tidy up the now orphaned TableInfo
    if ((c == null || !c.Exists()) &&
        BasicActivator.IsInteractive &&
        BasicActivator.YesNo("You have cancelled Catalogue creation. Do you want to delete the TableInfo metadata reference (this will not affect any database tables)?", "Delete TableInfo", out bool chosen) &&
        chosen)
    {
        ti.DeleteInDatabase();
    }
}
// Recreates the ReferentialIntegrityConstraintTests table with a single row,
// imports it and points a ReferentialIntegrityConstraint at its only column.
public void Setup()
{
    // Drop any leftover table from a previous run before recreating it
    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("ReferentialIntegrityConstraintTests");
    if (tbl.Exists())
        tbl.Drop();

    var server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;
    using (var con = server.GetConnection())
    {
        con.Open();
        server.GetCommand("CREATE TABLE ReferentialIntegrityConstraintTests(MyValue int)", con).ExecuteNonQuery();
        server.GetCommand("INSERT INTO ReferentialIntegrityConstraintTests (MyValue) VALUES (5)", con).ExecuteNonQuery();
    }

    new TableInfoImporter(CatalogueRepository, tbl).DoImport(out _tableInfo, out _columnInfo);

    _constraint = new ReferentialIntegrityConstraint(CatalogueRepository);
    _constraint.OtherColumnInfo = _columnInfo.Single();
}
// Imports the discovered table and returns only the TableInfo (ColumnInfos discarded).
private TableInfo ImportTableInfo(DiscoveredTable tbl)
{
    new TableInfoImporter(_repos.CatalogueRepository, tbl).DoImport(out TableInfo ti, out _);
    return ti;
}
// Imports the same table through both a memory repository and the database
// repository and asserts the resulting metadata objects are equal.
public void TestImportingATable(DatabaseType dbType)
{
    // Empty five-column table to upload and import
    DataTable dt = new DataTable();
    dt.Columns.Add("Do");
    dt.Columns.Add("Ray");
    dt.Columns.Add("Me");
    dt.Columns.Add("Fa");
    dt.Columns.Add("So");

    var db = GetCleanedServer(dbType);
    var tbl = db.CreateTable("OmgTables", dt);

    var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository.GetServerDefaults());

    // Import + forward engineer via the in-memory repository
    // (inline out vars; unused outs discarded - the assertions below use Catalogue properties instead)
    var importer1 = new TableInfoImporter(memoryRepository, tbl, DataAccessContext.Any);
    importer1.DoImport(out var memTableInfo, out var memColumnInfos);
    var forwardEngineer1 = new ForwardEngineerCatalogue(memTableInfo, memColumnInfos);
    forwardEngineer1.ExecuteForwardEngineering(out var memCatalogue, out _, out _);

    // Import + forward engineer via the database repository
    var importerdb = new TableInfoImporter(CatalogueRepository, tbl, DataAccessContext.Any);
    importerdb.DoImport(out var dbTableInfo, out var dbColumnInfos);
    var forwardEngineer2 = new ForwardEngineerCatalogue(dbTableInfo, dbColumnInfos);
    forwardEngineer2.ExecuteForwardEngineering(out var dbCatalogue, out _, out _);

    // Both repository types should produce identical metadata
    UnitTests.AssertAreEqual(memCatalogue, dbCatalogue);
    UnitTests.AssertAreEqual(memTableInfo, dbTableInfo);
    UnitTests.AssertAreEqual(memCatalogue.CatalogueItems, dbCatalogue.CatalogueItems);
    UnitTests.AssertAreEqual(memCatalogue.GetAllExtractionInformation(ExtractionCategory.Any), dbCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));
    UnitTests.AssertAreEqual(memCatalogue.CatalogueItems.Select(ci => ci.ColumnInfo), dbCatalogue.CatalogueItems.Select(ci => ci.ColumnInfo));
}
// Verifies the Distincter removes nothing when no rows are exact duplicates
// (each pk appears twice but with differing f4 values).
public void TestDistincter_NoDuplicates(DatabaseType type)
{
    var db = GetCleanedServer(type, "TestCoalescer", true);

    const int batchCount = 1000;

    DataTable dt = new DataTable("TestCoalescer_RampantNullness");
    dt.Columns.Add("pk");
    dt.Columns.Add("f1");
    dt.Columns.Add("f2");
    dt.Columns.Add("f3");
    dt.Columns.Add("f4");

    Random r = new Random(123); // fixed seed so the test data is reproducible

    for (int i = 0; i < batchCount; i++)
    {
        int randInt = r.Next(int.MaxValue);
        dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt });
        dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt + 1 }); // near-duplicate, differs in f4
    }

    var tbl = db.CreateTable(dt.TableName, dt);

    var importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out var tableInfo, out var colInfos);

    //lie about what the primary key is because this component is designed to run in the RAW environment and we are simulating a LIVE TableInfo (correctly)
    var pk = colInfos.Single(c => c.GetRuntimeName().Equals("pk"));
    pk.IsPrimaryKey = true;
    pk.SaveToDatabase();

    var rowsBefore = tbl.GetRowCount();

    var distincter = new Distincter();
    distincter.TableRegexPattern = new Regex(".*");
    distincter.Initialize(db, LoadStage.AdjustRaw);

    var job = Mock.Of<IDataLoadJob>(p =>
        p.RegularTablesToLoad == new List<ITableInfo>(new[] { tableInfo }) &&
        p.Configuration == new HICDatabaseConfiguration(db.Server, null, null, null));

    distincter.Mutilate(job);

    // Nothing was an exact duplicate so the row count must be unchanged
    var rowsAfter = tbl.GetRowCount();
    Assert.AreEqual(rowsBefore, rowsAfter);

    db.Drop();
}
// Imports the table as TableInfo/ColumnInfo metadata then forward engineers a
// complete Catalogue from it, returning the Catalogue and all created parts.
protected ICatalogue Import(DiscoveredTable tbl, out ITableInfo tableInfoCreated, out ColumnInfo[] columnInfosCreated, out CatalogueItem[] catalogueItems, out ExtractionInformation[] extractionInformations)
{
    new TableInfoImporter(CatalogueRepository, tbl).DoImport(out tableInfoCreated, out columnInfosCreated);

    var engineer = new ForwardEngineerCatalogue(tableInfoCreated, columnInfosCreated, true);
    engineer.ExecuteForwardEngineering(out var catalogue, out catalogueItems, out extractionInformations);

    return catalogue;
}
// Fired when the pipeline finishes; sanity checks the destination table exists
// then opens the extractability configuration dialog for importing it.
void ui_PipelineExecutionFinishedsuccessfully(object sender, PipelineEngineEventArgs args)
{
    // Fixed typo/grammar in the error message ("Pipeline execute succesfully")
    if (!_table.Exists())
    {
        throw new Exception("Pipeline executed successfully but the expected table '" + _table + "' did not exist");
    }

    var importer = new TableInfoImporter(Activator.RepositoryLocator.CatalogueRepository, _table);
    var createCatalogue = new ConfigureCatalogueExtractabilityUI(Activator, importer, "Execution of '" + _aggregateConfiguration + "' (AggregateConfiguration ID =" + _aggregateConfiguration.ID + ")", _projectSpecific);
    createCatalogue.ShowDialog();
}
// Fired when the pipeline finishes; sanity checks the destination table exists
// then imports it and launches Catalogue creation/configuration.
void ui_PipelineExecutionFinishedsuccessfully(object sender, PipelineEngineEventArgs args)
{
    // Fixed typo/grammar in the error message ("Pipeline execute succesfully")
    if (!_table.Exists())
    {
        throw new Exception("Pipeline executed successfully but the expected table '" + _table + "' did not exist");
    }

    var importer = new TableInfoImporter(BasicActivator.RepositoryLocator.CatalogueRepository, _table);
    importer.DoImport(out var ti, out _);

    BasicActivator.CreateAndConfigureCatalogue(ti, null, "Execution of '" + _aggregateConfiguration + "' (AggregateConfiguration ID =" + _aggregateConfiguration.ID + ")", ProjectSpecific, TargetFolder);
}
// Creates the synchronizer test table and imports it as TableInfo/ColumnInfos.
public void CreateDataset()
{
    _server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;

    using (var con = _server.GetConnection())
    {
        con.Open();
        _server.GetCommand("CREATE TABLE " + TABLE_NAME + "(Name varchar(10), Address varchar(500))", con).ExecuteNonQuery();
    }

    // Use the TABLE_NAME constant rather than a duplicated string literal so the
    // expectation cannot drift from the table created above
    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(TABLE_NAME);

    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);
}
/// <summary>
/// Creates Rdmp metadata objects (<see cref="catalogue"/>, <see cref="tableInfo"/> etc) which point to the <see cref="BulkDataTable"/>
/// </summary>
/// <returns>The forward engineered <see cref="catalogue"/></returns>
public Catalogue ImportAsCatalogue()
{
    // Import the bulk data table into TableInfo/ColumnInfo metadata
    var importer = new TableInfoImporter(_repository, BulkDataDatabase.ExpectTable(BulkDataTable));
    importer.DoImport(out tableInfo, out columnInfos);

    // Forward engineer a complete Catalogue from the imported metadata
    var engineer = new ForwardEngineerCatalogue(tableInfo, columnInfos, true);
    engineer.ExecuteForwardEngineering(out catalogue, out catalogueItems, out extractionInformations);

    // Flag chi as the extraction identifier of the dataset
    var chiColumn = extractionInformations.Single(e => e.GetRuntimeName().Equals("chi"));
    chiColumn.IsExtractionIdentifier = true;
    chiColumn.SaveToDatabase();

    return catalogue;
}
// Creates the synchronizer test table on a cleaned server and imports it.
protected override void SetUp()
{
    base.SetUp();

    _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);
    _server = _database.Server;

    using (var con = _server.GetConnection())
    {
        con.Open();
        _server.GetCommand("CREATE TABLE " + TABLE_NAME + "(Name varchar(10), Address varchar(500))", con).ExecuteNonQuery();
    }

    // Use the TABLE_NAME constant rather than a duplicated string literal so the
    // expectation cannot drift from the table created above
    var tbl = _database.ExpectTable(TABLE_NAME);

    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);
}
// Creates an imaging table from the template, imports it, forward engineers a
// Catalogue and marks PatientID / SeriesInstanceUID as extraction identifiers.
public override void Execute()
{
    base.Execute();

    var tableCreator = new ImagingTableCreation(_expectedTable.Database.Server.GetQuerySyntaxHelper());
    tableCreator.CreateTable(_expectedTable, _tableTemplate);

    // Inline out declarations replace the old pre-declared locals; the unused
    // CatalogueItem[] result is discarded
    var importer = new TableInfoImporter(_repositoryLocator.CatalogueRepository, _expectedTable);
    importer.DoImport(out var tis, out var cis);

    var engineer = new ForwardEngineerCatalogue(tis, cis, true);
    engineer.ExecuteForwardEngineering(out var cata, out _, out var eis);

    // If the template produced a PatientID column mark it as an extraction identifier
    var patientIdentifier = eis.SingleOrDefault(e => e.GetRuntimeName().Equals("PatientID"));
    if (patientIdentifier != null)
    {
        patientIdentifier.IsExtractionIdentifier = true;
        patientIdentifier.SaveToDatabase();
    }

    // Likewise SeriesInstanceUID (supports series level linkage)
    var seriesEi = eis.SingleOrDefault(e => e.GetRuntimeName().Equals("SeriesInstanceUID"));
    if (seriesEi != null)
    {
        seriesEi.IsExtractionIdentifier = true;
        seriesEi.SaveToDatabase();
    }

    //make it extractable
    new ExtractableDataSet(_repositoryLocator.DataExportRepository, cata);

    NewCatalogueCreated = cata;
}
// Imports _table as a new TableInfo and optionally forward engineers a
// Catalogue from it, then publishes whichever object is most derived.
public override void Execute()
{
    base.Execute();

    Catalogue c = null;

    var importer = new TableInfoImporter(BasicActivator.RepositoryLocator.CatalogueRepository, _table);
    importer.DoImport(out TableInfo ti, out ColumnInfo[] cis);

    BasicActivator.Show($"Successfully imported new TableInfo { ti.Name} with ID {ti.ID}");

    if (_createCatalogue)
    {
        new ForwardEngineerCatalogue(ti, cis, true).ExecuteForwardEngineering(out c, out _, out _);
        BasicActivator.Show($"Successfully imported new Catalogue { c.Name} with ID {c.ID}");
    }

    // Publish the Catalogue if one was created, otherwise the TableInfo
    Publish((DatabaseEntity)c ?? ti);
}
// Imports a table (preselected or chosen interactively) as a new TableInfo and
// optionally forward engineers a Catalogue, then publishes the result.
public override void Execute()
{
    base.Execute();

    ICatalogue c = null;

    var t = _table ?? SelectTable(false, "Select table to import");
    if (t == null)
    {
        return;
    }

    // Table valued functions require their own importer; plain tables use TableInfoImporter
    // (previous comment here said "isn't a table valued function" which inverted the condition)
    ITableInfoImporter importer;
    if (t is DiscoveredTableValuedFunction tvf)
        importer = new TableValuedFunctionImporter(BasicActivator.RepositoryLocator.CatalogueRepository, tvf);
    else
        importer = new TableInfoImporter(BasicActivator.RepositoryLocator.CatalogueRepository, t);

    importer.DoImport(out var ti, out ColumnInfo[] cis);

    BasicActivator.Show($"Successfully imported new TableInfo { ti.Name} with ID {ti.ID}");

    if (_createCatalogue)
    {
        var forwardEngineer = new ForwardEngineerCatalogue(ti, cis, true);
        forwardEngineer.ExecuteForwardEngineering(out c, out _, out _);

        BasicActivator.Show($"Successfully imported new Catalogue { c.Name} with ID {c.ID}");
    }

    // Publish the Catalogue if one was created, otherwise the TableInfo
    Publish((IMapsDirectlyToDatabaseTable)c ?? ti);
}
// Creates a simple non-TVF table, imports it and forward engineers a Catalogue
// whose single column (chi) is flagged as the extraction identifier.
private void CreateANormalCatalogue()
{
    var svr = DiscoveredDatabaseICanCreateRandomTablesIn.Server;

    using (var con = svr.GetConnection())
    {
        con.Open();
        svr.GetCommand("CREATE TABLE NonTVFTable ( chi varchar(10))", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0101010101')", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0202020202')", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0303030303')", con).ExecuteNonQuery();
    }

    // Inline out declarations replace the old pre-declared locals (consistent
    // with the sibling CreateANormalCatalogue implementation in this codebase)
    var importer = new TableInfoImporter(CatalogueRepository, svr.Name, DiscoveredDatabaseICanCreateRandomTablesIn.GetRuntimeName(), "NonTVFTable", DatabaseType.MicrosoftSQLServer);
    importer.DoImport(out var tbl, out var cols);

    var engineer = new ForwardEngineerCatalogue(tbl, cols, true);
    engineer.ExecuteForwardEngineering(out var cata, out _, out var eis);

    // Only one column exists so Single() gives us chi; mark it as the identifier
    _nonTvfExtractionIdentifier = eis.Single();
    _nonTvfExtractionIdentifier.IsExtractionIdentifier = true;
    _nonTvfExtractionIdentifier.SaveToDatabase();

    _nonTvfCatalogue = cata;
    _nonTvfTableInfo = tbl;
}
// After a file upload pipeline completes: validates the destination actually
// created a table, imports it and launches Catalogue creation for it.
private void OnPipelineCompleted(object sender, PipelineEngineEventArgs args, DiscoveredDatabase db)
{
    var engine = args.PipelineEngine;

    //todo figure out what it created
    var dest = engine.DestinationObject as DataTableUploadDestination;
    if (dest == null)
    {
        throw new Exception($"Destination of engine was unexpectedly not a DataTableUploadDestination despite use case {nameof(UploadFileUseCase)}");
    }

    // Bug fix: the old message interpolated dest.TargetTableName which is known to be
    // blank/null at this point - name the property instead so the error is meaningful
    if (string.IsNullOrWhiteSpace(dest.TargetTableName))
    {
        throw new Exception($"Destination of engine failed to populate {nameof(dest.TargetTableName)}");
    }

    var tbl = db.ExpectTable(dest.TargetTableName);
    if (!tbl.Exists())
    {
        throw new Exception($"Destination of engine claimed to have created {tbl.GetFullyQualifiedName()} but it did not exist");
    }

    var importer = new TableInfoImporter(BasicActivator.RepositoryLocator.CatalogueRepository, tbl);
    importer.DoImport(out var ti, out _);

    var cata = BasicActivator.CreateAndConfigureCatalogue(ti, null, $"Import of file '{File.FullName}' by {Environment.UserName} on {DateTime.Now}", ProjectSpecific, TargetFolder);

    if (cata != null)
    {
        Publish(cata);
        Emphasise(cata);
    }
}
// Uploads a single-column numeric table, imports it as a Catalogue, builds an
// extraction filter with an int parameter (@n = 20) and verifies the generated
// query returns only the matching row.
public void Test_DatabaseTypeQueryWithParameter_IntParameter(DatabaseType dbType)
{
    //Pick the destination server
    var tableName = TestDatabaseNames.GetConsistentName("tbl");

    //make sure there's a database ready to receive the data
    var db = GetCleanedServer(dbType);
    db.Create(true);

    //this is the table we are uploading
    var dt = new DataTable();
    dt.Columns.Add("numbercol");
    dt.Rows.Add(10);
    dt.Rows.Add(15);
    dt.Rows.Add(20);
    dt.Rows.Add(25);
    dt.TableName = tableName;

    try
    {
        ///////////////////////UPLOAD THE DataTable TO THE DESTINATION////////////////////////////////////////////
        var uploader = new DataTableUploadDestination();
        uploader.PreInitialize(db, new ThrowImmediatelyDataLoadJob());
        uploader.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken());
        uploader.Dispose(new ThrowImmediatelyDataLoadJob(), null);

        // Import the uploaded table and forward engineer a Catalogue for it
        var tbl = db.ExpectTable(tableName);
        var importer = new TableInfoImporter(CatalogueRepository, tbl);
        importer.DoImport(out var ti, out var ci);

        var engineer = new ForwardEngineerCatalogue(ti, ci, true);
        engineer.ExecuteForwardEngineering(out var cata, out var cis, out var ei);
        /////////////////////////////////////////////////////////////////////////////////////////////////////////

        /////////////////////////////////THE ACTUAL PROPER TEST////////////////////////////////////
        //create an extraction filter
        var extractionInformation = ei.Single();
        var filter = new ExtractionFilter(CatalogueRepository, "Filter by numbers", extractionInformation);
        filter.WhereSQL = extractionInformation.SelectSQL + " = @n";
        filter.SaveToDatabase();

        //create the parameters for filter (no globals, masters or scope adjacent parameters)
        new ParameterCreator(filter.GetFilterFactory(), null, null).CreateAll(filter, null);

        var p = filter.GetAllParameters().Single();
        Assert.AreEqual("@n", p.ParameterName);
        p.ParameterSQL = p.ParameterSQL.Replace("varchar(50)", "int"); //make it int
        p.Value = "20";
        p.SaveToDatabase();

        // Build a query over the single column with the filter as the root container
        var qb = new QueryBuilder(null, null);
        qb.AddColumn(extractionInformation);
        qb.RootFilterContainer = new SpontaneouslyInventedFilterContainer(new MemoryCatalogueRepository(), null, new[] { filter }, FilterContainerOperation.AND);

        // Run the generated SQL - only the row where numbercol = 20 should come back
        using (var con = db.Server.GetConnection())
        {
            con.Open();
            string sql = qb.SQL;
            var cmd = db.Server.GetCommand(sql, con);
            var r = cmd.ExecuteReader();
            Assert.IsTrue(r.Read());
            Assert.AreEqual( 20, r[extractionInformation.GetRuntimeName()]);
        }
        ///////////////////////////////////////////////////////////////////////////////////////
    }
    finally
    {
        db.Drop();
    }
}
// Creates and populates the [ANOMigration] test table (composite primary key over
// AdmissionDate/Condition1/CHI), imports it as a TableInfo, and sets up an
// "ANOCondition" ANOTable ready for anonymisation migration tests.
protected override void SetUp()
{
    base.SetUp();

    var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);

    BlitzMainDataTables();

    DeleteANOEndpoint();

    // Remove any ANOTable left over from a previous run of this fixture
    ANOTable remnantANO = CatalogueRepository.GetAllObjects <ANOTable>().SingleOrDefault(a => a.TableName.Equals("ANOCondition"));
    if (remnantANO != null)
    {
        remnantANO.DeleteInDatabase();
    }

    //cleanup (remove remnant TableInfos pointing at the test table)
    foreach (var remnant in CatalogueRepository.GetAllObjects <TableInfo>().Where(t => t.GetRuntimeName().Equals(TableName)))
    {
        remnant.DeleteInDatabase();
    }

    // Table definition plus seed rows (dates are binary-cast DateTime literals)
    const string sql = @" CREATE TABLE [ANOMigration]( [AdmissionDate] [datetime] NOT NULL, [DischargeDate] [datetime] NOT NULL, [Condition1] [varchar](4) NOT NULL, [Condition2] [varchar](4) NULL, [Condition3] [varchar](4) NULL, [Condition4] [varchar](4) NULL, [CHI] [varchar](10) NOT NULL CONSTRAINT [PK_ANOMigration] PRIMARY KEY CLUSTERED ( [AdmissionDate] ASC, [Condition1] ASC, [CHI] ASC )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY] ) ON [PRIMARY] INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000001B300000000 AS DateTime), CAST(0x000001B600000000 AS DateTime), N'Z61', N'Z29', NULL, N'Z11', N'0809003082') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000021D00000000 AS DateTime), CAST(0x0000022600000000 AS DateTime), N'P024', N'Q230', NULL,N'Z11', N'1610007810') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000032900000000 AS DateTime), CAST(0x0000032A00000000 AS DateTime), N'L73', NULL, NULL, NULL, N'2407011022') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000004EA00000000 AS DateTime), CAST(0x000004EA00000000 AS DateTime), 
N'Y523', N'Z29', NULL, NULL, N'1104015472') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000060300000000 AS DateTime), CAST(0x0000060800000000 AS DateTime), N'F721', N'B871', NULL, NULL, N'0203025927') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000065300000000 AS DateTime), CAST(0x0000065700000000 AS DateTime), N'Z914', N'J398', NULL, NULL, N'2702024715') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000070100000000 AS DateTime), CAST(0x0000070800000000 AS DateTime), N'N009', N'V698', NULL, NULL, N'1610007810') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000077000000000 AS DateTime), CAST(0x0000077200000000 AS DateTime), N'E44', N'J050', N'Q560', NULL, N'1610007810') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000007E800000000 AS DateTime), CAST(0x000007EA00000000 AS DateTime), N'Q824', NULL, NULL, NULL, N'1110029231') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000087700000000 AS DateTime), CAST(0x0000087F00000000 AS DateTime), N'T020', NULL, NULL, NULL, N'2110021261') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000088A00000000 AS DateTime), CAST(0x0000089300000000 AS DateTime), N'G009', NULL, NULL, NULL, N'0706013071') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000008CA00000000 AS DateTime), CAST(0x000008D100000000 AS DateTime), N'T47', 
N'H311', N'O037', NULL, N'1204057592')";

    var server = db.Server;
    using (var con = server.GetConnection())
    {
        con.Open();
        server.GetCommand(sql, con).ExecuteNonQuery();
    }

    // Import the newly created table into the catalogue metadata
    var table = db.ExpectTable(TableName);
    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, table);
    importer.DoImport(out _tableInfo, out _columnInfos);

    //Configure the structure of the ANO transform we want - identifiers should have 3 characters and 2 ints and end with _C
    _anoConditionTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCondition", "C");
    _anoConditionTable.NumberOfCharactersToUseInAnonymousRepresentation = 3;
    _anoConditionTable.NumberOfIntegersToUseInAnonymousRepresentation = 2;
    _anoConditionTable.SaveToDatabase();
    _anoConditionTable.PushToANOServerAsNewTable("varchar(4)", new ThrowImmediatelyCheckNotifier());
}
/// <summary>
/// Executes the planned migration: creates ANO (anonymised) versions of all non-skipped tables in the
/// target database, imports them as new TableInfos/ColumnInfos, clones the Catalogue (with "ANO" prefix),
/// rewires CatalogueItems/ExtractionInformations to the new columns, migrates JoinInfos/Lookups and
/// finally creates a LoadMetadata (and LoadProgress if date-based).  Everything runs in a single
/// catalogue-repository transaction which is rolled back on any failure.
/// </summary>
/// <exception cref="Exception">Thrown if no TargetDatabase is set on the plan manager, or if any step
/// fails (inner exception preserved; transaction is rolled back first).</exception>
public void Execute()
{
    if (_planManager.TargetDatabase == null)
    {
        throw new Exception("PlanManager has no TargetDatabase set");
    }

    // In-memory repository used for throwaway query-building objects (IColumns, spontaneous filters)
    // that must never be persisted to the real catalogue database.
    var memoryRepo = new MemoryCatalogueRepository();

    using (_catalogueRepository.BeginNewTransactedConnection())
    {
        try
        {
            // For each skipped table: we might have to refactor or port JoinInfos to these tables,
            // so establish the old->new parenthood of their columns up front (strict lookup).
            foreach (var skippedTable in _planManager.SkippedTables)
            {
                foreach (ColumnInfo columnInfo in skippedTable.ColumnInfos)
                {
                    GetNewColumnInfoForOld(columnInfo, true);
                }
            }

            // For each table that isn't being skipped, build the destination table definition and the
            // SELECT query that will read the source data during migration.
            foreach (var oldTableInfo in _planManager.TableInfos.Except(_planManager.SkippedTables))
            {
                List<DatabaseColumnRequest> columnsToCreate = new List<DatabaseColumnRequest>();

                // Maps destination column name (possibly ANO-prefixed) back to the source ColumnInfo.
                // Case-insensitive because database column names are matched case-insensitively.
                Dictionary<string, ColumnInfo> migratedColumns = new Dictionary<string, ColumnInfo>(StringComparer.CurrentCultureIgnoreCase);

                var querybuilderForMigratingTable = new QueryBuilder(null, null);

                // For each column we are not skipping (Drop), work out the endpoint datatype (the planner knows this).
                foreach (ColumnInfo columnInfo in oldTableInfo.ColumnInfos)
                {
                    var columnPlan = _planManager.GetPlanForColumnInfo(columnInfo);

                    if (columnPlan.Plan != Plan.Drop)
                    {
                        // Add the column verbatim to the query builder because we know we have to read it from source.
                        querybuilderForMigratingTable.AddColumn(new ColumnInfoToIColumn(memoryRepo, columnInfo));

                        string colName = columnInfo.GetRuntimeName();

                        // If it is being ANO-tabled then give the column name an "ANO" prefix.
                        if (columnPlan.Plan == Plan.ANO)
                        {
                            colName = "ANO" + colName;
                        }

                        migratedColumns.Add(colName, columnInfo);

                        // Primary-key columns stay non-nullable; everything else is created nullable.
                        columnsToCreate.Add(new DatabaseColumnRequest(colName, columnPlan.GetEndpointDataType(), !columnInfo.IsPrimaryKey) { IsPrimaryKey = columnInfo.IsPrimaryKey });
                    }
                }

                SelectSQLForMigrations.Add(oldTableInfo, querybuilderForMigratingTable);

                // Create the actual table in the target database.
                var tbl = _planManager.TargetDatabase.CreateTable(oldTableInfo.GetRuntimeName(), columnsToCreate.ToArray());

                // Import the created table into the catalogue.
                TableInfoImporter importer = new TableInfoImporter(_catalogueRepository, tbl);
                importer.DoImport(out var newTableInfo, out var newColumnInfos);

                // Audit the parenthood of the TableInfo/ColumnInfos (old -> new mapping).
                AuditParenthood(oldTableInfo, newTableInfo);

                foreach (ColumnInfo newColumnInfo in newColumnInfos)
                {
                    var oldColumnInfo = migratedColumns[newColumnInfo.GetRuntimeName()];

                    var columnPlan = _planManager.GetPlanForColumnInfo(oldColumnInfo);

                    // Record which ANOTable anonymised this column.
                    if (columnPlan.Plan == Plan.ANO)
                    {
                        newColumnInfo.ANOTable_ID = columnPlan.ANOTable.ID;
                        newColumnInfo.SaveToDatabase();
                    }

                    // If there was a dilution configured we need to set up a virtual DLE load-only column
                    // of the input type (this ensures RAW has a valid datatype).
                    if (columnPlan.Plan == Plan.Dilute)
                    {
                        // Create a discarded (load only) column with name matching the new ColumnInfo.
                        var discard = new PreLoadDiscardedColumn(_catalogueRepository, newTableInfo, newColumnInfo.GetRuntimeName());

                        // Record that it exists to support dilution and that the data type matches the
                        // input (old) ColumnInfo (i.e. NOT the new, diluted, data type!).
                        discard.Destination = DiscardedColumnDestination.Dilute;
                        discard.SqlDataType = oldColumnInfo.Data_type;
                        discard.SaveToDatabase();

                        DilutionOperationsForMigrations.Add(discard, columnPlan.Dilution);
                    }

                    AuditParenthood(oldColumnInfo, newColumnInfo);
                }

                // Dilution requires an identifier dump server to stash the pre-dilution values.
                if (DilutionOperationsForMigrations.Any())
                {
                    newTableInfo.IdentifierDumpServer_ID = _planManager.GetIdentifierDumpServer().ID;
                    newTableInfo.SaveToDatabase();
                }
            }

            // Clone the Catalogue itself, prefixing the name with ANO and moving it into an \anonymous folder.
            NewCatalogue = _planManager.Catalogue.ShallowClone();
            NewCatalogue.Name = "ANO" + _planManager.Catalogue.Name;
            NewCatalogue.Folder = new CatalogueFolder(NewCatalogue, "\\anonymous" + NewCatalogue.Folder.Path);
            NewCatalogue.SaveToDatabase();

            AuditParenthood(_planManager.Catalogue, NewCatalogue);

            // For each of the old ExtractionInformations (95% of the time that's just a reference to a
            // ColumnInfo e.g. '[People].[Height]' but 5% of the time it's some horrible aliased transform
            // e.g. 'dbo.RunMyCoolFunction([People].[Height]) as BigHeight').
            foreach (CatalogueItem oldCatalogueItem in _planManager.Catalogue.CatalogueItems)
            {
                var oldColumnInfo = oldCatalogueItem.ColumnInfo;

                // Catalogue item is not connected to any ColumnInfo.
                if (oldColumnInfo == null)
                {
                    continue;
                }

                var columnPlan = _planManager.GetPlanForColumnInfo(oldColumnInfo);

                // We are not migrating it anyway.
                if (columnPlan.Plan == Plan.Drop)
                {
                    continue;
                }

                ColumnInfo newColumnInfo = GetNewColumnInfoForOld(oldColumnInfo);

                var newCatalogueItem = oldCatalogueItem.ShallowClone(NewCatalogue);

                // Rewire its ColumnInfo to the cloned child one.
                newCatalogueItem.ColumnInfo_ID = newColumnInfo.ID;

                // If the old CatalogueItem had the same name as its underlying ColumnInfo then we should
                // use the new one; otherwise just copy the old name whatever it was.
                newCatalogueItem.Name = oldCatalogueItem.Name.Equals(oldColumnInfo.Name) ? newColumnInfo.GetRuntimeName() : oldCatalogueItem.Name;

                // Add ANO to the front if the underlying column was ANOed (and the name doesn't already have it).
                if (newColumnInfo.GetRuntimeName().StartsWith("ANO") && !newCatalogueItem.Name.StartsWith("ANO"))
                {
                    newCatalogueItem.Name = "ANO" + newCatalogueItem.Name;
                }

                newCatalogueItem.SaveToDatabase();

                var oldExtractionInformation = oldCatalogueItem.ExtractionInformation;

                // If the plan is to make the ColumnInfo extractable.
                if (columnPlan.ExtractionCategoryIfAny != null)
                {
                    // Create a new ExtractionInformation for the new Catalogue.
                    var newExtractionInformation = new ExtractionInformation(_catalogueRepository, newCatalogueItem, newColumnInfo, newColumnInfo.Name);

                    newExtractionInformation.ExtractionCategory = columnPlan.ExtractionCategoryIfAny.Value;
                    newExtractionInformation.SaveToDatabase();

                    // If it was previously extractable, carry over the SELECT SQL and extraction settings.
                    if (oldExtractionInformation != null)
                    {
                        var refactorer = new SelectSQLRefactorer();

                        // Restore the old SQL as it existed in the origin table.
                        newExtractionInformation.SelectSQL = oldExtractionInformation.SelectSQL;

                        // Do a refactor on the old column name for the new column name (strict: must find a reference).
                        refactorer.RefactorColumnName(newExtractionInformation, oldColumnInfo, newColumnInfo.Name, true);

                        // Also refactor any other column names that might be referenced by the transform SQL
                        // e.g. it could be a combo column where forename + surname is the value of the ExtractionInformation.
                        foreach (var kvpOtherCols in _parenthoodDictionary.Where(kvp => kvp.Key is ColumnInfo))
                        {
                            // If it's the one we have already done, don't do it again.
                            if (Equals(kvpOtherCols.Value, newColumnInfo))
                            {
                                continue;
                            }

                            // Otherwise do a non-strict refactoring (don't worry if no references are found).
                            refactorer.RefactorColumnName(newExtractionInformation, (ColumnInfo)kvpOtherCols.Key, ((ColumnInfo)(kvpOtherCols.Value)).Name, false);
                        }

                        // Make the new one exactly as extractable as the old one.
                        newExtractionInformation.Order = oldExtractionInformation.Order;
                        newExtractionInformation.Alias = oldExtractionInformation.Alias;
                        newExtractionInformation.IsExtractionIdentifier = oldExtractionInformation.IsExtractionIdentifier;
                        newExtractionInformation.HashOnDataRelease = oldExtractionInformation.HashOnDataRelease;
                        newExtractionInformation.IsPrimaryKey = oldExtractionInformation.IsPrimaryKey;
                        newExtractionInformation.SaveToDatabase();
                    }

                    // NOTE: parenthood of the CatalogueItem is only audited when an ExtractionInformation
                    // was created (i.e. inside this if block) — matches original control flow.
                    AuditParenthood(oldCatalogueItem, newCatalogueItem);

                    if (oldExtractionInformation != null)
                    {
                        AuditParenthood(oldExtractionInformation, newExtractionInformation);
                    }
                }
            }

            var existingJoinInfos = _catalogueRepository.GetAllObjects<JoinInfo>();
            var existingLookups = _catalogueRepository.GetAllObjects<Lookup>();
            var existingLookupComposites = _catalogueRepository.GetAllObjects<LookupCompositeJoinInfo>();

            // Migrate JoinInfos to reference the new (ANO) columns.
            foreach (JoinInfo joinInfo in _planManager.GetJoinInfosRequiredCatalogue())
            {
                var newFk = GetNewColumnInfoForOld(joinInfo.ForeignKey);
                var newPk = GetNewColumnInfoForOld(joinInfo.PrimaryKey);

                // Skip if it already exists.
                if (!existingJoinInfos.Any(ej => ej.ForeignKey_ID == newFk.ID && ej.PrimaryKey_ID == newPk.ID))
                {
                    new JoinInfo(_catalogueRepository, newFk, newPk, joinInfo.ExtractionJoinType, joinInfo.Collation); //create it (constructor persists)
                }
            }

            // Migrate Lookups.
            foreach (Lookup lookup in _planManager.GetLookupsRequiredCatalogue())
            {
                // Find the new columns in the ANO table that match the old lookup columns.
                var newDesc = GetNewColumnInfoForOld(lookup.Description);
                var newFk = GetNewColumnInfoForOld(lookup.ForeignKey);
                var newPk = GetNewColumnInfoForOld(lookup.PrimaryKey);

                // See if we already have a Lookup declared for the NEW columns (unlikely).
                Lookup newLookup = existingLookups.SingleOrDefault(l => l.Description_ID == newDesc.ID && l.ForeignKey_ID == newFk.ID);

                // Create a new Lookup that mirrors the old but references the ANO columns instead.
                if (newLookup == null)
                {
                    newLookup = new Lookup(_catalogueRepository, newDesc, newFk, newPk, lookup.ExtractionJoinType, lookup.Collation);
                }

                // Also mirror any composite (secondary/tertiary) join column pairs needed for the Lookup
                // to operate correctly e.g. where TestCode 'HAB1' means 2 different things depending on healthboard.
                foreach (LookupCompositeJoinInfo compositeJoin in lookup.GetSupplementalJoins().Cast<LookupCompositeJoinInfo>())
                {
                    var newCompositeFk = GetNewColumnInfoForOld(compositeJoin.ForeignKey);
                    var newCompositePk = GetNewColumnInfoForOld(compositeJoin.PrimaryKey);

                    if (!existingLookupComposites.Any(c => c.ForeignKey_ID == newCompositeFk.ID && c.PrimaryKey_ID == newCompositePk.ID))
                    {
                        new LookupCompositeJoinInfo(_catalogueRepository, newLookup, newCompositeFk, newCompositePk, compositeJoin.Collation);
                    }
                }
            }

            // Create the new data load configuration for the ANO catalogue.
            LoadMetadata = new LoadMetadata(_catalogueRepository, "Anonymising " + NewCatalogue);
            LoadMetadata.EnsureLoggingWorksFor(NewCatalogue);

            NewCatalogue.LoadMetadata_ID = LoadMetadata.ID;
            NewCatalogue.SaveToDatabase();

            if (_planManager.DateColumn != null)
            {
                LoadProgressIfAny = new LoadProgress(_catalogueRepository, LoadMetadata);
                LoadProgressIfAny.OriginDate = _planManager.StartDate;
                LoadProgressIfAny.SaveToDatabase();

                // Date-column based migration only works for single TableInfo migrations (see Plan Manager checks),
                // so pick the single non-lookup table's query and constrain it to the batch window.
                var qb = SelectSQLForMigrations.Single(kvp => !kvp.Key.IsLookupTable()).Value;

                qb.RootFilterContainer = new SpontaneouslyInventedFilterContainer(memoryRepo, null,
                    new[]
                    {
                        new SpontaneouslyInventedFilter(memoryRepo, null, _planManager.DateColumn + " >= @startDate", "After batch start date", "", null),
                        new SpontaneouslyInventedFilter(memoryRepo, null, _planManager.DateColumn + " <= @endDate", "Before batch end date", "", null),
                    }
                    , FilterContainerOperation.AND);
            }

            // Force generation of all migration SQL now so any query-building error aborts (and rolls back)
            // the whole operation rather than surfacing later during the load.
            try
            {
                foreach (QueryBuilder qb in SelectSQLForMigrations.Values)
                {
                    Console.WriteLine(qb.SQL);
                }
            }
            catch (Exception e)
            {
                throw new Exception("Failed to generate migration SQL", e);
            }

            _catalogueRepository.EndTransactedConnection(true);
        }
        catch (Exception ex)
        {
            // Roll back everything created above, then rethrow with context.
            _catalogueRepository.EndTransactedConnection(false);

            throw new Exception("Failed to create ANO version, transaction rolled back succesfully", ex);
        }
    }
}
/// <summary>
/// UI command: imports every table of a user-selected database as TableInfos/ColumnInfos, optionally
/// loading ShareDefinition (.sd) metadata files and pairing the imported columns with matching
/// Catalogues/CatalogueItems (by name).  Tables already known to the catalogue are skipped and reported.
/// </summary>
public override void Execute()
{
    base.Execute();

    var db = SelectDatabase(false, "Import all Tables form Database...");

    // User cancelled the database picker.
    if (db == null)
    {
        return;
    }

    ShareManager shareManager = new ShareManager(Activator.RepositoryLocator, LocalReferenceGetter);

    List<ICatalogue> catalogues = new List<ICatalogue>();

    // Don't do any double importing! Tables already known as TableInfos are skipped below.
    var existing = Activator.RepositoryLocator.CatalogueRepository.GetAllObjects<TableInfo>();
    var ignoredTables = new List<TableInfo>();

    // Optionally load Catalogue metadata from ShareDefinition files selected by the user.
    if (YesNo("Would you also like to import ShareDefinitions (metadata)?", "Import Metadata From File(s)"))
    {
        OpenFileDialog ofd = new OpenFileDialog() { Multiselect = true };
        ofd.Filter = "Share Definitions|*.sd";

        if (ofd.ShowDialog() == DialogResult.OK)
        {
            foreach (var f in ofd.FileNames)
            {
                using (var stream = File.Open(f, FileMode.Open))
                {
                    var newObjects = shareManager.ImportSharedObject(stream);

                    // Collect any Catalogues that came out of the share file for name-matching below.
                    if (newObjects != null)
                    {
                        catalogues.AddRange(newObjects.OfType<ICatalogue>());
                    }
                }
            }
        }
    }

    bool generateCatalogues = false;

    // Either consider ALL existing Catalogues as name-match candidates, or create brand new
    // empty Catalogues for tables that have no match.
    if (YesNo("Would you like to try to guess non-matching Catalogues by Name?", "Guess by name"))
    {
        catalogues.AddRange(Activator.RepositoryLocator.CatalogueRepository.GetAllObjects<Catalogue>());
    }
    else if (YesNo("Would you like to generate empty Catalogues for non-matching tables instead?", "Generate New Catalogues"))
    {
        generateCatalogues = true;
    }

    // CatalogueItem -> ColumnInfo pairs that got hooked up ("married") during this run.
    var married = new Dictionary<CatalogueItem, ColumnInfo>();

    TableInfo anyNewTable = null;

    foreach (DiscoveredTable discoveredTable in db.DiscoverTables(includeViews: false))
    {
        // Skip tables that already exist as TableInfos (record them for the summary message).
        var collide = existing.FirstOrDefault(t => t.Is(discoveredTable));

        if (collide != null)
        {
            ignoredTables.Add(collide);
            continue;
        }

        var importer = new TableInfoImporter(Activator.RepositoryLocator.CatalogueRepository, discoveredTable);

        TableInfo ti;
        ColumnInfo[] cis;

        // Import the table.
        importer.DoImport(out ti, out cis);

        // Remember the first newly imported table so we can publish/emphasise it at the end.
        anyNewTable = anyNewTable ?? ti;

        // Find a Catalogue of the same name (possibly imported from a Share Definition).
        var matchingCatalogues = catalogues.Where(c => c.Name.Equals(ti.GetRuntimeName(), StringComparison.CurrentCultureIgnoreCase)).ToArray();

        // Only act if there's exactly 1 Catalogue with the same name (ambiguous matches are left alone).
        if (matchingCatalogues.Length == 1)
        {
            // We know we want to import all these ColumnInfos.
            var unmatched = new List<ColumnInfo>(cis);

            // But hopefully most already have orphan CatalogueItems we can hook them together to.
            foreach (var cataItem in matchingCatalogues[0].CatalogueItems)
            {
                if (cataItem.ColumnInfo_ID == null)
                {
                    var matches = cataItem.GuessAssociatedColumn(cis, allowPartial: false).ToArray();

                    if (matches.Length == 1)
                    {
                        cataItem.SetColumnInfo(matches[0]);
                        unmatched.Remove(matches[0]);

                        // We married them together.
                        married.Add(cataItem, matches[0]);
                    }
                }
            }

            // Is anyone unmarried? i.e. new ColumnInfos that don't have CatalogueItems with the same name —
            // create a fresh CatalogueItem for each.
            foreach (ColumnInfo columnInfo in unmatched)
            {
                var cataItem = new CatalogueItem(Activator.RepositoryLocator.CatalogueRepository, (Catalogue)matchingCatalogues[0], columnInfo.GetRuntimeName());
                cataItem.ColumnInfo_ID = columnInfo.ID;
                cataItem.SaveToDatabase();
                married.Add(cataItem, columnInfo);
            }
        }
        else if (generateCatalogues)
        {
            new ForwardEngineerCatalogue(ti, cis).ExecuteForwardEngineering();
        }
    }

    if (married.Any() && YesNo("Found " + married.Count + " columns, make them all extractable?", "Make Extractable"))
    {
        foreach (var kvp in married)
        {
            // Yup, that's how we roll — the database is main memory! The constructor itself persists
            // the ExtractionInformation; the local variable is intentionally unused.
            var ei = new ExtractionInformation(Activator.RepositoryLocator.CatalogueRepository, kvp.Key, kvp.Value, kvp.Value.Name);
        }
    }

    // Tell the user which tables were skipped because they were already imported.
    if (ignoredTables.Any())
    {
        WideMessageBox.Show("Ignored some tables", "Ignored " + ignoredTables.Count + " tables because they already existed as TableInfos:" + string.Join(Environment.NewLine, ignoredTables.Select(ti => ti.GetRuntimeName())));
    }

    // Refresh the UI and highlight one of the newly imported tables.
    if (anyNewTable != null)
    {
        Publish(anyNewTable);
        Emphasise(anyNewTable);
    }
}
/// <summary>
/// Tests ANO-migration when one table ("Necks") is skipped because it already exists in the destination
/// database.  Covers: (a) whether a TableInfo already exists for the skipped table (engine should succeed)
/// or not (engine should fail with a clear error), and (b) round-tripping the plan through JSON serialization.
/// </summary>
/// <param name="tableInfoAlreadyExistsForSkippedTable">True to import the destination Necks table as a TableInfo before running the engine.</param>
/// <param name="putPlanThroughSerialization">True to serialize/deserialize the plan manager before execution.</param>
public void CreateANOVersion_TestSkippingTables(bool tableInfoAlreadyExistsForSkippedTable, bool putPlanThroughSerialization)
{
    var dbFrom = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.GetConsistentName("CreateANOVersion_TestSkippingTables_From"));
    var dbTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.GetConsistentName("CreateANOVersion_TestSkippingTables_To"));

    dbFrom.Create(true);
    dbTo.Create(true);

    try
    {
        // Source tables: Heads (to be migrated) and Necks (to be skipped).
        var tblFromHeads = dbFrom.CreateTable("Heads", new[]
        {
            new DatabaseColumnRequest("SkullColor", "varchar(10)"),
            new DatabaseColumnRequest("Vertebrae", "varchar(25)")
        });

        var cols = new[]
        {
            new DatabaseColumnRequest("SpineColor", "varchar(10)"),
            new DatabaseColumnRequest("Vertebrae", "varchar(25)")
        };

        var tblFromNeck = dbFrom.CreateTable("Necks", cols);

        // Necks table already exists in the destination so will be skipped for migration but still needs to be imported.
        var tblToNeck = dbTo.CreateTable("Necks", cols);

        TableInfo fromHeadsTableInfo;
        ColumnInfo[] fromHeadsColumnInfo;
        TableInfo fromNeckTableInfo;
        ColumnInfo[] fromNeckColumnInfo;
        TableInfo toNecksTableInfo = null;
        ColumnInfo[] toNecksColumnInfo = null;

        TableInfoImporter i1 = new TableInfoImporter(CatalogueRepository, tblFromHeads);
        i1.DoImport(out fromHeadsTableInfo, out fromHeadsColumnInfo);

        TableInfoImporter i2 = new TableInfoImporter(CatalogueRepository, tblFromNeck);
        i2.DoImport(out fromNeckTableInfo, out fromNeckColumnInfo);

        // Table already exists in the destination, but does the Catalogue reference (TableInfo) exist?
        if (tableInfoAlreadyExistsForSkippedTable)
        {
            TableInfoImporter i3 = new TableInfoImporter(CatalogueRepository, tblToNeck);
            i3.DoImport(out toNecksTableInfo, out toNecksColumnInfo);
        }

        // Create a JoinInfo so the query builder knows how to connect the tables.
        new JoinInfo(CatalogueRepository,
            fromHeadsColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")),
            fromNeckColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")),
            ExtractionJoinType.Inner, null);

        var cataEngineer = new ForwardEngineerCatalogue(fromHeadsTableInfo, fromHeadsColumnInfo, true);

        Catalogue cata;
        CatalogueItem[] cataItems;
        ExtractionInformation[] extractionInformations;
        cataEngineer.ExecuteForwardEngineering(out cata, out cataItems, out extractionInformations);

        var cataEngineer2 = new ForwardEngineerCatalogue(fromNeckTableInfo, fromNeckColumnInfo, true);
        cataEngineer2.ExecuteForwardEngineering(cata);

        // 4 extraction informations in the Catalogue (2 from Heads and 2 from Necks).
        Assert.AreEqual(cata.GetAllExtractionInformation(ExtractionCategory.Any).Count(), 4);

        // Set up an ANOTable for the SkullColor column.
        var anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOSkullColor", "C");
        anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 10;
        anoTable.SaveToDatabase();
        anoTable.PushToANOServerAsNewTable("varchar(10)", new ThrowImmediatelyCheckNotifier());

        //////////////////The actual test!/////////////////
        var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata);

        // ANO the column SkullColor.
        var scPlan = planManager.GetPlanForColumnInfo(fromHeadsColumnInfo.Single(col => col.GetRuntimeName().Equals("SkullColor")));
        scPlan.ANOTable = anoTable;
        scPlan.Plan = Plan.ANO;

        if (putPlanThroughSerialization)
        {
            var asString = JsonConvertExtensions.SerializeObject(planManager, RepositoryLocator);
            planManager = (ForwardEngineerANOCataloguePlanManager)JsonConvertExtensions.DeserializeObject(asString, typeof(ForwardEngineerANOCataloguePlanManager), RepositoryLocator);
        }

        // Not part of serialization — must be reapplied after the round trip.
        planManager.TargetDatabase = dbTo;
        planManager.SkippedTables.Add(fromNeckTableInfo); //skip the necks table because it already exists (ColumnInfos may or may not exist but physical table definetly does)

        var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);

        if (!tableInfoAlreadyExistsForSkippedTable)
        {
            var ex = Assert.Throws<Exception>(engine.Execute);
            Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, "Found '0' ColumnInfos called"));

            // BUGFIX: the pattern must be escaped — unescaped, "[Necks].[SpineColor]" is a regex
            // character-class pattern (one char of N/e/c/k/s, any char, one char of S/p/i/n/e/C/o/l/r)
            // that matches nearly any message, so the assertion never actually verified the column name.
            Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, Regex.Escape("[Necks].[SpineColor]")));
            return;
        }
        else
        {
            engine.Execute();
        }

        var newCata = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Name.Equals("ANOHeads"));
        Assert.IsTrue(newCata.Exists());

        var newCataItems = newCata.CatalogueItems;
        Assert.AreEqual(newCataItems.Count(), 4); //should be extraction informations

        // All extraction informations should point to the new table location.
        Assert.IsTrue(newCataItems.All(ci => ci.ExtractionInformation.SelectSQL.Contains(dbTo.GetRuntimeName())));

        // These columns should all exist.
        Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SkullColor")));
        Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SpineColor")));
        Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("Vertebrae"))); //actually there will be 2 copies of this one from Necks one from Heads

        // New ColumnInfo should have a reference to the ANOTable.
        Assert.IsTrue(newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo.ANOTable_ID == anoTable.ID);

        // NOTE(review): despite its name this variable is fetched from "ANOSkullColor" (not SpineColor),
        // which makes the following Contains assertion tautological (the element came from the same
        // collection).  Preserved as-is to keep test behavior unchanged — confirm intent with the author.
        var newSpineColorColumnInfo = newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo;

        // Table info already existed; make sure the new CatalogueItems point to the same ColumnInfos/TableInfos.
        Assert.IsTrue(newCataItems.Select(ci => ci.ColumnInfo).Contains(newSpineColorColumnInfo));
    }
    finally
    {
        dbFrom.Drop();
        dbTo.Drop();
    }
}
/// <summary>
/// End-to-end test of ANO catalogue creation for a Catalogue that has a Lookup with a composite join
/// (sex + healthboard), a transform column, a combo (forename + surname) column and assorted extraction
/// settings (hashing, extraction identifier, categories).  Verifies the new ANO Catalogue mirrors all
/// ExtractionInformations (order, names, categories, flags), that transforms are refactored to the new
/// table names, and that the Lookup/composite join were re-created against the new columns.
/// </summary>
public void CreateANOVersionTest_LookupsAndExtractionInformations()
{
    var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest");

    var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);
    db.Create(true);

    BulkTestsData bulk = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);
    bulk.SetupTestData();
    bulk.ImportAsCatalogue();

    // Create a lookup table on the server (composite primary key: Code + hb_Code).
    var lookupTbl = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("z_sexLookup", new[]
    {
        new DatabaseColumnRequest("Code", "varchar(1)") { IsPrimaryKey = true },
        new DatabaseColumnRequest("hb_Code", "varchar(1)") { IsPrimaryKey = true },
        new DatabaseColumnRequest("Description", "varchar(100)")
    });

    // Import a reference to the lookup table.
    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, lookupTbl);

    // lookupColumnInfos indices: [0]=Code, [1]=hb_Code, [2]=Description (creation order above).
    ColumnInfo[] lookupColumnInfos;
    TableInfo lookupTableInfo;

    importer.DoImport(out lookupTableInfo, out lookupColumnInfos);

    // Columns that will participate in the Lookup join.
    var ciSex = bulk.catalogue.CatalogueItems.Single(c => c.Name == "sex");
    var ciHb = bulk.catalogue.CatalogueItems.Single(c => c.Name == "hb_extract");

    // chi is the extraction identifier.
    var eiChi = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "chi");
    eiChi.IsExtractionIdentifier = true;
    eiChi.SaveToDatabase();

    // century is hashed on release and Internal-only.
    var eiCentury = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "century");
    eiCentury.HashOnDataRelease = true;
    eiCentury.ExtractionCategory = ExtractionCategory.Internal;
    eiCentury.SaveToDatabase();

    // Add a transform (scalar function over the column, aliased).
    var eiPostcode = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "current_postcode");

    eiPostcode.SelectSQL = string.Format("LEFT(10,{0}.[current_postcode])", eiPostcode.ColumnInfo.TableInfo.Name);
    eiPostcode.Alias = "MyMutilatedColumn";
    eiPostcode.SaveToDatabase();

    // Add a combo transform referencing TWO columns (forename + surname).
    var ciComboCol = new CatalogueItem(CatalogueRepository, bulk.catalogue, "ComboColumn");

    var colForename = bulk.columnInfos.Single(c => c.GetRuntimeName() == "forename");
    var colSurname = bulk.columnInfos.Single(c => c.GetRuntimeName() == "surname");

    // Note: ColumnInfo.ToString() supplies the fully specified column name in the concatenated SQL.
    var eiComboCol = new ExtractionInformation(CatalogueRepository, ciComboCol, colForename, colForename + " + ' ' + " + colSurname);
    eiComboCol.Alias = "ComboColumn";
    eiComboCol.SaveToDatabase();

    // Remove the data-load-run-id housekeeping column from the extractable set.
    var eiDataLoadRunId = bulk.extractionInformations.Single(ei => ei.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID));
    eiDataLoadRunId.DeleteInDatabase();

    // Lookup: Description described-by Code joined to sex.
    var lookup = new Lookup(CatalogueRepository, lookupColumnInfos[2], ciSex.ColumnInfo, lookupColumnInfos[0], ExtractionJoinType.Left, null);

    // Now lets make it worse: the sex code changes per healthboard, therefore the join to the lookup
    // requires both fields sex AND hb_extract (composite join).
    var compositeLookup = new LookupCompositeJoinInfo(CatalogueRepository, lookup, ciHb.ColumnInfo, lookupColumnInfos[1]);

    // Now add the _Desc field into the original Catalogue directly after the sex column.
    int orderToInsertDescriptionFieldAt = ciSex.ExtractionInformation.Order;

    // Bump everyone after the insertion point down 1 to make room.
    foreach (var toBumpDown in bulk.catalogue.CatalogueItems.Select(ci => ci.ExtractionInformation).Where(e => e != null && e.Order > orderToInsertDescriptionFieldAt))
    {
        toBumpDown.Order++;
        toBumpDown.SaveToDatabase();
    }

    var ciDescription = new CatalogueItem(CatalogueRepository, bulk.catalogue, "Sex_Desc");
    var eiDescription = new ExtractionInformation(CatalogueRepository, ciDescription, lookupColumnInfos[2], lookupColumnInfos[2].Name);
    eiDescription.Alias = "Sex_Desc";
    eiDescription.Order = orderToInsertDescriptionFieldAt + 1;
    eiDescription.ExtractionCategory = ExtractionCategory.Supplemental;
    eiDescription.SaveToDatabase();

    bulk.catalogue.ClearAllInjections();

    // Check the source Catalogue builds valid SQL before migrating.
    QueryBuilder qb = new QueryBuilder(null, null);
    qb.AddColumnRange(bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    // The query builder should be able to succesfully create SQL.
    Console.WriteLine(qb.SQL);

    // There should be 2 tables involved in the query: [z_sexLookup] and [BulkData].
    Assert.AreEqual(2, qb.TablesUsedInQuery.Count);

    // The query builder should have identified the lookup.
    Assert.AreEqual(lookup, qb.GetDistinctRequiredLookups().Single());

    //////////////////////////////////////////////////////////////////////////////////////The Actual Bit Being Tested////////////////////////////////////////////////////
    var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);
    planManager.TargetDatabase = db;

    // Set up test rules for the migrator.
    CreateMigrationRules(planManager, bulk);

    // Rules should pass checks.
    Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);
    engine.Execute();
    //////////////////////////////////////////////////////////////////////////////////////End The Actual Bit Being Tested////////////////////////////////////////////////////

    // The engine puts the new Catalogue in an \anonymous folder — use that to find it.
    var anoCatalogue = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Folder.Path.StartsWith("\\ano"));
    Assert.IsTrue(anoCatalogue.Exists());

    // The new Catalogue should have the same number of ExtractionInformations, in the same order.
    var eiSource = bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();
    var eiDestination = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();

    Assert.AreEqual(eiSource.Length, eiDestination.Length, "Both the new and the ANO catalogue should have the same number of ExtractionInformations (extractable columns)");

    for (int i = 0; i < eiSource.Length; i++)
    {
        Assert.AreEqual(eiSource[i].Order, eiDestination[i].Order, "ExtractionInformations in the source and destination Catalogue should have the same order");

        Assert.AreEqual(eiSource[i].GetRuntimeName(), eiDestination[i].GetRuntimeName().Replace("ANO", ""), "ExtractionInformations in the source and destination Catalogue should have the same names (excluding ANO prefix)");

        Assert.AreEqual(eiSource[i].ExtractionCategory, eiDestination[i].ExtractionCategory, "Old / New ANO ExtractionInformations did not match on ExtractionCategory");
        Assert.AreEqual(eiSource[i].IsExtractionIdentifier, eiDestination[i].IsExtractionIdentifier, "Old / New ANO ExtractionInformations did not match on IsExtractionIdentifier");
        Assert.AreEqual(eiSource[i].HashOnDataRelease, eiDestination[i].HashOnDataRelease, "Old / New ANO ExtractionInformations did not match on HashOnDataRelease");
        Assert.AreEqual(eiSource[i].IsPrimaryKey, eiDestination[i].IsPrimaryKey, "Old / New ANO ExtractionInformations did not match on IsPrimaryKey");
    }

    // Check the destination Catalogue builds valid SQL too.
    QueryBuilder qbdestination = new QueryBuilder(null, null);
    qbdestination.AddColumnRange(anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    // The query builder should be able to succesfully create SQL.
    Console.WriteLine(qbdestination.SQL);

    var anoEiPostcode = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("MyMutilatedColumn"));

    // The transform on postcode should have been refactored to the new table name while preserving the scalar function LEFT(...).
    Assert.AreEqual(string.Format("LEFT(10,{0}.[current_postcode])", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiPostcode.SelectSQL);

    var anoEiComboCol = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("ComboColumn"));

    // The combo transform should have had BOTH column references refactored to the new table name.
    Assert.AreEqual(string.Format("{0}.[forename] + ' ' + {0}.[surname]", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiComboCol.SelectSQL);

    // There should be 2 tables involved in the query: [z_sexLookup] and [BulkData].
    Assert.AreEqual(2, qbdestination.TablesUsedInQuery.Count);

    // The query builder should have identified the lookup — but it should be the NEW one, not the old one.
    Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Count(), "New query builder for ano catalogue did not correctly identify that there was a Lookup");
    Assert.AreNotEqual(lookup, qbdestination.GetDistinctRequiredLookups().Single(), "New query builder for ano catalogue identified the OLD Lookup!");

    Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins().Count(), "The new Lookup did not have the composite join key (sex/hb_extract)");
    Assert.AreNotEqual(compositeLookup, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins(), "New query builder for ano catalogue identified the OLD LookupCompositeJoinInfo!");

    db.Drop();

    // Every exported shared object should have a corresponding import record.
    var exports = CatalogueRepository.GetAllObjects<ObjectExport>().Count();
    var imports = CatalogueRepository.GetAllObjects<ObjectImport>().Count();

    Assert.AreEqual(exports, imports);
    Assert.IsTrue(exports > 0);
}