private TableInfo Import(DiscoveredTable tbl, LoadMetadata lmd, LogManager logManager)
{
    logManager.CreateNewLoggingTaskIfNotExists(lmd.Name);

    //import TableInfos
    var importer = new TableInfoImporter(CatalogueRepository, tbl);
    TableInfo ti;
    ColumnInfo[] cis;
    importer.DoImport(out ti, out cis);

    //create Catalogue
    var forwardEngineer = new ForwardEngineerCatalogue(ti, cis, true);

    Catalogue cata;
    CatalogueItem[] cataItems;
    ExtractionInformation[] eis;
    forwardEngineer.ExecuteForwardEngineering(out cata, out cataItems, out eis);

    //make the catalogue use the load configuration
    cata.LoadMetadata_ID = lmd.ID;
    cata.LoggingDataTask = lmd.Name;
    Assert.IsNotNull(cata.LiveLoggingServer_ID); //catalogue should have one of these because of system defaults
    cata.SaveToDatabase();

    return ti;
}
public void Create(DiscoveredDatabase databaseICanCreateRandomTablesIn, ICatalogueRepository catalogueRepository)
{
    CreateFunctionSQL = @"
if exists (select 1 from sys.objects where name = 'MyAwesomeFunction')
    drop function MyAwesomeFunction
GO

CREATE FUNCTION MyAwesomeFunction
(
    -- Add the parameters for the function here
    @startNumber int,
    @stopNumber int,
    @name varchar(50)
)
RETURNS
@ReturnTable TABLE
(
    -- Add the column definitions for the TABLE variable here
    Number int,
    Name varchar(50)
)
AS
BEGIN
    -- Fill the table variable with the rows for your result set
    DECLARE @i int;
    set @i = @startNumber

    while(@i < @stopNumber)
    begin
        INSERT INTO @ReturnTable(Name,Number) VALUES (@name,@i);
        set @i = @i + 1;
    end

    RETURN
END
";

    using (var con = databaseICanCreateRandomTablesIn.Server.GetConnection())
    {
        con.Open();
        UsefulStuff.ExecuteBatchNonQuery(CreateFunctionSQL, con);
    }

    var tbl = databaseICanCreateRandomTablesIn.ExpectTableValuedFunction("MyAwesomeFunction");

    TableValuedFunctionImporter importer = new TableValuedFunctionImporter(catalogueRepository, tbl);
    importer.DoImport(out TableInfoCreated, out ColumnInfosCreated);

    importer.ParametersCreated[0].Value = "5";
    importer.ParametersCreated[0].SaveToDatabase();

    importer.ParametersCreated[1].Value = "10";
    importer.ParametersCreated[1].SaveToDatabase();

    importer.ParametersCreated[2].Value = "'fish'";
    importer.ParametersCreated[2].SaveToDatabase();

    ForwardEngineerCatalogue forwardEngineerCatalogue = new ForwardEngineerCatalogue(TableInfoCreated, ColumnInfosCreated, true);
    forwardEngineerCatalogue.ExecuteForwardEngineering(out Cata, out CataItems, out ExtractionInformations);
}
private void CreateANormalCatalogue()
{
    var svr = _database.Server;
    using (var con = svr.GetConnection())
    {
        con.Open();
        svr.GetCommand("CREATE TABLE NonTVFTable ( chi varchar(10))", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0101010101')", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0202020202')", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0303030303')", con).ExecuteNonQuery();
    }

    var importer = new TableInfoImporter(CatalogueRepository, svr.Name,
        _database.GetRuntimeName(), "NonTVFTable",
        DatabaseType.MicrosoftSQLServer,
        _database.Server.ExplicitUsernameIfAny,
        _database.Server.ExplicitPasswordIfAny);

    importer.DoImport(out var tbl, out var cols);

    var engineer = new ForwardEngineerCatalogue(tbl, cols, true);
    engineer.ExecuteForwardEngineering(out var cata, out var cis, out var eis);

    _nonTvfExtractionIdentifier = eis.Single();
    _nonTvfExtractionIdentifier.IsExtractionIdentifier = true;
    _nonTvfExtractionIdentifier.SaveToDatabase();

    _nonTvfCatalogue = cata;
    _nonTvfTableInfo = tbl;
}
private ITableInfo AddTableToCatalogue(string databaseName, string tableName, string pkName, out ColumnInfo[] ciList, bool createCatalogue = false)
{
    var expectedTable = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName).ExpectTable(tableName);

    var resultsImporter = new TableInfoImporter(CatalogueRepository, expectedTable);
    resultsImporter.DoImport(out var ti, out ciList);

    var pkResult = ciList.Single(info => info.GetRuntimeName().Equals(pkName));
    pkResult.IsPrimaryKey = true;
    pkResult.SaveToDatabase();

    var forwardEngineer = new ForwardEngineerCatalogue(ti, ciList);

    if (createCatalogue)
    {
        CatalogueItem[] cataItems;
        ExtractionInformation[] extractionInformations;
        forwardEngineer.ExecuteForwardEngineering(out _catalogue, out cataItems, out extractionInformations);
    }
    else
    {
        forwardEngineer.ExecuteForwardEngineering(_catalogue);
    }

    return ti;
}
private void CreateTvfCatalogue(string cohortDatabaseName)
{
    var svr = _database.Server;
    using (var con = svr.GetConnection())
    {
        con.Open();

        //create the newID view
        svr.GetCommand("create view getNewID as select newid() as new_id", con).ExecuteNonQuery();

        var sql = string.Format(
            @"create function GetTopXRandom (@numberOfRecords int)
RETURNS @retTable TABLE
(
    chi varchar(10),
    definitionID int
)
AS BEGIN

    while(@numberOfRecords > 0)
    begin
        insert into @retTable select top 1 chi,cohortDefinition_id from {0}..Cohort order by (select new_id from getNewID)
        set @numberOfRecords = @numberOfRecords - 1
    end

    return
end
", cohortDatabaseName);

        svr.GetCommand(sql, con).ExecuteNonQuery();
    }

    var tblvf = _database.ExpectTableValuedFunction("GetTopXRandom");

    var importer = new TableValuedFunctionImporter(CatalogueRepository, tblvf);

    TableInfo tbl;
    ColumnInfo[] cols;
    importer.DoImport(out tbl, out cols);

    var engineer = new ForwardEngineerCatalogue(tbl, cols, true);

    Catalogue cata;
    CatalogueItem[] cis;
    ExtractionInformation[] eis;
    engineer.ExecuteForwardEngineering(out cata, out cis, out eis);

    Assert.AreEqual("chi", eis[0].GetRuntimeName());
    eis[0].IsExtractionIdentifier = true;
    eis[0].SaveToDatabase();

    _tvfCatalogue = cata;
    _tvfTableInfo = tbl;
}
public void TestImportingATable(DatabaseType dbType)
{
    DataTable dt = new DataTable();
    dt.Columns.Add("Do");
    dt.Columns.Add("Ray");
    dt.Columns.Add("Me");
    dt.Columns.Add("Fa");
    dt.Columns.Add("So");

    var db = GetCleanedServer(dbType);
    var tbl = db.CreateTable("OmgTables", dt);

    var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository.GetServerDefaults());

    var importer1 = new TableInfoImporter(memoryRepository, tbl, DataAccessContext.Any);

    TableInfo memTableInfo;
    ColumnInfo[] memColumnInfos;
    Catalogue memCatalogue;
    CatalogueItem[] memCatalogueItems;
    ExtractionInformation[] memExtractionInformations;

    importer1.DoImport(out memTableInfo, out memColumnInfos);
    var forwardEngineer1 = new ForwardEngineerCatalogue(memTableInfo, memColumnInfos);
    forwardEngineer1.ExecuteForwardEngineering(out memCatalogue, out memCatalogueItems, out memExtractionInformations);

    TableInfo dbTableInfo;
    ColumnInfo[] dbColumnInfos;
    Catalogue dbCatalogue;
    CatalogueItem[] dbCatalogueItems;
    ExtractionInformation[] dbExtractionInformations;

    var importerdb = new TableInfoImporter(CatalogueRepository, tbl, DataAccessContext.Any);
    importerdb.DoImport(out dbTableInfo, out dbColumnInfos);
    var forwardEngineer2 = new ForwardEngineerCatalogue(dbTableInfo, dbColumnInfos);
    forwardEngineer2.ExecuteForwardEngineering(out dbCatalogue, out dbCatalogueItems, out dbExtractionInformations);

    UnitTests.AssertAreEqual(memCatalogue, dbCatalogue);
    UnitTests.AssertAreEqual(memTableInfo, dbTableInfo);

    UnitTests.AssertAreEqual(memCatalogue.CatalogueItems, dbCatalogue.CatalogueItems);
    UnitTests.AssertAreEqual(memCatalogue.GetAllExtractionInformation(ExtractionCategory.Any), dbCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));
    UnitTests.AssertAreEqual(memCatalogue.CatalogueItems.Select(ci => ci.ColumnInfo), dbCatalogue.CatalogueItems.Select(ci => ci.ColumnInfo));
}
protected ICatalogue Import(DiscoveredTable tbl,
    out ITableInfo tableInfoCreated, out ColumnInfo[] columnInfosCreated,
    out CatalogueItem[] catalogueItems, out ExtractionInformation[] extractionInformations)
{
    var importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);

    var forwardEngineer = new ForwardEngineerCatalogue(tableInfoCreated, columnInfosCreated, true);
    forwardEngineer.ExecuteForwardEngineering(out var catalogue, out catalogueItems, out extractionInformations);

    return catalogue;
}
public void SynchronizationTests_ColumnAddedWithCatalogue(bool acceptChanges)
{
    ForwardEngineerCatalogue cataEngineer = new ForwardEngineerCatalogue(tableInfoCreated, columnInfosCreated, true);

    Catalogue cata;
    CatalogueItem[] cataItems;
    ExtractionInformation[] extractionInformations;
    cataEngineer.ExecuteForwardEngineering(out cata, out cataItems, out extractionInformations);

    try
    {
        Assert.AreEqual(TABLE_NAME, cata.Name);
        Assert.AreEqual(2, cataItems.Length);
        Assert.AreEqual(2, extractionInformations.Length);

        using (var con = _server.GetConnection())
        {
            con.Open();
            _server.GetCommand("ALTER TABLE " + TABLE_NAME + " ADD Birthday datetime not null", con).ExecuteNonQuery();
        }

        TableInfoSynchronizer synchronizer = new TableInfoSynchronizer(tableInfoCreated);

        if (acceptChanges)
        {
            //accepting changes should result in a synchronized table
            Assert.AreEqual(true, synchronizer.Synchronize(new AcceptAllCheckNotifier()));

            Assert.AreEqual(3, tableInfoCreated.ColumnInfos.Length); //should be 3 now
            Assert.AreEqual(3, cata.CatalogueItems.Length); //should be 3 now
            Assert.AreEqual(3, cata.GetAllExtractionInformation(ExtractionCategory.Any).Length); //should be 3 now

            Assert.AreEqual(1, cata.GetAllExtractionInformation(ExtractionCategory.Any).Count(e => e.SelectSQL.Contains("Birthday")));
            Assert.AreEqual(1, cata.CatalogueItems.Count(ci => ci.Name.Contains("Birthday")));
        }
        else
        {
            var ex = Assert.Throws<Exception>(() => synchronizer.Synchronize(new ThrowImmediatelyCheckNotifier()));
            Assert.AreEqual("The following columns are missing from the TableInfo:Birthday", ex.Message);
        }
    }
    finally
    {
        cata.DeleteInDatabase();
    }
}
/// <summary>
/// Creates Rdmp metadata objects (<see cref="catalogue"/>, <see cref="tableInfo"/> etc) which point to the <see cref="BulkDataTable"/>
/// </summary>
/// <returns></returns>
public Catalogue ImportAsCatalogue()
{
    TableInfoImporter f = new TableInfoImporter(_repository, BulkDataDatabase.ExpectTable(BulkDataTable));
    f.DoImport(out tableInfo, out columnInfos);

    ForwardEngineerCatalogue forwardEngineer = new ForwardEngineerCatalogue(tableInfo, columnInfos, true);
    forwardEngineer.ExecuteForwardEngineering(out catalogue, out catalogueItems, out extractionInformations);

    var chi = extractionInformations.Single(e => e.GetRuntimeName().Equals("chi"));
    chi.IsExtractionIdentifier = true;
    chi.SaveToDatabase();

    return catalogue;
}
/// <inheritdoc/>
public virtual ICatalogue CreateAndConfigureCatalogue(ITableInfo tableInfo, ColumnInfo[] extractionIdentifierColumns, string initialDescription, IProject projectSpecific, CatalogueFolder catalogueFolder)
{
    // Create a new Catalogue based on the table info
    var engineer = new ForwardEngineerCatalogue(tableInfo, tableInfo.ColumnInfos, true);
    engineer.ExecuteForwardEngineering(out ICatalogue cata, out _, out ExtractionInformation[] eis);

    // if we know the linkable private identifier column(s)
    if (extractionIdentifierColumns != null && extractionIdentifierColumns.Any())
    {
        // Make the Catalogue extractable
        var eds = new ExtractableDataSet(RepositoryLocator.DataExportRepository, cata);

        // Mark the columns specified IsExtractionIdentifier
        foreach (var col in extractionIdentifierColumns)
        {
            var match = eis.FirstOrDefault(ei => ei.ColumnInfo?.ID == col.ID);
            if (match == null)
            {
                throw new ArgumentException($"Supplied ColumnInfo {col.GetRuntimeName()} was not found amongst the columns created");
            }

            match.IsExtractionIdentifier = true;
            match.SaveToDatabase();
        }

        // Catalogue must be extractable to be project specific
        if (projectSpecific != null)
        {
            eds.Project_ID = projectSpecific.ID;
            eds.SaveToDatabase();
        }
    }

    if (catalogueFolder != null)
    {
        cata.Folder = catalogueFolder;
        cata.SaveToDatabase();
    }

    return cata;
}
public override void Execute()
{
    base.Execute();

    var tableCreator = new ImagingTableCreation(_expectedTable.Database.Server.GetQuerySyntaxHelper());
    tableCreator.CreateTable(_expectedTable, _tableTemplate);

    var importer = new TableInfoImporter(_repositoryLocator.CatalogueRepository, _expectedTable);

    TableInfo tis;
    ColumnInfo[] cis;
    importer.DoImport(out tis, out cis);

    var engineer = new ForwardEngineerCatalogue(tis, cis, true);

    Catalogue cata;
    CatalogueItem[] cataItems;
    ExtractionInformation[] eis;
    engineer.ExecuteForwardEngineering(out cata, out cataItems, out eis);

    var patientIdentifier = eis.SingleOrDefault(e => e.GetRuntimeName().Equals("PatientID"));
    if (patientIdentifier != null)
    {
        patientIdentifier.IsExtractionIdentifier = true;
        patientIdentifier.SaveToDatabase();
    }

    var seriesEi = eis.SingleOrDefault(e => e.GetRuntimeName().Equals("SeriesInstanceUID"));
    if (seriesEi != null)
    {
        seriesEi.IsExtractionIdentifier = true;
        seriesEi.SaveToDatabase();
    }

    //make it extractable
    new ExtractableDataSet(_repositoryLocator.DataExportRepository, cata);

    NewCatalogueCreated = cata;
}
public override void Execute()
{
    base.Execute();

    Catalogue c = null;

    var importer = new TableInfoImporter(BasicActivator.RepositoryLocator.CatalogueRepository, _table);
    importer.DoImport(out TableInfo ti, out ColumnInfo[] cis);

    BasicActivator.Show($"Successfully imported new TableInfo {ti.Name} with ID {ti.ID}");

    if (_createCatalogue)
    {
        var forwardEngineer = new ForwardEngineerCatalogue(ti, cis, true);
        forwardEngineer.ExecuteForwardEngineering(out c, out _, out _);

        BasicActivator.Show($"Successfully imported new Catalogue {c.Name} with ID {c.ID}");
    }

    Publish((DatabaseEntity)c ?? ti);
}
public override void Execute()
{
    base.Execute();

    ICatalogue c = null;
    ITableInfoImporter importer;

    DiscoveredTable t = _table ?? SelectTable(false, "Select table to import");

    if (t == null)
    {
        return;
    }

    //if it is a table valued function use the specialised importer, otherwise import it as a regular table
    if (t is DiscoveredTableValuedFunction)
    {
        importer = new TableValuedFunctionImporter(BasicActivator.RepositoryLocator.CatalogueRepository, (DiscoveredTableValuedFunction)t);
    }
    else
    {
        importer = new TableInfoImporter(BasicActivator.RepositoryLocator.CatalogueRepository, t);
    }

    importer.DoImport(out var ti, out ColumnInfo[] cis);

    BasicActivator.Show($"Successfully imported new TableInfo {ti.Name} with ID {ti.ID}");

    if (_createCatalogue)
    {
        var forwardEngineer = new ForwardEngineerCatalogue(ti, cis, true);
        forwardEngineer.ExecuteForwardEngineering(out c, out _, out _);

        BasicActivator.Show($"Successfully imported new Catalogue {c.Name} with ID {c.ID}");
    }

    Publish((IMapsDirectlyToDatabaseTable)c ?? ti);
}
private Catalogue ImportCatalogue(TableInfo ti)
{
    var forwardEngineer = new ForwardEngineerCatalogue(ti, ti.ColumnInfos, true);
    forwardEngineer.ExecuteForwardEngineering(out Catalogue cata, out _, out ExtractionInformation[] eis);

    //get descriptions of the columns from BadMedicine
    var desc = new Descriptions();
    cata.Description = Trim(desc.Get(cata.Name));
    if (cata.Description != null)
    {
        cata.SaveToDatabase();

        foreach (var ci in cata.CatalogueItems)
        {
            var ciDescription = Trim(desc.Get(cata.Name, ci.Name));
            if (ciDescription != null)
            {
                ci.Description = ciDescription;
                ci.SaveToDatabase();
            }
        }
    }

    var chi = eis.SingleOrDefault(e => e.GetRuntimeName().Equals("chi", StringComparison.CurrentCultureIgnoreCase));
    if (chi != null)
    {
        chi.IsExtractionIdentifier = true;
        chi.SaveToDatabase();

        var eds = new ExtractableDataSet(_repos.DataExportRepository, cata);
    }

    return cata;
}
private void CreateANormalCatalogue()
{
    var svr = DiscoveredDatabaseICanCreateRandomTablesIn.Server;
    using (var con = svr.GetConnection())
    {
        con.Open();
        svr.GetCommand("CREATE TABLE NonTVFTable ( chi varchar(10))", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0101010101')", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0202020202')", con).ExecuteNonQuery();
        svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0303030303')", con).ExecuteNonQuery();
    }

    var importer = new TableInfoImporter(CatalogueRepository, svr.Name,
        DiscoveredDatabaseICanCreateRandomTablesIn.GetRuntimeName(), "NonTVFTable",
        DatabaseType.MicrosoftSQLServer);

    TableInfo tbl;
    ColumnInfo[] cols;
    importer.DoImport(out tbl, out cols);

    var engineer = new ForwardEngineerCatalogue(tbl, cols, true);

    Catalogue cata;
    CatalogueItem[] cis;
    ExtractionInformation[] eis;
    engineer.ExecuteForwardEngineering(out cata, out cis, out eis);

    _nonTvfExtractionIdentifier = eis.Single();
    _nonTvfExtractionIdentifier.IsExtractionIdentifier = true;
    _nonTvfExtractionIdentifier.SaveToDatabase();

    _nonTvfCatalogue = cata;
    _nonTvfTableInfo = tbl;
}
public void CreateANOVersion_TestSkippingTables(bool tableInfoAlreadyExistsForSkippedTable, bool putPlanThroughSerialization)
{
    var dbFrom = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.GetConsistentName("CreateANOVersion_TestSkippingTables_From"));
    var dbTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.GetConsistentName("CreateANOVersion_TestSkippingTables_To"));

    dbFrom.Create(true);
    dbTo.Create(true);

    try
    {
        var tblFromHeads = dbFrom.CreateTable("Heads", new[]
        {
            new DatabaseColumnRequest("SkullColor", "varchar(10)"),
            new DatabaseColumnRequest("Vertebrae", "varchar(25)")
        });

        var cols = new[]
        {
            new DatabaseColumnRequest("SpineColor", "varchar(10)"),
            new DatabaseColumnRequest("Vertebrae", "varchar(25)")
        };

        var tblFromNeck = dbFrom.CreateTable("Necks", cols);

        //Necks table already exists in the destination so will be skipped for migration but still needs to be imported
        var tblToNeck = dbTo.CreateTable("Necks", cols);

        TableInfo fromHeadsTableInfo;
        ColumnInfo[] fromHeadsColumnInfo;
        TableInfo fromNeckTableInfo;
        ColumnInfo[] fromNeckColumnInfo;
        TableInfo toNecksTableInfo = null;
        ColumnInfo[] toNecksColumnInfo = null;

        TableInfoImporter i1 = new TableInfoImporter(CatalogueRepository, tblFromHeads);
        i1.DoImport(out fromHeadsTableInfo, out fromHeadsColumnInfo);

        TableInfoImporter i2 = new TableInfoImporter(CatalogueRepository, tblFromNeck);
        i2.DoImport(out fromNeckTableInfo, out fromNeckColumnInfo);

        //Table already exists but does the in Catalogue reference exist?
        if (tableInfoAlreadyExistsForSkippedTable)
        {
            TableInfoImporter i3 = new TableInfoImporter(CatalogueRepository, tblToNeck);
            i3.DoImport(out toNecksTableInfo, out toNecksColumnInfo);
        }

        //Create a JoinInfo so the query builder knows how to connect the tables
        new JoinInfo(CatalogueRepository,
            fromHeadsColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")),
            fromNeckColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")),
            ExtractionJoinType.Inner, null);

        var cataEngineer = new ForwardEngineerCatalogue(fromHeadsTableInfo, fromHeadsColumnInfo, true);
        Catalogue cata;
        CatalogueItem[] cataItems;
        ExtractionInformation[] extractionInformations;
        cataEngineer.ExecuteForwardEngineering(out cata, out cataItems, out extractionInformations);

        var cataEngineer2 = new ForwardEngineerCatalogue(fromNeckTableInfo, fromNeckColumnInfo, true);
        cataEngineer2.ExecuteForwardEngineering(cata);

        //4 extraction informations in the source Catalogue (2 from Heads and 2 from Necks)
        Assert.AreEqual(cata.GetAllExtractionInformation(ExtractionCategory.Any).Count(), 4);

        //setup ANOTable on head
        var anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOSkullColor", "C");
        anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 10;
        anoTable.SaveToDatabase();
        anoTable.PushToANOServerAsNewTable("varchar(10)", new ThrowImmediatelyCheckNotifier());

        //////////////////The actual test!/////////////////
        var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata);

        //ano the table SkullColor
        var scPlan = planManager.GetPlanForColumnInfo(fromHeadsColumnInfo.Single(col => col.GetRuntimeName().Equals("SkullColor")));
        scPlan.ANOTable = anoTable;
        scPlan.Plan = Plan.ANO;

        if (putPlanThroughSerialization)
        {
            var asString = JsonConvertExtensions.SerializeObject(planManager, RepositoryLocator);
            planManager = (ForwardEngineerANOCataloguePlanManager)JsonConvertExtensions.DeserializeObject(asString, typeof(ForwardEngineerANOCataloguePlanManager), RepositoryLocator);
        }

        //not part of serialization
        planManager.TargetDatabase = dbTo;
        planManager.SkippedTables.Add(fromNeckTableInfo); //skip the Necks table because it already exists (ColumnInfos may or may not exist but the physical table definitely does)

        var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);

        if (!tableInfoAlreadyExistsForSkippedTable)
        {
            var ex = Assert.Throws<Exception>(engine.Execute);
            Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, "Found '0' ColumnInfos called"));
            Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, "[Necks].[SpineColor]"));
            return;
        }
        else
        {
            engine.Execute();
        }

        var newCata = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Name.Equals("ANOHeads"));
        Assert.IsTrue(newCata.Exists());

        var newCataItems = newCata.CatalogueItems;
        Assert.AreEqual(newCataItems.Count(), 4); //one CatalogueItem per ExtractionInformation

        //all extraction informations should point to the new table location
        Assert.IsTrue(newCataItems.All(ci => ci.ExtractionInformation.SelectSQL.Contains(dbTo.GetRuntimeName())));

        //these columns should all exist
        Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SkullColor")));
        Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SpineColor")));
        Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("Vertebrae"))); //actually there will be 2 copies of this, one from Necks and one from Heads

        //new ColumnInfo should have a reference to the anotable
        Assert.IsTrue(newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo.ANOTable_ID == anoTable.ID);

        var newSpineColorColumnInfo = newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo;

        //table info already existed, make sure the new CatalogueItems point to the same columninfos / table infos
        Assert.IsTrue(newCataItems.Select(ci => ci.ColumnInfo).Contains(newSpineColorColumnInfo));
    }
    finally
    {
        dbFrom.Drop();
        dbTo.Drop();
    }
}
public void Test_DatabaseTypeQueryWithParameter_IntParameter(DatabaseType dbType)
{
    //Pick the destination server
    var tableName = TestDatabaseNames.GetConsistentName("tbl");

    //make sure there's a database ready to receive the data
    var db = GetCleanedServer(dbType);
    db.Create(true);

    //this is the table we are uploading
    var dt = new DataTable();
    dt.Columns.Add("numbercol");
    dt.Rows.Add(10);
    dt.Rows.Add(15);
    dt.Rows.Add(20);
    dt.Rows.Add(25);
    dt.TableName = tableName;

    try
    {
        ///////////////////////UPLOAD THE DataTable TO THE DESTINATION////////////////////////////////////////////
        var uploader = new DataTableUploadDestination();
        uploader.PreInitialize(db, new ThrowImmediatelyDataLoadJob());
        uploader.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken());
        uploader.Dispose(new ThrowImmediatelyDataLoadJob(), null);

        var tbl = db.ExpectTable(tableName);
        var importer = new TableInfoImporter(CatalogueRepository, tbl);
        importer.DoImport(out var ti, out var ci);

        var engineer = new ForwardEngineerCatalogue(ti, ci, true);
        engineer.ExecuteForwardEngineering(out var cata, out var cis, out var ei);
        /////////////////////////////////////////////////////////////////////////////////////////////////////////

        /////////////////////////////////THE ACTUAL PROPER TEST////////////////////////////////////
        //create an extraction filter
        var extractionInformation = ei.Single();
        var filter = new ExtractionFilter(CatalogueRepository, "Filter by numbers", extractionInformation);
        filter.WhereSQL = extractionInformation.SelectSQL + " = @n";
        filter.SaveToDatabase();

        //create the parameters for filter (no globals, masters or scope adjacent parameters)
        new ParameterCreator(filter.GetFilterFactory(), null, null).CreateAll(filter, null);

        var p = filter.GetAllParameters().Single();
        Assert.AreEqual("@n", p.ParameterName);
        p.ParameterSQL = p.ParameterSQL.Replace("varchar(50)", "int"); //make it int
        p.Value = "20";
        p.SaveToDatabase();

        var qb = new QueryBuilder(null, null);
        qb.AddColumn(extractionInformation);
        qb.RootFilterContainer = new SpontaneouslyInventedFilterContainer(new MemoryCatalogueRepository(), null, new[] { filter }, FilterContainerOperation.AND);

        using (var con = db.Server.GetConnection())
        {
            con.Open();

            string sql = qb.SQL;

            var cmd = db.Server.GetCommand(sql, con);
            var r = cmd.ExecuteReader();
            Assert.IsTrue(r.Read());
            Assert.AreEqual(20, r[extractionInformation.GetRuntimeName()]);
        }
        ///////////////////////////////////////////////////////////////////////////////////////
    }
    finally
    {
        db.Drop();
    }
}
private void Initialize(IActivateItems activator, string initialDescription, Project projectSpecificIfAny)
{
    CommonFunctionality.SetItemActivator(activator);

    var cols = _tableInfo.ColumnInfos;

    var forwardEngineer = new ForwardEngineerCatalogue(_tableInfo, cols, false);
    ExtractionInformation[] eis;
    forwardEngineer.ExecuteForwardEngineering(out _catalogue, out _catalogueItems, out eis);

    tbDescription.Text = initialDescription + " (" + Environment.UserName + " - " + DateTime.Now + ")";
    tbTableName.Text = _tableInfo.Name;
    _catalogue.SaveToDatabase();

    objectSaverButton1.SetupFor(this, _catalogue, activator);

    if (_binder == null)
    {
        _binder = new BinderWithErrorProviderFactory(activator);
        _binder.Bind(tbCatalogueName, "Text", _catalogue, "Name", false, DataSourceUpdateMode.OnPropertyChanged, c => c.Name);
        _binder.Bind(tbAcronym, "Text", _catalogue, "Acronym", false, DataSourceUpdateMode.OnPropertyChanged, c => c.Acronym);
        _binder.Bind(tbDescription, "Text", _catalogue, "Description", false, DataSourceUpdateMode.OnPropertyChanged, c => c.Description);
    }

    //Every CatalogueItem is either mapped to a ColumnInfo (not extractable) or an ExtractionInformation (extractable). To start out with they are not extractable.
    foreach (CatalogueItem ci in _catalogueItems)
    {
        olvColumnExtractability.AddObject(new ColPair(ci, cols.Single(col => ci.ColumnInfo_ID == col.ID)));
    }

    _extractionCategories = new object[]
    {
        NotExtractable,
        ExtractionCategory.Core,
        ExtractionCategory.Supplemental,
        ExtractionCategory.SpecialApprovalRequired,
        ExtractionCategory.Internal,
        ExtractionCategory.Deprecated
    };

    ddCategoriseMany.Items.AddRange(_extractionCategories);

    olvExtractionCategory.AspectGetter += ExtractionCategoryAspectGetter;
    olvColumnExtractability.AlwaysGroupByColumn = olvExtractionCategory;

    olvColumnExtractability.CellEditStarting += TlvColumnExtractabilityOnCellEditStarting;
    olvColumnExtractability.CellEditFinishing += TlvColumnExtractabilityOnCellEditFinishing;
    olvColumnExtractability.CellEditActivation = ObjectListView.CellEditActivateMode.SingleClick;

    olvIsExtractionIdentifier.AspectPutter += IsExtractionIdentifier_AspectPutter;
    olvIsExtractionIdentifier.AspectGetter += IsExtractionIdentifier_AspectGetter;

    olvColumnInfoName.ImageGetter = ImageGetter;
    olvColumnExtractability.RebuildColumns();

    if (Activator.RepositoryLocator.DataExportRepository == null)
    {
        gbProjectSpecific.Enabled = false;
    }
    else
    {
        SelectProject(projectSpecificIfAny);
        pbProject.Image = activator.CoreIconProvider.GetImage(RDMPConcept.Project);
    }

    ddIsExtractionIdentifier.Items.Add("<<None>>");
    ddIsExtractionIdentifier.Items.AddRange(olvColumnExtractability.Objects.OfType<ColPair>().ToArray());

    CommonFunctionality.AddHelp(btnPickProject, "IExtractableDataSet.Project_ID", "Project Specific Datasets");
    CommonFunctionality.AddHelpString(btnAddToExisting, "Add to existing catalogue", "Use this option if you want to create a Catalogue which extracts from multiple tables (via a JOIN). Once used you will still need to configure a JoinInfo between column(s) in all the tables the Catalogue draws data from.");
}
public void CohortIdentificationConfiguration_Join_PatientIndexTable()
{
    DataTable header = new DataTable();
    header.Columns.Add("ID");
    header.Columns.Add("Chi");
    header.Columns.Add("Age");
    header.Columns.Add("Date");
    header.Columns.Add("Healthboard");
    header.PrimaryKey = new[] { header.Columns["ID"] };

    header.Rows.Add("1", "0101010101", 50, new DateTime(2001, 1, 1), "T");
    header.Rows.Add("2", "0202020202", 50, new DateTime(2002, 2, 2), "T");

    var hTbl = From.CreateTable("header", header);
    var cata = Import(hTbl, out TableInfo hTi, out _);
    cata.Name = "My Combo Join Catalogue";
    cata.SaveToDatabase();

    var scripter = new MasterDatabaseScriptExecutor(To);
    var patcher = new QueryCachingPatcher();
    scripter.CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier());

    var edsCache = new ExternalDatabaseServer(CatalogueRepository, "Cache", new QueryCachingPatcher());
    edsCache.SetProperties(To);

    DataTable results = new DataTable();
    results.Columns.Add("Header_ID");
    results.Columns.Add("TestCode");
    results.Columns.Add("Result");

    results.Rows.Add("1", "HBA1C", 50);
    results.Rows.Add("1", "ECOM", "Hi fellas");
    results.Rows.Add("1", "ALB", 100);
    results.Rows.Add("2", "ALB", 50);

    var rTbl = From.CreateTable("results", results);

    var importer = new TableInfoImporter(CatalogueRepository, rTbl);
    importer.DoImport(out TableInfo rTi, out ColumnInfo[] rColInfos);

    var fe = new ForwardEngineerCatalogue(rTi, rColInfos, true);
    fe.ExecuteForwardEngineering(cata);

    //Should now be 1 Catalogue with all the columns (tables will have to be joined to build the query though)
    Assert.AreEqual(8, cata.GetAllExtractionInformation(ExtractionCategory.Core).Length);

    var ji = new JoinInfo(CatalogueRepository,
        rTi.ColumnInfos.Single(ci => ci.GetRuntimeName().Equals("Header_ID", StringComparison.CurrentCultureIgnoreCase)),
        hTi.ColumnInfos.Single(ci => ci.GetRuntimeName().Equals("ID", StringComparison.CurrentCultureIgnoreCase)),
        ExtractionJoinType.Right, null);

    //setup a cic that uses the cache
    var cic = new CohortIdentificationConfiguration(CatalogueRepository, "MyCic");
    cic.CreateRootContainerIfNotExists();
    cic.QueryCachingServer_ID = edsCache.ID;
    cic.SaveToDatabase();

    //create a patient index table that shows all the times that they had a test in any HB (with the HB being part of the result set)
    var acPatIndex = new AggregateConfiguration(CatalogueRepository, cata, "My PatIndes");

    var eiChi = cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Chi"));
    eiChi.IsExtractionIdentifier = true;
    acPatIndex.CountSQL = null;
    eiChi.SaveToDatabase();

    acPatIndex.AddDimension(eiChi);
    acPatIndex.AddDimension(cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Date")));
    acPatIndex.AddDimension(cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Healthboard")));

    cic.EnsureNamingConvention(acPatIndex);

    var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cic, acPatIndex);

    Assert.IsTrue(acPatIndex.IsCohortIdentificationAggregate);
    Assert.IsTrue(acPatIndex.IsJoinablePatientIndexTable());

    var compiler = new CohortCompiler(cic);

    var runner = new CohortCompilerRunner(compiler, 50);
    var cancellation = new System.Threading.CancellationToken();
    runner.Run(cancellation);

    //they should not be executing and should be completed
    Assert.IsFalse(compiler.Tasks.Any(t => t.Value.IsExecuting));
    Assert.AreEqual(Phase.Finished, runner.ExecutionPhase);

    var manager = new CachedAggregateConfigurationResultsManager(edsCache);

    var cacheTableName = manager.GetLatestResultsTableUnsafe(acPatIndex, AggregateOperation.JoinableInceptionQuery);
    Assert.IsNotNull(cacheTableName, "No results were cached!");

    var cacheTable = To.ExpectTable(cacheTableName.GetRuntimeName());

    //Chi, Date and Healthboard
    Assert.AreEqual(3, cacheTable.DiscoverColumns().Length);

    //Healthboard should be a string
    Assert.AreEqual(typeof(string), cacheTable.DiscoverColumn("Healthboard").DataType.GetCSharpDataType());

    /* Query Cache contains this:
     *
     * Chi         Date                          Healthboard
     * 0101010101  2001-01-01 00:00:00.0000000   T
     * 0202020202  2002-02-02 00:00:00.0000000   T
     */

    Assert.AreEqual(2, cacheTable.GetRowCount());

    //Now we could add a new AggregateConfiguration that uses the joinable!
}
protected override void SetUp()
{
    base.SetUp();

    string sql =
        @"CREATE TABLE [dbo].[Tests](
    [chi] [varchar](10) NULL,
    [Date] [datetime] NULL,
    [hb_extract] [varchar](1) NULL,
    [TestId] [int] NOT NULL,
 CONSTRAINT [PK_Tests] PRIMARY KEY CLUSTERED
(
    [TestId] ASC
)
)
GO

CREATE TABLE [dbo].[Results](
    [TestId] [int] NOT NULL,
    [Measure] [varchar](10) NOT NULL,
    [Value] [int] NULL,
 CONSTRAINT [PK_Results] PRIMARY KEY CLUSTERED
(
    [TestId] ASC,
    [Measure] ASC
)
)
GO

ALTER TABLE [dbo].[Results] WITH CHECK ADD CONSTRAINT [FK_Results_Tests] FOREIGN KEY([TestId])
REFERENCES [dbo].[Tests] ([TestId])
GO";

    var server = From.Server;
    using (var con = server.GetConnection())
    {
        con.Open();
        UsefulStuff.ExecuteBatchNonQuery(sql, con);
    }

    var importer1 = new TableInfoImporter(CatalogueRepository, From.ExpectTable("Tests"));
    var importer2 = new TableInfoImporter(CatalogueRepository, From.ExpectTable("Results"));

    importer1.DoImport(out t1, out c1);
    importer2.DoImport(out t2, out c2);

    var engineer1 = new ForwardEngineerCatalogue(t1, c1, true);
    var engineer2 = new ForwardEngineerCatalogue(t2, c2, true);

    engineer1.ExecuteForwardEngineering(out cata1, out cataItems1, out eis1);
    engineer2.ExecuteForwardEngineering(out cata2, out cataItems2, out eis2);

    new JoinInfo(CatalogueRepository,
        c1.Single(e => e.GetRuntimeName().Equals("TestId")),
        c2.Single(e => e.GetRuntimeName().Equals("TestId")),
        ExtractionJoinType.Left, null);

    _anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOTes", "T");
    _anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 10;
    _anoTable.SaveToDatabase();
    _anoTable.PushToANOServerAsNewTable("int", new ThrowImmediatelyCheckNotifier());

    _comboCata = new Catalogue(CatalogueRepository, "Combo Catalogue");

    //pk
    var ciTestId = new CatalogueItem(CatalogueRepository, _comboCata, "TestId");
    var colTestId = c1.Single(c => c.GetRuntimeName().Equals("TestId"));
    ciTestId.ColumnInfo_ID = colTestId.ID;
    ciTestId.SaveToDatabase();
    var eiTestId = new ExtractionInformation(CatalogueRepository, ciTestId, colTestId, colTestId.Name);

    //Measure
    var ciMeasure = new CatalogueItem(CatalogueRepository, _comboCata, "Measuree");
    var colMeasure = c2.Single(c => c.GetRuntimeName().Equals("Measure"));
    ciMeasure.ColumnInfo_ID = colMeasure.ID;
    ciMeasure.SaveToDatabase();
    var eiMeasure = new ExtractionInformation(CatalogueRepository, ciMeasure, colMeasure, colMeasure.Name);

    //Date
    var ciDate = new CatalogueItem(CatalogueRepository, _comboCata, "Dat");
    var colDate = c1.Single(c => c.GetRuntimeName().Equals("Date"));
    ciDate.ColumnInfo_ID = colDate.ID;
    ciDate.SaveToDatabase();
    var eiDate = new ExtractionInformation(CatalogueRepository, ciDate, colDate, colDate.Name);

    _destinationDatabase = To;
}
public void TestAddTag_WithArchive(DatabaseType type)
{
    var db = GetCleanedServer(type);

    // Create a nice template with lots of columns
    var template = new ImageTableTemplate();
    template.TableName = "Fish";
    template.Columns = new[]
    {
        new ImageColumnTemplate { IsPrimaryKey = true, AllowNulls = true, ColumnName = "RelativeFileArchiveURI" },
        new ImageColumnTemplate { IsPrimaryKey = false, AllowNulls = true, ColumnName = "SeriesInstanceUID" },
        new ImageColumnTemplate { IsPrimaryKey = false, AllowNulls = true, ColumnName = "StudyDate" },
    };

    // use it to create a table
    var tbl = db.ExpectTable(template.TableName);
    IAtomicCommand cmd = new ExecuteCommandCreateNewImagingDataset(RepositoryLocator, tbl, template);
    Assert.IsFalse(cmd.IsImpossible);
    cmd.Execute();

    Assert.IsTrue(tbl.Exists());

    // import RDMP reference to the table
    var importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out TableInfo ti, out ColumnInfo[] cols);
    var forward = new ForwardEngineerCatalogue(ti, cols);
    forward.ExecuteForwardEngineering(out Catalogue catalogue, out _, out _);

    // Create an archive table and backup trigger like we would have if this were the target of a data load
    var triggerImplementerFactory = new TriggerImplementerFactory(type);
    var implementer = triggerImplementerFactory.Create(tbl);
    implementer.CreateTrigger(new ThrowImmediatelyCheckNotifier());

    var archive = tbl.Database.ExpectTable(tbl.GetRuntimeName() + "_Archive");
    Assert.IsTrue(archive.Exists());

    var activator = new ConsoleInputManager(RepositoryLocator, new ThrowImmediatelyCheckNotifier()) { DisallowInput = true };

    // Test the actual commands
    cmd = new ExecuteCommandAddTag(activator, catalogue, "ffffff", "int");
    Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible);
    cmd.Execute();

    cmd = new ExecuteCommandAddTag(activator, catalogue, "EchoTime", null);
    Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible);
    cmd.Execute();

    var ex = Assert.Throws<Exception>(() => new ExecuteCommandAddTag(activator, catalogue, "StudyDate", null).Execute());
    StringAssert.StartsWith("Failed check with message: There is already a column called 'StudyDate' in TableInfo ", ex.Message);

    cmd = new ExecuteCommandAddTag(activator, catalogue, "SeriesDate", null);
    Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible);
    cmd.Execute();

    Assert.AreEqual("int", tbl.DiscoverColumn("ffffff").DataType.SQLType);
    Assert.AreEqual("decimal(38,19)", tbl.DiscoverColumn("EchoTime").DataType.SQLType);
    Assert.AreEqual(typeof(DateTime), tbl.DiscoverColumn("SeriesDate").DataType.GetCSharpDataType());

    Assert.AreEqual("int", archive.DiscoverColumn("ffffff").DataType.SQLType);
    Assert.AreEqual("decimal(38,19)", archive.DiscoverColumn("EchoTime").DataType.SQLType);
    Assert.AreEqual(typeof(DateTime), archive.DiscoverColumn("SeriesDate").DataType.GetCSharpDataType());
}