/// <summary>
/// Pipeline teardown: releases the global cross-server extraction semaphore and then removes
/// whatever temporary objects this component created — the whole temp database when this
/// component created it (<c>_hadToCreate</c>), otherwise only the tables it added.
/// </summary>
/// <param name="listener">Receives progress notifications for each teardown step.</param>
/// <param name="pipelineFailureExceptionIfAny">Forwarded to <c>base.Dispose</c>; not inspected here.</param>
public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
{
    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to release Semaphore OneCrossServerExtractionAtATime"));

    // NOTE(review): the semaphore is released BEFORE the database/table cleanup below, so another
    // cross-server extraction may start while this one is still dropping its temp objects — confirm intended.
    // Also note nothing here is in a finally: if a Drop throws, base.Dispose is never reached.
    OneCrossServerExtractionAtATime.Release(1);
    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Released Semaphore OneCrossServerExtractionAtATime"));

    if (_hadToCreate)
    {
        //we created the db in the first place so drop the entire database
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to drop database '" + _tempDb + "'"));
        _tempDb.Drop();
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Dropped database '" + _tempDb + "' Successfully"));
    }
    else
    {
        //we did not create the database but we did create the table(s) so drop only those
        foreach (DiscoveredTable table in tablesToCleanup)
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to drop table '" + table + "'"));
            table.Drop();
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Dropped table '" + table + "'"));
        }
    }

    base.Dispose(listener, pipelineFailureExceptionIfAny);
}
/// <summary>
/// Removes the destination database from the server if it is currently present.
/// Safe to call when the database was never created.
/// </summary>
public void DropDatabases()
{
    var destination = _destinationDatabase;

    // Nothing to clean up if the database does not exist (or was already removed)
    if (!destination.Exists())
        return;

    destination.Drop();
}
/// <summary>
/// Drops the staging database, but only when one has been assigned and it actually
/// exists on the server; otherwise does nothing.
/// </summary>
public void DestroyStagingIfExists()
{
    var staging = _stagingDatabase;

    // No staging database was ever set up
    if (staging == null)
        return;

    if (staging.Exists())
        staging.Drop();
}
/// <summary>
/// Per-test teardown: removes the staging and live databases and then resets the
/// catalogue database via <c>CleanCatalogueDatabase</c>.
/// </summary>
public void AfterEachTest()
{
    // Guard with Exists() so teardown never throws (and masks the real test failure)
    // when a test did not get far enough to create one of the databases.  This matches
    // the pattern used by DestroyStagingIfExists elsewhere in this codebase.
    if (_stagingDatabase.Exists())
        _stagingDatabase.Drop();

    if (_liveDatabase.Exists())
        _liveDatabase.Drop();

    CleanCatalogueDatabase();
}
/// <summary>
/// Ensures a fresh, empty scratch database exists for tests that need somewhere to
/// create arbitrary tables, dropping any leftover copy from a previous run first.
/// </summary>
private void CreateScratchArea()
{
    string dbName = TestDatabaseNames.GetConsistentName("ScratchArea");

    DiscoveredDatabaseICanCreateRandomTablesIn = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);

    // Drop any stale copy left behind by an earlier run so we always start clean
    if (DiscoveredDatabaseICanCreateRandomTablesIn.Exists())
        DiscoveredDatabaseICanCreateRandomTablesIn.Drop();

    // Recreate it empty
    DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase(dbName);
}
/// <summary>
/// Creates the query caching database from scratch (dropping any previous copy),
/// runs the schema patcher on it, and registers it in the catalogue repository as
/// an <see cref="ExternalDatabaseServer"/>.
/// </summary>
public void Setup()
{
    DiscoveredQueryCachingDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(QueryCachingDatabaseName);

    // Start from a clean slate if a previous run left the database behind
    if (DiscoveredQueryCachingDatabase.Exists())
        DiscoveredQueryCachingDatabase.Drop();

    // Build the cache schema
    var patcher = new QueryCachingPatcher();
    var executor = new MasterDatabaseScriptExecutor(DiscoveredQueryCachingDatabase);
    executor.CreateAndPatchDatabase(patcher, new ThrowImmediatelyCheckNotifier());

    // Record the new database as a known server in the catalogue
    QueryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, QueryCachingDatabaseName, patcher);
    QueryCachingDatabaseServer.SetProperties(DiscoveredQueryCachingDatabase);
}
/// <summary>
/// Creates the identifier dump database from scratch (dropping any previous copy),
/// patches its schema, registers it in the catalogue repository and makes it the
/// server-wide default identifier dump server.
/// </summary>
public void Setup_IdentifierDump()
{
    IdentifierDump_Database = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(IdentifierDump_DatabaseName);

    // Remove any stale copy from an earlier run
    if (IdentifierDump_Database.Exists())
        IdentifierDump_Database.Drop();

    // Build the identifier dump schema
    var patcher = new IdentifierDumpDatabasePatcher();
    var executor = new MasterDatabaseScriptExecutor(IdentifierDump_Database);
    executor.CreateAndPatchDatabase(patcher, new ThrowImmediatelyCheckNotifier());

    //now create a new reference!
    IdentifierDump_ExternalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, IdentifierDump_DatabaseName, patcher);
    IdentifierDump_ExternalDatabaseServer.SetProperties(IdentifierDump_Database);

    // Make it the default identifier dump server for the whole catalogue
    CatalogueRepository.GetServerDefaults().SetDefault(PermissableDefaults.IdentifierDumpServer_ID, IdentifierDump_ExternalDatabaseServer);
}
/// <summary>
/// End-to-end test of table-valued-function (TVF) support: builds a cohort database via the
/// wizard, a normal catalogue and a TVF catalogue, runs five sub-tests against them, then
/// tears everything down in dependency order (children before parents).
/// </summary>
public void EndToEndTest()
{
    var cohortDatabaseNameWillBe = TestDatabaseNames.GetConsistentName("TbvCohort");
    _discoveredCohortDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseNameWillBe);

    //cleanup
    if (_discoveredCohortDatabase.Exists())
    {
        _discoveredCohortDatabase.Drop();
    }

    //create a normal catalogue
    CreateANormalCatalogue();

    //create a cohort database using wizard
    CreateNewCohortDatabaseWizard cohortDatabaseWizard = new CreateNewCohortDatabaseWizard(_discoveredCohortDatabase, CatalogueRepository, DataExportRepository, false);

    _externalCohortTable = cohortDatabaseWizard.CreateDatabase(
        new PrivateIdentifierPrototype(_nonTvfExtractionIdentifier)
        , new ThrowImmediatelyCheckNotifier());

    //create a table valued function
    CreateTvfCatalogue(cohortDatabaseNameWillBe);

    //Test 1
    TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException();

    PopulateCohortDatabaseWithRecordsFromNonTvfCatalogue();

    //Test 2
    TestWithParameterValueThatRowsAreReturned();

    //Test 3
    TestUsingTvfForAggregates();

    //Test 4
    TestAddingTvfToCIC();

    //Test 5
    TestDataExportOfTvf();

    //tear down — order matters: delete dependents (cohorts, parameters, aggregates)
    //before the objects they reference (cic, pipeline, catalogues, table infos).
    DataExportRepository.GetAllObjects<ExtractableCohort>().Single().DeleteInDatabase();
    _externalCohortTable.DeleteInDatabase();
    _database.ExpectTable("NonTVFTable").Drop();
    _database.ExpectTableValuedFunction("GetTopXRandom").Drop();

    //delete global parameter
    ((AnyTableSqlParameter)_aggregate.GetAllParameters().Single()).DeleteInDatabase();
    //delete aggregate
    _aggregate.DeleteInDatabase();

    ((AnyTableSqlParameter)_cicAggregate.GetAllParameters().Single()).DeleteInDatabase();
    //delete aggregate
    _cicAggregate.DeleteInDatabase();

    //get rid of the cohort identification configuration
    _cic.DeleteInDatabase();
    _pipe.DeleteInDatabase();

    //get rid of the cohort database
    _discoveredCohortDatabase.Drop();

    _nonTvfCatalogue.DeleteInDatabase();
    _nonTvfTableInfo.DeleteInDatabase();

    _tvfCatalogue.DeleteInDatabase();
    _tvfTableInfo.DeleteInDatabase();
}
/// <summary>
/// Tests cohort refreshing when the cohort identification configuration (cic) uses a query
/// cache: executes an initial extraction, wires a caching cic into the ExtractionConfiguration,
/// refreshes (expecting the version number to increment), then truncates the source data and
/// verifies that a second refresh fails with "empty dataset" AND clears the identifier cache.
/// Cleanup of the cic / cache database happens in the finally block.
/// </summary>
public void RefreshCohort_WithCaching()
{
    ExtractionPipelineUseCase useCase;
    IExecuteDatasetExtractionDestination results;

    // Build a refresh pipeline: cic source -> basic cohort destination
    var pipe = new Pipeline(CatalogueRepository, "RefreshPipeWithCaching");

    var source = new PipelineComponent(CatalogueRepository, pipe, typeof(CohortIdentificationConfigurationSource), 0);
    var args = source.CreateArgumentsForClassIfNotExists<CohortIdentificationConfigurationSource>();
    var freezeArg = args.Single(a => a.Name.Equals("FreezeAfterSuccessfulImport"));
    freezeArg.SetValue(false); // keep the cic editable so we can refresh repeatedly
    freezeArg.SaveToDatabase();

    var dest = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicCohortDestination), 0);
    var argsDest = dest.CreateArgumentsForClassIfNotExists<BasicCohortDestination>();
    var allocatorArg = argsDest.Single(a => a.Name.Equals("ReleaseIdentifierAllocator"));
    allocatorArg.SetValue(null);
    allocatorArg.SaveToDatabase();

    pipe.SourcePipelineComponent_ID = source.ID;
    pipe.DestinationPipelineComponent_ID = dest.ID;
    pipe.SaveToDatabase();

    // Run the initial extraction so there is an existing cohort to refresh
    Execute(out useCase, out results);

    var oldcohort = _configuration.Cohort;

    //Create a query cache
    var p = new QueryCachingPatcher();
    ExternalDatabaseServer queryCacheServer = new ExternalDatabaseServer(CatalogueRepository, "TestCohortRefreshing_CacheTest", p);

    DiscoveredDatabase cachedb = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("TestCohortRefreshing_CacheTest");
    if (cachedb.Exists())
    {
        cachedb.Drop();
    }

    new MasterDatabaseScriptExecutor(cachedb).CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier());
    queryCacheServer.SetProperties(cachedb);

    //Create a Cohort Identification configuration (query) that will identify the cohort
    // (fixed: removed a stray empty statement ";;" that followed this construction)
    CohortIdentificationConfiguration cic = new CohortIdentificationConfiguration(RepositoryLocator.CatalogueRepository, "RefreshCohort.cs");

    try
    {
        //make it use the cache
        cic.QueryCachingServer_ID = queryCacheServer.ID;
        cic.SaveToDatabase();

        //give it a single table query to fetch distinct chi from test data
        var agg = cic.CreateNewEmptyConfigurationForCatalogue(_catalogue, null);

        //add the sub query as the only entry in the cic (in the root container)
        cic.CreateRootContainerIfNotExists();
        cic.RootCohortAggregateContainer.AddChild(agg, 1);

        //make the ExtractionConfiguration refresh cohort query be the cic
        _configuration.CohortIdentificationConfiguration_ID = cic.ID;
        _configuration.CohortRefreshPipeline_ID = pipe.ID;
        _configuration.SaveToDatabase();

        //get a refreshing engine
        var engine = new CohortRefreshEngine(new ThrowImmediatelyDataLoadEventListener(), _configuration);
        engine.Execute();

        Assert.NotNull(engine.Request.NewCohortDefinition);

        // Refreshing should keep the description but bump the version by exactly one
        var oldData = oldcohort.GetExternalData();
        Assert.AreEqual(oldData.ExternalDescription, engine.Request.NewCohortDefinition.Description);
        Assert.AreEqual(oldData.ExternalVersion + 1, engine.Request.NewCohortDefinition.Version);

        Assert.AreNotEqual(oldcohort.CountDistinct, engine.Request.CohortCreatedIfAny.CountDistinct);

        //now nuke all data in the catalogue so the cic returns nobody (except that the identifiers are cached eh?)
        DataAccessPortal.GetInstance().ExpectDatabase(_tableInfo, DataAccessContext.InternalDataProcessing).ExpectTable(_tableInfo.GetRuntimeName()).Truncate();

        var toMem = new ToMemoryDataLoadEventListener(false);

        //get a new engine
        engine = new CohortRefreshEngine(toMem, _configuration);

        //execute it
        var ex = Assert.Throws<Exception>(() => engine.Execute());
        Assert.IsTrue(ex.InnerException.InnerException.Message.Contains("CohortIdentificationCriteria execution resulted in an empty dataset")); //expected this message to happen

        //that it did clear the cache
        Assert.AreEqual(1, toMem.EventsReceivedBySender.SelectMany(kvp => kvp.Value).Count(msg => msg.Message.Equals("Clearing Cohort Identifier Cache")));
    }
    finally
    {
        //make the ExtractionConfiguration not use the cic query
        _configuration.CohortRefreshPipeline_ID = null;
        _configuration.CohortIdentificationConfiguration_ID = null;
        _configuration.SaveToDatabase();

        //delete the cic query
        cic.QueryCachingServer_ID = null;
        cic.SaveToDatabase();
        cic.DeleteInDatabase();

        //delete the caching database
        queryCacheServer.DeleteInDatabase();
        cachedb.Drop();
    }
}
/// <summary>
/// Tests extraction to a SQL Server destination database: adds a transform column
/// (YearOfBirth = YEAR(DateOfBirth)) to the configuration, runs a full extraction and
/// asserts the destination table contents and column data types.  Cleanup of the
/// extraction server reference and destination database happens in the finally block.
/// </summary>
public void SQLServerDestination()
{
    DiscoveredDatabase dbToExtractTo = null;

    var ci = new CatalogueItem(CatalogueRepository, _catalogue, "YearOfBirth");
    var columnToTransform = _columnInfos.Single(c => c.GetRuntimeName().Equals("DateOfBirth", StringComparison.CurrentCultureIgnoreCase));

    // SQL transform that derives the extraction column from DateOfBirth
    string transform = "YEAR(" + columnToTransform.Name + ")";

    var ei = new ExtractionInformation(CatalogueRepository, ci, columnToTransform, transform);
    ei.Alias = "YearOfBirth";
    ei.ExtractionCategory = ExtractionCategory.Core;
    ei.SaveToDatabase();

    //make it part of the ExtractionConfiguration
    var newColumn = new ExtractableColumn(DataExportRepository, _selectedDataSet.ExtractableDataSet, (ExtractionConfiguration)_selectedDataSet.ExtractionConfiguration, ei, 0, ei.SelectSQL);
    newColumn.Alias = ei.Alias;
    newColumn.SaveToDatabase();

    _extractableColumns.Add(newColumn);

    //recreate the extraction command so it gets updated with the new column too.
    _request = new ExtractDatasetCommand(_configuration, _extractableCohort, new ExtractableDatasetBundle(_extractableDataSet), _extractableColumns, new HICProjectSalt(_project), new ExtractionDirectory(@"C:\temp\", _configuration));

    try
    {
        _configuration.Name = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest";
        _configuration.SaveToDatabase();

        ExtractionPipelineUseCase execute;
        IExecuteDatasetExtractionDestination result;

        var dbname = TestDatabaseNames.GetConsistentName(_project.Name + "_" + _project.ProjectNumber);
        dbToExtractTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbname);
        if (dbToExtractTo.Exists())
        {
            dbToExtractTo.Drop();
        }

        base.Execute(out execute, out result);

        var destinationTable = dbToExtractTo.ExpectTable(_expectedTableName);
        Assert.IsTrue(destinationTable.Exists());

        var dt = destinationTable.GetDataTable();

        // Exactly one extracted row with the expected release identifier and values
        Assert.AreEqual(1, dt.Rows.Count);
        Assert.AreEqual(_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()].Trim(), dt.Rows[0]["ReleaseID"]);
        Assert.AreEqual(new DateTime(2001, 1, 1), dt.Rows[0]["DateOfBirth"]);
        Assert.AreEqual(2001, dt.Rows[0]["YearOfBirth"]);

        // Source column type is preserved; the transform column materialises as int
        Assert.AreEqual(columnToTransform.Data_type, destinationTable.DiscoverColumn("DateOfBirth").DataType.SQLType);
        Assert.AreEqual("int", destinationTable.DiscoverColumn("YearOfBirth").DataType.SQLType);
    }
    finally
    {
        if (_extractionServer != null)
        {
            _extractionServer.DeleteInDatabase();
        }

        // Fixed: guard with Exists() so cleanup cannot throw (masking the real test
        // failure) when extraction failed before recreating the database.  This matches
        // the guard used by the bool-parameterised overload of this test.
        if (dbToExtractTo != null && dbToExtractTo.Exists())
        {
            dbToExtractTo.Drop();
        }
    }
}
/// <summary>
/// Builds a complete example/demo environment in <paramref name="db"/>: creates the database,
/// generates deterministic synthetic people and datasets (biochemistry, demography,
/// prescribing, admissions, carotid scans), configures extractability/graphs/filters,
/// creates a cohort store plus an example project with three extraction configurations,
/// and releases them all.
/// </summary>
/// <param name="db">Target database; dropped first when <c>options.DropDatabases</c> is set.</param>
/// <param name="notifier">Receives progress/check events throughout.</param>
/// <param name="options">Controls seed, row/people counts and whether an existing db may be dropped.</param>
/// <exception cref="Exception">If the database already exists and dropping was not allowed,
/// or no cohort committing pipeline can be found.</exception>
internal void Create(DiscoveredDatabase db, ICheckNotifier notifier, PlatformDatabaseCreationOptions options)
{
    if (db.Exists())
    {
        if (options.DropDatabases)
        {
            db.Drop();
        }
        else
        {
            throw new Exception("Database " + db.GetRuntimeName() + " already exists and allowDrop option was not specified");
        }
    }

    notifier.OnCheckPerformed(new CheckEventArgs("About to create " + db.GetRuntimeName(), CheckResult.Success));

    //create a new database for the datasets
    db.Create();

    // NOTE(review): "Succesfully" typo below is in a runtime message string, left as-is
    notifier.OnCheckPerformed(new CheckEventArgs("Succesfully created " + db.GetRuntimeName(), CheckResult.Success));

    //fixed seed so everyone gets the same datasets
    var r = new Random(options.Seed);

    notifier.OnCheckPerformed(new CheckEventArgs("Generating people", CheckResult.Success));

    //people
    var people = new PersonCollection();
    people.GeneratePeople(options.NumberOfPeople, r);

    //datasets (the trailing string arguments are the columns to index/pivot per dataset)
    var biochem = ImportCatalogue(Create<Biochemistry>(db, people, r, notifier, options.NumberOfRowsPerDataset, "chi", "Healthboard", "SampleDate", "TestCode"));
    var demography = ImportCatalogue(Create<Demography>(db, people, r, notifier, options.NumberOfRowsPerDataset, "chi", "dtCreated", "hb_extract"));
    var prescribing = ImportCatalogue(Create<Prescribing>(db, people, r, notifier, options.NumberOfRowsPerDataset, "chi", "PrescribedDate", "Name")); //<- this is slooo!
    var admissions = ImportCatalogue(Create<HospitalAdmissions>(db, people, r, notifier, options.NumberOfRowsPerDataset, "chi", "AdmissionDate"));
    // carotid dataset is created but deliberately NOT imported as a catalogue
    var carotid = Create<CarotidArteryScan>(db, people, r, notifier, options.NumberOfRowsPerDataset, "RECORD_NUMBER");

    //the following should not be extractable
    ForExtractionInformations(demography,
        e => e.DeleteInDatabase(),
        "chi_num_of_curr_record",
        "surname",
        "forename",
        "current_address_L1",
        "current_address_L2",
        "current_address_L3",
        "current_address_L4",
        "birth_surname",
        "previous_surname",
        "midname",
        "alt_forename",
        "other_initials",
        "previous_address_L1",
        "previous_address_L2",
        "previous_address_L3",
        "previous_address_L4",
        "previous_postcode",
        "date_address_changed",
        "adr",
        "previous_gp_accept_date",
        "hic_dataLoadRunID");

    //the following should be special approval only
    ForExtractionInformations(demography,
        e =>
        {
            e.ExtractionCategory = ExtractionCategory.SpecialApprovalRequired;
            e.SaveToDatabase();
        },
        "current_postcode",
        "current_gp",
        "previous_gp",
        "date_of_birth");

    CreateAdmissionsViews(db);
    var vConditions = ImportCatalogue(db.ExpectTable("vConditions"));
    var vOperations = ImportCatalogue(db.ExpectTable("vOperations"));

    // Example graphs and filters on the generated catalogues
    CreateGraph(biochem, "Test Codes", "TestCode", false, null);
    CreateGraph(biochem, "Test Codes By Date", "SampleDate", true, "TestCode");

    CreateFilter(biochem, "Creatinine", "TestCode", "TestCode like '%CRE%'", @"Serum creatinine is a blood measurement. 
It is an indicator of renal health.");

    CreateFilter(biochem, "Test Code", "TestCode", "TestCode like @code", "Filters any test code set");

    CreateExtractionInformation(demography, "Age", "date_of_birth", "FLOOR(DATEDIFF(DAY, date_of_birth, GETDATE()) / 365.25) As Age");
    var fAge = CreateFilter(demography, "Older at least x years", "Age", "FLOOR(DATEDIFF(DAY, date_of_birth, GETDATE()) / 365.25) >= @age", "Patients age is greater than or equal to the provided @age");
    SetParameter(fAge, "@age", "int", "16");

    CreateGraph(demography, "Patient Ages", "Age", false, null);

    CreateGraph(prescribing, "Approved Name", "ApprovedName", false, null);
    CreateGraph(prescribing, "Approved Name Over Time", "PrescribedDate", true, "ApprovedName");

    CreateGraph(prescribing, "Bnf", "FormattedBnfCode", false, null);
    CreateGraph(prescribing, "Bnf Over Time", "PrescribedDate", true, "FormattedBnfCode");

    CreateFilter(
        CreateGraph(vConditions, "Conditions frequency", "Field", false, "Condition"),
        "Common Conditions Only",
        @"(Condition in (select top 40 Condition from vConditions c WHERE Condition <> 'NULL' AND Condition <> 'Nul' group by Condition order by count(*) desc))");

    CreateFilter(
        CreateGraph(vOperations, "Operation frequency", "Field", false, "Operation"),
        "Common Operation Only",
        @"(Operation in (select top 40 Operation from vOperations c WHERE Operation <> 'NULL' AND Operation <> 'Nul' group by Operation order by count(*) desc))");

    //group these all into the same folder
    admissions.Folder = new CatalogueFolder(admissions, @"\admissions");
    admissions.SaveToDatabase();

    vConditions.Folder = new CatalogueFolder(vConditions, @"\admissions");
    vConditions.SaveToDatabase();

    vOperations.Folder = new CatalogueFolder(vOperations, @"\admissions");
    vOperations.SaveToDatabase();

    //Create cohort store database
    var wizard = new CreateNewCohortDatabaseWizard(db, _repos.CatalogueRepository, _repos.DataExportRepository, false);
    var externalCohortTable = wizard.CreateDatabase(new PrivateIdentifierPrototype("chi", "varchar(10)"), new ThrowImmediatelyCheckNotifier());

    //Find the pipeline for committing cohorts
    var cohortCreationPipeline = _repos.CatalogueRepository.GetAllObjects<Pipeline>().FirstOrDefault(p => p?.Source?.Class == typeof(CohortIdentificationConfigurationSource).FullName);

    if (cohortCreationPipeline == null)
    {
        throw new Exception("Could not find a cohort committing pipeline");
    }

    //A cohort creation query
    var f = CreateFilter(vConditions, "Lung Cancer Condition", "Condition", "Condition like 'C349'", "ICD-10-CM Diagnosis Code C34.9 Malignant neoplasm of unspecified part of bronchus or lung");

    var cic = CreateCohortIdentificationConfiguration((ExtractionFilter)f);

    var cohort = CommitCohortToNewProject(cic, externalCohortTable, cohortCreationPipeline, "Lung Cancer Project", "P1 Lung Cancer Patients", 123, out Project project);

    var cohortTable = cohort.ExternalCohortTable.DiscoverCohortTable();
    using (var con = cohortTable.Database.Server.GetConnection())
    {
        con.Open();
        //delete half the records (so we can simulate cohort refresh)
        var cmd = cohortTable.Database.Server.GetCommand(string.Format("DELETE TOP (10) PERCENT from {0}", cohortTable.GetFullyQualifiedName()), con);
        cmd.ExecuteNonQuery();
    }

    // Three successive extraction configurations simulating yearly refreshes of project 123
    var ec1 = CreateExtractionConfiguration(project, cohort, "First Extraction (2016 - project 123)", true, notifier, biochem, prescribing, demography);
    var ec2 = CreateExtractionConfiguration(project, cohort, "Project 123 - 2017 Refresh", true, notifier, biochem, prescribing, demography, admissions);
    var ec3 = CreateExtractionConfiguration(project, cohort, "Project 123 - 2018 Refresh", true, notifier, biochem, prescribing, demography, admissions);

    ReleaseAllConfigurations(notifier, ec1, ec2, ec3);
}
/// <summary>
/// Parameterised variant of the SQL Server destination extraction test: optionally creates
/// lookups/supporting objects, runs a full extraction to a database and asserts the destination
/// table contents/types (and, when <paramref name="lookupsEtc"/> is set, that the supporting
/// objects were also extracted).  Destination database and pipeline are cleaned up in finally.
/// </summary>
/// <param name="lookupsEtc">When true also creates and asserts lookup/supporting objects.</param>
public void SQLServerDestination(bool lookupsEtc)
{
    DiscoveredDatabase dbToExtractTo = null;

    var ci = new CatalogueItem(CatalogueRepository, _catalogue, "YearOfBirth");
    _columnToTransform = _columnInfos.Single(c => c.GetRuntimeName().Equals("DateOfBirth", StringComparison.CurrentCultureIgnoreCase));

    string transform = "YEAR(" + _columnToTransform.Name + ")";

    // Only add the YearOfBirth transform column once — this test can run more than once
    // against the same catalogue (e.g. for both values of lookupsEtc)
    if (_catalogue.GetAllExtractionInformation(ExtractionCategory.Any).All(ei => ei.GetRuntimeName() != "YearOfBirth"))
    {
        var ei = new ExtractionInformation(CatalogueRepository, ci, _columnToTransform, transform);
        ei.Alias = "YearOfBirth";
        ei.ExtractionCategory = ExtractionCategory.Core;
        ei.SaveToDatabase();

        //make it part of the ExtractionConfiguration
        var newColumn = new ExtractableColumn(DataExportRepository, _selectedDataSet.ExtractableDataSet, (ExtractionConfiguration)_selectedDataSet.ExtractionConfiguration, ei, 0, ei.SelectSQL);
        newColumn.Alias = ei.Alias;
        newColumn.SaveToDatabase();

        _extractableColumns.Add(newColumn);
    }

    if (lookupsEtc)
    {
        CreateLookupsEtc();
    }

    try
    {
        _configuration.Name = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest";
        _configuration.SaveToDatabase();

        var dbname = TestDatabaseNames.GetConsistentName(_project.Name + "_" + _project.ProjectNumber);
        dbToExtractTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbname);
        if (dbToExtractTo.Exists())
        {
            dbToExtractTo.Drop();
        }

        base.ExecuteRunner();

        var destinationTable = dbToExtractTo.ExpectTable(_expectedTableName);
        Assert.IsTrue(destinationTable.Exists());

        var dt = destinationTable.GetDataTable();

        // Exactly one extracted row with the expected release identifier and values
        Assert.AreEqual(1, dt.Rows.Count);
        Assert.AreEqual(_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()].Trim(), dt.Rows[0]["ReleaseID"]);
        Assert.AreEqual(new DateTime(2001, 1, 1), dt.Rows[0]["DateOfBirth"]);
        Assert.AreEqual(2001, dt.Rows[0]["YearOfBirth"]);

        // Source column type is preserved; the transform column materialises as int
        Assert.AreEqual(_columnToTransform.Data_type, destinationTable.DiscoverColumn("DateOfBirth").DataType.SQLType);
        Assert.AreEqual("int", destinationTable.DiscoverColumn("YearOfBirth").DataType.SQLType);

        if (lookupsEtc)
        {
            AssertLookupsEtcExist(dbToExtractTo);
        }
    }
    finally
    {
        // Exists() guard prevents cleanup throwing when extraction failed before
        // the destination database was recreated
        if (dbToExtractTo != null && dbToExtractTo.Exists())
        {
            dbToExtractTo.Drop();
        }

        _pipeline?.DeleteInDatabase();
    }
}
/// <summary>
/// Tears down the query cache: drops the physical cache database from the server,
/// then deletes its catalogue reference.
/// </summary>
public void DropDatabases()
{
    // Physical database first...
    queryCacheDatabase.Drop();

    // ...then the catalogue record that pointed at it
    externalDatabaseServer.DeleteInDatabase();
}
/// <summary>
/// Drops the <see cref="BulkDataDatabase"/> from the server.
/// </summary>
public void Destroy() => BulkDataDatabase.Drop();
/// <summary>
/// Tests that when a joinable aggregate (patient index table) has its results committed to a
/// query cache, the <see cref="CohortQueryBuilder"/> joins against the cached table
/// (<c>JoinableInceptionQuery_AggregateConfigurationX</c>) instead of re-running the subquery.
/// Asserts both that the generated SQL executes (returns at least one row) and that its exact
/// text matches the expected cached-join form.  Nested finally blocks undo the joinable and
/// then the cache database/reference.
/// </summary>
public void JoinablesWithCache()
{
    const string queryCachingDatabaseName = "MyQueryCachingDatabase";

    var builder = new CohortQueryBuilder(aggregate1, null);

    //make aggregate 2 a joinable
    var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate2);
    joinable2.AddUser(aggregate1);

    //make aggregate 2 have an additional column (dtCreated)
    var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName().Equals("dtCreated"));
    aggregate2.AddDimension(anotherCol);

    _queryCachingDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(queryCachingDatabaseName);
    if (_queryCachingDatabase.Exists())
    {
        _queryCachingDatabase.Drop(); //make sure it doesn't exist
    }

    // Build the query cache schema and register it in the catalogue
    MasterDatabaseScriptExecutor scripter = new MasterDatabaseScriptExecutor(_queryCachingDatabase);
    scripter.CreateAndPatchDatabase(new QueryCachingPatcher(), new AcceptAllCheckNotifier());

    var queryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, queryCachingDatabaseName, null);
    queryCachingDatabaseServer.SetProperties(_queryCachingDatabase);

    //make the builder use the query cache we just set up
    builder.CacheServer = queryCachingDatabaseServer;

    try
    {
        // Run the joinable's own query and commit its results into the cache so the
        // outer builder can join against the cached copy
        var builderForCaching = new CohortQueryBuilder(aggregate2, null, true);

        var cacheDt = new DataTable();
        using (SqlConnection con = (SqlConnection)DiscoveredDatabaseICanCreateRandomTablesIn.Server.GetConnection())
        {
            con.Open();
            SqlDataAdapter da = new SqlDataAdapter(new SqlCommand(builderForCaching.SQL, con));
            da.Fill(cacheDt);
        }

        var cacheManager = new CachedAggregateConfigurationResultsManager(queryCachingDatabaseServer);
        cacheManager.CommitResults(new CacheCommitJoinableInceptionQuery(aggregate2, builderForCaching.SQL, cacheDt, null, 30));

        try
        {
            Console.WriteLine(builder.SQL);

            using (var con = (SqlConnection)DiscoveredDatabaseICanCreateRandomTablesIn.Server.GetConnection())
            {
                con.Open();

                var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader();

                //can read at least one row
                Assert.IsTrue(dbReader.Read());
            }

            // Alias the builder assigns to the joined (cached) patient index table
            string expectedTableAlias = "ix" + joinable2.ID;

            //after joinables — the generated SQL must LEFT Join against the cached
            //JoinableInceptionQuery table rather than inlining aggregate2's query
            Assert.AreEqual(
                CollapseWhitespace(
                    string.Format(
                        @"/*cic_{2}_UnitTestAggregate1*/ SELECT distinct [" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData].[chi] FROM [" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData] LEFT Join ( /*Cached:cic_{2}_UnitTestAggregate2*/ select * from [MyQueryCachingDatabase]..[JoinableInceptionQuery_AggregateConfiguration{1}] ){0} on [" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData].[chi] = {0}.chi",
                        expectedTableAlias,
                        aggregate2.ID,
                        cohortIdentificationConfiguration.ID)),
                CollapseWhitespace(builder.SQL));
        }
        finally
        {
            joinable2.Users[0].DeleteInDatabase();
            joinable2.DeleteInDatabase();
        }
    }
    finally
    {
        queryCachingDatabaseServer.DeleteInDatabase();
        DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(queryCachingDatabaseName).Drop();
    }
}