/// <summary>
/// Starts creation of a new platform database described by the Server/Database/credentials
/// textboxes.  Validates input, shows the SQL that will run, then performs the create+patch
/// on a background thread so the UI stays responsive.
/// </summary>
private void btnCreate_Click(object sender, EventArgs e)
{
    // Validate before constructing any helper objects (the original built the preview
    // window and executor even when validation failed and never disposed the dialog).
    if (string.IsNullOrWhiteSpace(tbDatabase.Text) || string.IsNullOrWhiteSpace(tbServer.Text))
    {
        MessageBox.Show("You must specify both a Server and a Database");
        return;
    }

    if (_completed)
    {
        MessageBox.Show("Setup completed already, review progress messages then close Form");
        return;
    }

    // Refuse to start a second creation thread while one is still running.
    if (_tCreateDatabase != null && _tCreateDatabase.IsAlive)
    {
        MessageBox.Show("Setup already underaway, Thread State is:" + _tCreateDatabase.ThreadState);
        return;
    }

    var executor = new MasterDatabaseScriptExecutor(tbServer.Text, tbDatabase.Text, tbUsername.Text, tbPassword.Text);

    // Let the user confirm the SQL; modal WinForms dialogs must be disposed explicitly.
    using (var preview = new SQLPreviewWindow("Confirm happiness with SQL", "The following SQL is about to be executed:", _createSql))
    {
        if (preview.ShowDialog() != DialogResult.OK)
            return;
    }

    // Run creation + patching off the UI thread; results accumulate into the checks UI.
    _tCreateDatabase = new Thread(() =>
    {
        var memory = new ToMemoryCheckNotifier(checksUI1);

        if (executor.CreateDatabase(_createSql, _initialVersionNumber, memory))
        {
            _completed = executor.PatchDatabase(_patches, memory, silentlyApplyPatchCallback);
            GenerateConnectionStringThenCopy();

            var worst = memory.GetWorst();

            // Offer to close the form only when nothing worse than a Warning occurred.
            if (worst == CheckResult.Success || worst == CheckResult.Warning)
                if (MessageBox.Show("Succesfully created database, close form?", "Success", MessageBoxButtons.YesNo) == DialogResult.Yes)
                {
                    _programaticClose = true;
                    Invoke(new MethodInvoker(Close)); // marshal Close back onto the UI thread
                }
        }
        else
        {
            _completed = false; //failed to create database
        }
    });
    _tCreateDatabase.Start();
}
/// <summary>
/// Creating a brand new database via <see cref="MasterDatabaseScriptExecutor"/> should run the
/// supplied initial script and also create the housekeeping Version table.
/// </summary>
public void MasterDatabaseScriptExecutor_CreateDatabase()
{
    // Ensure a clean starting point: drop the target database if a previous run left it behind.
    string dbName = "CreateANewCatalogueDatabaseWithMasterDatabaseScriptExecutor";
    var targetDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);
    if (targetDatabase.Exists())
        targetDatabase.Drop();

    // Create the database and run the initial script (which creates table Bob).
    var scriptExecutor = new MasterDatabaseScriptExecutor(targetDatabase);
    scriptExecutor.CreateDatabase(@" CREATE TABLE Bob ( age int ) GO", "1.0.0.0", new ThrowImmediatelyCheckNotifier());

    // Both the housekeeping Version table and the scripted Bob table should now exist.
    Assert.IsTrue(targetDatabase.ExpectTable("Version").Exists());
    Assert.IsTrue(targetDatabase.ExpectTable("Bob").Exists());

    targetDatabase.Drop();
}
/// <summary>
/// Drops any existing ANO database then creates and patches a fresh one using the ANO schema.
/// </summary>
private void CreateANODatabase()
{
    // Start from a clean slate before deploying the schema.
    DropANODatabase();

    new MasterDatabaseScriptExecutor(ANOStore_Database)
        .CreateAndPatchDatabase(new ANOStorePatcher(), new ThrowImmediatelyCheckNotifier());
}
/// <summary>
/// Deploys the schema of the given patcher type onto a clean database of the given DBMS type.
/// </summary>
public void Create_QueryCache(DatabaseType dbType, Type patcherType)
{
    var targetDb = GetCleanedServer(dbType);

    // Instantiate the patcher under test via reflection, then create and patch the schema.
    var patcherInstance = (Patcher)Activator.CreateInstance(patcherType);
    var scriptExecutor = new MasterDatabaseScriptExecutor(targetDb);
    scriptExecutor.CreateAndPatchDatabase(patcherInstance, new AcceptAllCheckNotifier());
}
/// <summary>
/// Creates (or patches) the unit test query cache database and registers it in the catalogue
/// as an <c>ExternalDatabaseServer</c>.
/// </summary>
public void SetUpCache()
{
    // Reference the test query cache database by its prefixed name.
    queryCacheDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.Prefix + "QueryCache");

    // Deploy the query caching schema into it.
    var patcher = new QueryCachingPatcher();
    var scriptExecutor = new MasterDatabaseScriptExecutor(queryCacheDatabase);
    scriptExecutor.CreateAndPatchDatabase(patcher, new ThrowImmediatelyCheckNotifier());

    // Record the database as a server reference in the catalogue repository.
    externalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, "QueryCacheForUnitTests", patcher);
    externalDatabaseServer.SetProperties(queryCacheDatabase);
}
/// <summary>
/// Deploys the query caching schema into <paramref name="db"/> and returns a catalogue server
/// reference ("Cache") pointed at it.
/// </summary>
private ExternalDatabaseServer CreateCache(DiscoveredDatabase db)
{
    // Create/patch the cache schema.
    var cachePatcher = new QueryCachingPatcher();
    new MasterDatabaseScriptExecutor(db).CreateAndPatchDatabase(cachePatcher, new AcceptAllCheckNotifier());

    // Register and return the server reference.
    var cacheServer = new ExternalDatabaseServer(CatalogueRepository, "Cache", cachePatcher);
    cacheServer.SetProperties(db);
    return cacheServer;
}
/// <summary>
/// A database already set up for one platform schema must refuse to be set up for another.
/// </summary>
public void TestCreatingSchemaTwice()
{
    var database = GetCleanedServer(DatabaseType.MicrosoftSQLServer);
    var scriptExecutor = new MasterDatabaseScriptExecutor(database);

    //setup as DQE
    scriptExecutor.CreateAndPatchDatabase(new DataQualityEnginePatcher(), new AcceptAllCheckNotifier());

    //now try to setup same db as Logging
    var ex = Assert.Throws<Exception>(() => scriptExecutor.CreateAndPatchDatabase(new LoggingDatabasePatcher(), new AcceptAllCheckNotifier()));
    StringAssert.Contains("is already set up as a platform database for another schema (it has the 'ScriptsRun' table)", ex.InnerException.Message);
}
/// <summary>
/// Checks that <see cref="UIDMappingServer"/> points at a database set up with the UID mapping
/// schema, offering (via <paramref name="notifier"/>) to create the database/schema when the
/// server reference is blank.  Fails the check if the server was created by a different schema.
/// </summary>
public void Check(ICheckNotifier notifier)
{
    // Serialize server creation attempts across concurrent checkers.
    lock (CreateServersOneAtATime)
    {
        if (UIDMappingServer == null)
        {
            throw new Exception($"{nameof(UIDMappingServer)} not set, set it existing UID mapping server or to an empty database to create a new one");
        }

        var patcher = new SMIDatabasePatcher();

        // Already set up with the expected schema - nothing to do.
        if (UIDMappingServer.WasCreatedBy(patcher))
        {
            return;
        }

        // A non-blank CreatedByAssembly that is not ours means the server belongs to some other schema.
        if (!string.IsNullOrWhiteSpace(UIDMappingServer.CreatedByAssembly))
        {
            notifier.OnCheckPerformed(new CheckEventArgs($"{nameof(UIDMappingServer)} '{UIDMappingServer}' was created by '{UIDMappingServer.CreatedByAssembly}' not a UID patcher. Try creating a new server reference to a blank database", CheckResult.Fail));
            return;
        }

        // Blank reference - ask the notifier whether we may create the schema (proposed fix).
        bool create = notifier.OnCheckPerformed(new CheckEventArgs($"{nameof(UIDMappingServer)} is not set up yet", CheckResult.Warning, null, "Attempt to create UID mapping schema"));
        if (!create)
        {
            return;
        }

        var db = UIDMappingServer.Discover(ReusableLibraryCode.DataAccess.DataAccessContext.DataExport);

        // Create the physical database first if it does not exist yet.
        if (!db.Exists())
        {
            notifier.OnCheckPerformed(new CheckEventArgs($"About to create {db}", CheckResult.Success));
            db.Create();
        }

        notifier.OnCheckPerformed(new CheckEventArgs($"Creating UID Mapping schema in {db}", CheckResult.Success));

        // Deploy the schema then record ownership on the server reference.
        var scripter = new MasterDatabaseScriptExecutor(db);
        scripter.CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier());

        UIDMappingServer.CreatedByAssembly = patcher.Name;
        UIDMappingServer.SaveToDatabase();
    }
}
/// <summary>
/// Drops and recreates the query caching test database, then registers it in the catalogue.
/// </summary>
public void Setup()
{
    // Guarantee a fresh database for every test run.
    DiscoveredQueryCachingDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(QueryCachingDatabaseName);
    if (DiscoveredQueryCachingDatabase.Exists())
        DiscoveredQueryCachingDatabase.Drop();

    // Deploy the query caching schema.
    var patcher = new QueryCachingPatcher();
    var executor = new MasterDatabaseScriptExecutor(DiscoveredQueryCachingDatabase);
    executor.CreateAndPatchDatabase(patcher, new ThrowImmediatelyCheckNotifier());

    // Register the database as a catalogue server reference.
    QueryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, QueryCachingDatabaseName, patcher);
    QueryCachingDatabaseServer.SetProperties(DiscoveredQueryCachingDatabase);
}
/// <summary>
/// Creates the schema of <paramref name="patcher"/> in the (already picked) database
/// <paramref name="db"/> and returns a new catalogue server reference pointing at it.
/// </summary>
/// <exception cref="ArgumentException">Thrown when no database has been picked.</exception>
public virtual ExternalDatabaseServer CreateNewPlatformDatabase(ICatalogueRepository catalogueRepository, PermissableDefaults defaultToSet, IPatcher patcher, DiscoveredDatabase db)
{
    if (db == null)
        throw new ArgumentException($"Database must be picked before calling {nameof(CreateNewPlatformDatabase)} when using {nameof(BasicActivateItems)}", nameof(db));

    // Deploy the schema.
    new MasterDatabaseScriptExecutor(db).CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier());

    // Name the reference after the default role it will fill (if any).
    var serverName = "New " + (defaultToSet == PermissableDefaults.None ? "" : defaultToSet.ToString()) + "Server";
    var eds = new ExternalDatabaseServer(catalogueRepository, serverName, patcher);
    eds.SetProperties(db);
    return eds;
}
/// <summary>
/// Running CreateDatabase against a pre-existing (empty) database should succeed when the
/// notifier accepts the "database already exists" fix, creating both Version and Bob tables.
/// </summary>
public void MasterDatabaseScriptExecutor_CreateDatabase()
{
    var targetDatabase = GetCleanedServer(DatabaseType.MicrosoftSQLServer);
    var scriptExecutor = new MasterDatabaseScriptExecutor(targetDatabase);

    //database already exists so accept creating it into an existing (empty) db
    scriptExecutor.CreateDatabase(@" CREATE TABLE Bob ( age int ) GO", "1.0.0.0", new AcceptAllCheckNotifier());

    // Both the housekeeping Version table and the scripted Bob table should now exist.
    Assert.IsTrue(targetDatabase.ExpectTable("Version").Exists());
    Assert.IsTrue(targetDatabase.ExpectTable("Bob").Exists());
}
/// <summary>
/// One-time fixture setup: empties (or creates) the query cache database, deploys the caching
/// schema and registers it as "QueryCacheForUnitTests" in the catalogue.
/// </summary>
protected override void OneTimeSetUp()
{
    base.OneTimeSetUp();

    // Reuse the database across runs but clear its tables first if it already exists.
    queryCacheDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.Prefix + "QueryCache");
    if (queryCacheDatabase.Exists())
        base.DeleteTables(queryCacheDatabase);

    // Deploy the query caching schema.
    var patcher = new QueryCachingPatcher();
    new MasterDatabaseScriptExecutor(queryCacheDatabase).CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier());

    // Register it in the catalogue.
    externalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, "QueryCacheForUnitTests", patcher);
    externalDatabaseServer.SetProperties(queryCacheDatabase);
}
/// <summary>
/// Drops/recreates the identifier dump database, registers it in the catalogue and makes it
/// the catalogue-wide default identifier dump server.
/// </summary>
public void Setup_IdentifierDump()
{
    // Remove any leftover copy from an earlier run.
    IdentifierDump_Database = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(IdentifierDump_DatabaseName);
    if (IdentifierDump_Database.Exists())
        IdentifierDump_Database.Drop();

    // Deploy the identifier dump schema.
    var patcher = new IdentifierDumpDatabasePatcher();
    new MasterDatabaseScriptExecutor(IdentifierDump_Database).CreateAndPatchDatabase(patcher, new ThrowImmediatelyCheckNotifier());

    //now create a new reference!
    IdentifierDump_ExternalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, IdentifierDump_DatabaseName, patcher);
    IdentifierDump_ExternalDatabaseServer.SetProperties(IdentifierDump_Database);

    // Make it the default identifier dump server for the whole catalogue.
    CatalogueRepository.GetServerDefaults().SetDefault(PermissableDefaults.IdentifierDumpServer_ID, IdentifierDump_ExternalDatabaseServer);
}
/// <summary>
/// Applies all outstanding patches to the current database (offering a backup first), reports
/// progress to the checks UI and offers an application restart on success.
/// </summary>
private void btnAttemptPatching_Click(object sender, EventArgs e)
{
    try
    {
        var notifier = new ToMemoryCheckNotifier(checksUI1);
        var executor = new MasterDatabaseScriptExecutor(_database);

        // Apply patches; the final lambda asks the user whether to back up first.
        executor.PatchDatabase(_patcher, notifier, PreviewPatch,
            () => MessageBox.Show("Backup Database First", "Backup", MessageBoxButtons.YesNo) == DialogResult.Yes);

        //if it crashed during patching
        if (notifier.GetWorst() == CheckResult.Fail)
        {
            btnAttemptPatching.Enabled = true;
            return;
        }

        notifier.OnCheckPerformed(new CheckEventArgs("Patching completed without exception, disabling the patching button", CheckResult.Success, null));

        //patching worked so prevent them doing it again!
        btnAttemptPatching.Enabled = false;

        if (_repository != null)
        {
            _repository.ClearUpdateCommandCache();
            checksUI1.OnCheckPerformed(new CheckEventArgs("Cleared UPDATE commands cache", CheckResult.Success, null));
        }

        checksUI1.OnCheckPerformed(new CheckEventArgs("Patching Succesful", CheckResult.Success, null));

        if (MessageBox.Show("Application will now restart", "Close?", MessageBoxButtons.YesNo) == DialogResult.Yes)
            ApplicationRestarter.Restart();
    }
    catch (Exception exception)
    {
        checksUI1.OnCheckPerformed(new CheckEventArgs("Patching failed", CheckResult.Fail, exception));
    }
}
/// <summary>
/// Creates (optionally first dropping) the named platform database and deploys the patcher's
/// schema into it.  Returns the connection string builder for the created database.
/// </summary>
private SqlConnectionStringBuilder Create(string databaseName, IPatcher patcher, PlatformDatabaseCreationOptions options)
{
    // Drop pooled connections so a previous run cannot hold the database open.
    SqlConnection.ClearAllPools();

    var builder = options.GetBuilder(databaseName);
    var targetDb = new DiscoveredServer(builder).ExpectDatabase(builder.InitialCatalog);

    if (options.DropDatabases && targetDb.Exists())
    {
        Console.WriteLine("Dropping Database:" + builder.InitialCatalog);
        targetDb.Drop();
    }

    // Deploy the schema, honouring the requested collation.
    var executor = new MasterDatabaseScriptExecutor(targetDb)
    {
        BinaryCollation = options.BinaryCollation
    };
    executor.CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier());

    Console.WriteLine("Created " + builder.InitialCatalog + " on server " + builder.DataSource);
    return builder;
}
/// <summary>
/// Runs a cohort compilation with a query cache configured and asserts the run finishes, the
/// root container results are cached, and the identifier count matches the source data.
/// </summary>
public void CacheIdentifierListWithRunner_WithCaching()
{
    // Build a cohort configuration with backing data.
    SetupCohort(out DiscoveredDatabase db, out CohortIdentificationConfiguration cic, out DataTable dt);

    // Deploy a query cache into the same database and point the cic at it.
    var patcher = new QueryCachingPatcher();
    new MasterDatabaseScriptExecutor(db).CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier());

    var serverReference = new ExternalDatabaseServer(CatalogueRepository, "Cache", patcher);
    serverReference.SetProperties(db);

    cic.QueryCachingServer_ID = serverReference.ID;
    cic.SaveToDatabase();

    // Compile the cohort to completion.
    var compiler = new CohortCompiler(cic);
    var runner = new CohortCompilerRunner(compiler, 5000);
    runner.Run(new CancellationToken());

    Assert.AreEqual(CohortCompilerRunner.Phase.Finished, runner.ExecutionPhase);

    // The root container task should have completed cleanly with cached results.
    var rootTask = runner.Compiler.Tasks.Single(t => t.Key is AggregationContainerTask);
    Assert.IsTrue(rootTask.Value.IsResultsForRootContainer);
    Assert.IsNull(rootTask.Key.CrashMessage);
    Assert.AreEqual(CompilationState.Finished, rootTask.Key.State);
    Assert.IsTrue(runner.Compiler.AreaAllQueriesCached(rootTask.Key));

    // Identifier count should match the source data row count.
    Assert.AreEqual(dt.Rows.Count, rootTask.Value.Identifiers.Rows.Count);
}
/// <summary>
/// Command line entry for patching platform databases: runs Startup, patching any database
/// reported as RequiresPatching and failing the run if any database is Broken or worse.
/// Returns 0 on success, -1 on failure, REPO_ERROR if the repository is unusable.
/// </summary>
private static int Run(PatchDatabaseOptions opts)
{
    opts.PopulateConnectionStringsFromYamlIfMissing();

    var repo = opts.GetRepositoryLocator();
    if (!CheckRepo(repo))
        return REPO_ERROR;

    var checker = new NLogICheckNotifier(true, false);
    var start = new Startup.Startup(GetEnvironmentInfo(), repo);
    bool failed = false;

    start.DatabaseFound += (s, e) =>
    {
        var db = e.Repository.DiscoveredServer.GetCurrentDatabase();

        // Patch any database Startup reports as out of date.
        if (e.Status == Startup.Events.RDMPPlatformDatabaseStatus.RequiresPatching)
        {
            var executor = new MasterDatabaseScriptExecutor(db);
            executor.PatchDatabase(e.Patcher, checker, (p) => true, () => opts.BackupDatabase);
        }

        // Anything at or below Broken is fatal for this run.
        if (e.Status <= Startup.Events.RDMPPlatformDatabaseStatus.Broken)
        {
            checker.OnCheckPerformed(new CheckEventArgs($"Database {db} had status {e.Status}", CheckResult.Fail));
            failed = true;
        }
    };

    start.DoStartup(new IgnoreAllErrorsCheckNotifier());

    return failed ? -1 : 0;
}
/// <summary>
/// Starts creation of a new platform database in the database picked in the selector control.
/// Validates input, previews the SQL, then performs create + patch on a background Task.
/// </summary>
private void btnCreate_Click(object sender, EventArgs e)
{
    var db = serverDatabaseTableSelector1.GetDiscoveredDatabase();

    if (db == null)
    {
        MessageBox.Show(
            "You must pick an empty database or enter the name of a new one (that doesn't exist yet)");
        return;
    }

    if (_completed)
    {
        MessageBox.Show("Setup completed already, review progress messages then close Form");
        return;
    }

    // Refuse to start a second creation task while one is still running.
    if (_tCreateDatabase != null && !_tCreateDatabase.IsCompleted)
    {
        MessageBox.Show("Setup already underaway");
        return;
    }

    var createSql = _patcher.GetInitialCreateScriptContents(db);
    var patches = _patcher.GetAllPatchesInAssembly(db);

    var executor = new MasterDatabaseScriptExecutor(db);

    // Let the user confirm the SQL; modal WinForms dialogs must be disposed explicitly
    // (the original leaked the SQLPreviewWindow).
    using (var preview = new SQLPreviewWindow("Confirm happiness with SQL", "The following SQL is about to be executed:", createSql.EntireScript))
    {
        if (preview.ShowDialog() != DialogResult.OK)
            return;
    }

    // Run creation + patching off the UI thread; results accumulate into the checks UI.
    _tCreateDatabase = Task.Run(() =>
    {
        var memory = new ToMemoryCheckNotifier(checksUI1);

        if (executor.CreateDatabase(createSql, memory))
        {
            _completed = executor.PatchDatabase(patches, memory, silentlyApplyPatchCallback);
            DatabaseCreatedIfAny = db;

            var worst = memory.GetWorst();

            // Offer to close the form only when nothing worse than a Warning occurred.
            if (worst == CheckResult.Success || worst == CheckResult.Warning)
                if (MessageBox.Show("Succesfully created database, close form?", "Success", MessageBoxButtons.YesNo) == DialogResult.Yes)
                {
                    _programaticClose = true;
                    Invoke(new MethodInvoker(Close)); // marshal Close back onto the UI thread
                }
        }
        else
        {
            _completed = false; //failed to create database
        }
    });
}
// Verifies CohortQueryBuilder output when aggregate2 is a joinable 'patient index table'
// whose results have been committed to a query caching database: the SQL built for
// aggregate1 should LEFT Join the cached JoinableInceptionQuery table instead of
// re-running aggregate2's query.  Whitespace is collapsed before the SQL comparison.
// NOTE(review): SqlDataAdapter/SqlCommand/SqlDataReader below are not disposed -
// tolerable in a test but worth tidying.
public void JoinablesWithCache()
{
    string queryCachingDatabaseName = To.GetRuntimeName();
    _queryCachingDatabase = To;
    var builder = new CohortQueryBuilder(aggregate1, null);

    //make aggregate 2 a joinable
    var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate2);
    joinable2.AddUser(aggregate1);

    //make aggregate 2 have an additional column (dtCreated)
    var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName().Equals("dtCreated"));
    aggregate2.AddDimension(anotherCol);

    // Deploy the query caching schema into the cache database and register it as a server.
    MasterDatabaseScriptExecutor scripter = new MasterDatabaseScriptExecutor(_queryCachingDatabase);
    scripter.CreateAndPatchDatabase(new QueryCachingPatcher(), new AcceptAllCheckNotifier());

    var queryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, queryCachingDatabaseName, null);
    queryCachingDatabaseServer.SetProperties(_queryCachingDatabase);

    //make the builder use the query cache we just set SetUp
    builder.CacheServer = queryCachingDatabaseServer;
    try
    {
        // Execute aggregate2's query and commit the results into the cache.
        var builderForCaching = new CohortQueryBuilder(aggregate2, null, true);

        var cacheDt = new DataTable();
        using (SqlConnection con = (SqlConnection)Database.Server.GetConnection())
        {
            con.Open();
            SqlDataAdapter da = new SqlDataAdapter(new SqlCommand(builderForCaching.SQL, con));
            da.Fill(cacheDt);
        }

        var cacheManager = new CachedAggregateConfigurationResultsManager(queryCachingDatabaseServer);
        cacheManager.CommitResults(new CacheCommitJoinableInceptionQuery(aggregate2, builderForCaching.SQL, cacheDt, null, 30));
        try
        {
            Console.WriteLine(builder.SQL);

            // The composite query must execute and return at least one row.
            using (var con = (SqlConnection)Database.Server.GetConnection())
            {
                con.Open();
                var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader();

                //can read at least one row
                Assert.IsTrue(dbReader.Read());
            }

            string expectedTableAlias = "ix" + joinable2.ID;

            //after joinables
            Assert.AreEqual(
                CollapseWhitespace(
                    string.Format(
                        @"/*cic_{2}_UnitTestAggregate1*/ SELECT 
distinct [" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData].[chi] FROM [" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData] LEFT Join ( /*Cached:cic_{2}_UnitTestAggregate2*/ select * from [{3}]..[JoinableInceptionQuery_AggregateConfiguration{1}] ){0} on [" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData].[chi] = {0}.chi",
                        expectedTableAlias, //{0}
                        aggregate2.ID, //{1}
                        cohortIdentificationConfiguration.ID, //{2}
                        queryCachingDatabaseName) //{3}
                    ),
                CollapseWhitespace(builder.SQL));
        }
        finally
        {
            // Remove the joinable relationship then the joinable itself.
            joinable2.Users[0].DeleteInDatabase();
            joinable2.DeleteInDatabase();
        }
    }
    finally
    {
        // Tear down the cache server reference and drop the cache database.
        queryCachingDatabaseServer.DeleteInDatabase();
        DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(queryCachingDatabaseName).Drop();
    }
}
// Click handler that works out which patches in the host assembly have not yet been applied
// to the live database, sanity-checks the patches already recorded in the database (phantom
// or altered patches produce warnings and are skipped), refuses to run when patch version
// numbers are inconsistent with the database/host assembly versions, then applies the
// remaining patches via MasterDatabaseScriptExecutor (optionally after a backup - MS SQL
// only).  On success the patch button is disabled, caches are cleared and an application
// restart is offered; any exception is reported through the checks UI.
private void btnAttemptPatching_Click(object sender, EventArgs e)
{
    bool stop = false;

    //start with the assumption that we will apply all patches
    SortedDictionary <string, Patch> toApply = new SortedDictionary <string, Patch>();

    foreach (Patch potentialInstallable in _allPatchesInAssembly.Values.Except(_patchesInDatabase))
    {
        toApply.Add(potentialInstallable.locationInAssembly, potentialInstallable);
    }

    var listener = new ToMemoryCheckNotifier(checksUI1);

    checksUI1.BeginUpdate();
    try
    {
        //make sure the existing patches in the live database are not freaky phantom patches
        foreach (Patch patch in _patchesInDatabase)
        {
            //if patch is not in database assembly
            if (!_allPatchesInAssembly.Any(a => a.Value.Equals(patch)))
            {
                listener.OnCheckPerformed(new CheckEventArgs(
                    "The database contains an unexplained patch called " + patch.locationInAssembly + " (it is not in " + _patcher.GetDbAssembly().FullName + " ) so how did it get there?", CheckResult.Warning, null));
            }
            else if (!_allPatchesInAssembly[patch.locationInAssembly].GetScriptBody().Equals(patch.GetScriptBody()))
            {
                listener.OnCheckPerformed(new CheckEventArgs(
                    "The contents of patch " + patch.locationInAssembly + " are different between live database and the database patching assembly", CheckResult.Warning, null));

                //do not apply this patch
                toApply.Remove(patch.locationInAssembly);
            }
            else
            {
                //we found it and it was intact
                listener.OnCheckPerformed(new CheckEventArgs("Patch " + patch.locationInAssembly + " was previously installed successfully so no need to touch it", CheckResult.Success, null));

                //do not apply this patch
                toApply.Remove(patch.locationInAssembly);
            }
        }
    }
    catch (Exception exception)
    {
        listener.OnCheckPerformed(new CheckEventArgs("Patch evaluation failed", CheckResult.Fail, exception));
        stop = true;
    }
    finally
    {
        checksUI1.EndUpdate();
    }

    //if any of the patches we are trying to apply are earlier than the latest in the database
    IEnumerable <Patch> missedOppertunities = toApply.Values.Where(p => p.DatabaseVersionNumber < _patchesInDatabase.Max(p2 => p2.DatabaseVersionNumber));
    foreach (Patch missedOppertunity in missedOppertunities)
    {
        stop = true;
        listener.OnCheckPerformed(new CheckEventArgs(
            "Patch " + missedOppertunity.locationInAssembly + " cannot be applied because it's version number is " + missedOppertunity.DatabaseVersionNumber + " but the current database is at version " + _databaseVersion + Environment.NewLine + " Contents of patch was:" + Environment.NewLine + missedOppertunity.EntireScript
            , CheckResult.Fail, null));
    }

    //if the patches to be applied would bring the version number above that of the host Library
    foreach (Patch futurePatch in toApply.Values.Where(patch => patch.DatabaseVersionNumber > _hostAssemblyVersion))
    {
        listener.OnCheckPerformed(new CheckEventArgs(
            "Cannot apply patch " + futurePatch.locationInAssembly + " because it's database version number is " + futurePatch.DatabaseVersionNumber + " which is higher than the currently loaded host assembly (" + _patcher.GetDbAssembly().FullName + "). 
", CheckResult.Fail, null));
        stop = true;
    }

    if (stop)
    {
        listener.OnCheckPerformed(new CheckEventArgs("Abandonning patching process (no patches have been applied) because of one or more previous errors", CheckResult.Fail, null));
        return;
    }

    try
    {
        MasterDatabaseScriptExecutor executor = new MasterDatabaseScriptExecutor(_database);

        //todo: Only ms has a backup implementation in FAnsi currently
        bool backupDatabase = _database.Server.DatabaseType == DatabaseType.MicrosoftSQLServer && MessageBox.Show("Backup Database First", "Backup", MessageBoxButtons.YesNo) == DialogResult.Yes;

        executor.PatchDatabase(toApply, listener, PreviewPatch, backupDatabase);

        //if it crashed during patching
        if (listener.GetWorst() == CheckResult.Fail)
        {
            btnAttemptPatching.Enabled = true;
            return;
        }

        listener.OnCheckPerformed(new CheckEventArgs("Patching completed without exception, disabling the patching button", CheckResult.Success, null));

        //patching worked so prevent them doing it again!
        btnAttemptPatching.Enabled = false;

        if (_repository != null)
        {
            _repository.ClearUpdateCommandCache();
            checksUI1.OnCheckPerformed(new CheckEventArgs("Cleared UPDATE commands cache", CheckResult.Success, null));
        }

        checksUI1.OnCheckPerformed(new CheckEventArgs("Patching Succesful", CheckResult.Success, null));

        if (MessageBox.Show("Application will now restart", "Close?", MessageBoxButtons.YesNo) == DialogResult.Yes)
        {
            Application.Restart();
        }
    }
    catch (Exception exception)
    {
        checksUI1.OnCheckPerformed(new CheckEventArgs("Patching failed", CheckResult.Fail, exception));
    }
}
// End-to-end test of FoDicomAnonymiser: deploys a UID mapping schema into a test database,
// builds a dicom file containing identifiable tags, pushes a DataTable referencing that file
// through the anonymiser pipeline component and asserts that (a) the table's UIDs are
// replaced with 56 character mapped values, (b) identifiable tags are removed/anonymised in
// the written file (dates zeroed unless keepDates is true) and (c) the anonymised file lands
// in the folder layout dictated by putterType.
public void TestAnonymisingDataset(Type putterType, bool keepDates)
{
    // Create the UID mapping database/schema and register it as a server reference.
    var uidMapDb = GetCleanedServer(DatabaseType.MicrosoftSQLServer, "TESTUIDMapp");

    MasterDatabaseScriptExecutor e = new MasterDatabaseScriptExecutor(uidMapDb);
    var patcher = new SMIDatabasePatcher();
    e.CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier());

    var eds = new ExternalDatabaseServer(CatalogueRepository, "eds", patcher);
    eds.SetProperties(uidMapDb);

    Dictionary <DicomTag, string> thingThatShouldDisappear = new Dictionary <DicomTag, string>
    {
        //Things we would want to disappear
        { DicomTag.PatientName, "Moscow" },
        { DicomTag.PatientBirthDate, "20010101" },
        { DicomTag.StudyDescription, "Frank has lots of problems, he lives at 60 Pancake road" },
        { DicomTag.SeriesDescription, "Coconuts" },
        { DicomTag.StudyDate, "20020101" },
    };

    Dictionary <DicomTag, string> thingsThatShouldRemain = new Dictionary <DicomTag, string>
    {
        //Things we would want to remain
        //{DicomTag.SmokingStatus,"YES"},
    };

    // Minimal dataset with the three UIDs the anonymiser must remap.
    var dicom = new DicomDataset
    {
        { DicomTag.SOPInstanceUID, "123.4.4" },
        { DicomTag.SeriesInstanceUID, "123.4.5" },
        { DicomTag.StudyInstanceUID, "123.4.6" },
        { DicomTag.SOPClassUID, "1" },
    };

    foreach (var(key, value) in thingThatShouldDisappear)
    {
        dicom.AddOrUpdate(key, value);
    }

    foreach (var(key, value) in thingsThatShouldRemain)
    {
        dicom.AddOrUpdate(key, value);
    }

    // StudyDate is written again as a typed DateTime (replaces the string added above).
    dicom.AddOrUpdate(DicomTag.StudyDate, new DateTime(2002, 01, 01));

    // Save the dataset to a file for the pipeline to anonymise.
    var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "madness.dcm"));

    DicomFile df = new DicomFile(dicom);
    df.Save(fi.FullName);

    // Extraction table row referencing the file (release identifier column is "Pat").
    var dt = new DataTable();
    dt.Columns.Add("Filepath");
    dt.Columns.Add("SOPInstanceUID");
    dt.Columns.Add("SeriesInstanceUID");
    dt.Columns.Add("StudyInstanceUID");
    dt.Columns.Add("Pat");

    //note we don't have series
    dt.Rows.Add(fi.Name, "123.4.4", "123.4.5", "123.4.6", "Hank");

    var anonymiser = new FoDicomAnonymiser();

    IExtractCommand cmd = MockExtractionCommand();

    //give the mock to anonymiser
    anonymiser.PreInitialize(cmd, new ThrowImmediatelyDataLoadEventListener());

    anonymiser.PutterType = putterType;
    anonymiser.ArchiveRootIfAny = TestContext.CurrentContext.WorkDirectory;
    anonymiser.RelativeArchiveColumnName = "Filepath";
    anonymiser.UIDMappingServer = eds;
    anonymiser.RetainDates = keepDates;

    var anoDt = anonymiser.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());

    Assert.AreEqual(1, anoDt.Rows.Count);

    //Data table should contain new UIDs
    Assert.AreNotEqual("123.4.4", anoDt.Rows[0]["SOPInstanceUID"]);
    Assert.AreEqual(56, anoDt.Rows[0]["SOPInstanceUID"].ToString().Length);

    Assert.AreNotEqual("123.4.6", anoDt.Rows[0]["StudyInstanceUID"]);
    Assert.AreEqual(56, anoDt.Rows[0]["StudyInstanceUID"].ToString().Length);

    // Work out where the anonymised file should have been written for this putter type.
    FileInfo expectedFile = null;
    if (putterType == typeof(PutInRoot))
    {
        expectedFile = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images", anoDt.Rows[0]["SOPInstanceUID"] + ".dcm"));
    }

    if (putterType == typeof(PutInReleaseIdentifierSubfolders))
    {
        expectedFile = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images", "Hank", anoDt.Rows[0]["SOPInstanceUID"] + ".dcm"));
    }

    if (putterType == typeof(PutInUidSeriesFolders))
    {
        expectedFile = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images", "Hank", anoDt.Rows[0]["SeriesInstanceUID"].ToString(), anoDt.Rows[0]["SOPInstanceUID"] + ".dcm"));
    }

    if (putterType == typeof(PutInUidStudySeriesFolders))
    {
        expectedFile = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images", "Hank", anoDt.Rows[0]["StudyInstanceUID"].ToString(), anoDt.Rows[0]["SeriesInstanceUID"].ToString(), anoDt.Rows[0]["SOPInstanceUID"] + ".dcm"));
    }

    Assert.IsTrue(expectedFile.Exists);

    // Open the anonymised file and verify tag-level anonymisation.
    var anoDicom = DicomFile.Open(expectedFile.FullName);

    Assert.AreEqual("Hank", anoDicom.Dataset.GetValue <string>(DicomTag.PatientID, 0));

    Assert.AreEqual(anoDt.Rows[0]["SOPInstanceUID"], anoDicom.Dataset.GetValue <string>(DicomTag.SOPInstanceUID, 0));
    Assert.AreEqual(56, anoDicom.Dataset.GetValue <string>(DicomTag.SeriesInstanceUID, 0).Length);

    Assert.AreEqual(anoDt.Rows[0]["StudyInstanceUID"], anoDicom.Dataset.GetValue <string>(DicomTag.StudyInstanceUID, 0));

    foreach (var(key, _) in thingThatShouldDisappear)
    {
        //if it chopped out the entire tag
        if (!anoDicom.Dataset.Contains(key))
        {
            continue;
        }

        if (anoDicom.Dataset.GetValueCount(key) == 0)
        {
            continue;
        }

        var value = anoDicom.Dataset.GetSingleValue <string>(key);
        switch (value)
        {
        //allowed values
        case "ANONYMOUS": continue;

        //anonymous date
        case "00010101": Assert.IsFalse(keepDates); continue;

        case "20020101": Assert.IsTrue(keepDates); continue;

        default: Assert.Fail("Unexpected value for " + key + ":" + value); break;
        }
    }

    foreach (var(key, value) in thingsThatShouldRemain)
    {
        Assert.AreEqual(value, anoDicom.Dataset.GetValue <string>(key, 0));
    }
}
// Builds a 'patient index table' aggregate (Chi + Date + Healthboard) over a two-table join
// (header/results), runs it through the CohortCompiler with a query cache configured and
// asserts the joinable results are committed to the cache with the expected column count,
// row count and (string) Healthboard data type.
public void CohortIdentificationConfiguration_Join_PatientIndexTable()
{
    // Create the 'header' test table on the From server.
    DataTable header = new DataTable();
    header.Columns.Add("ID");
    header.Columns.Add("Chi");
    header.Columns.Add("Age");
    header.Columns.Add("Date");
    header.Columns.Add("Healthboard");
    header.PrimaryKey = new [] { header.Columns["ID"] };

    header.Rows.Add("1", "0101010101", 50, new DateTime(2001, 1, 1), "T");
    header.Rows.Add("2", "0202020202", 50, new DateTime(2002, 2, 2), "T");

    var hTbl = From.CreateTable("header", header);
    var cata = Import(hTbl, out TableInfo hTi, out _);
    cata.Name = "My Combo Join Catalogue";
    cata.SaveToDatabase();

    // Deploy a query cache schema onto the To server and register it as "Cache".
    var scripter = new MasterDatabaseScriptExecutor(To);
    var patcher = new QueryCachingPatcher();
    scripter.CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier());

    var edsCache = new ExternalDatabaseServer(CatalogueRepository, "Cache", new QueryCachingPatcher());
    edsCache.SetProperties(To);

    // Create the 'results' test table and forward engineer it into the same catalogue.
    DataTable results = new DataTable();
    results.Columns.Add("Header_ID");
    results.Columns.Add("TestCode");
    results.Columns.Add("Result");

    results.Rows.Add("1", "HBA1C", 50);
    results.Rows.Add("1", "ECOM", "Hi fellas");
    results.Rows.Add("1", "ALB", 100);
    results.Rows.Add("2", "ALB", 50);

    var rTbl = From.CreateTable("results", results);

    var importer = new TableInfoImporter(CatalogueRepository, rTbl);
    importer.DoImport(out TableInfo rTi, out ColumnInfo[] rColInfos);

    var fe = new ForwardEngineerCatalogue(rTi, rColInfos, true);
    fe.ExecuteForwardEngineering(cata);

    //Should now be 1 Catalogue with all the columns (tables will have to be joined to build the query though)
    Assert.AreEqual(8, cata.GetAllExtractionInformation(ExtractionCategory.Core).Length);

    // Declare how results joins to header (constructor records the join; ji itself unused).
    var ji = new JoinInfo(CatalogueRepository,
        rTi.ColumnInfos.Single(ci => ci.GetRuntimeName().Equals("Header_ID", StringComparison.CurrentCultureIgnoreCase)),
        hTi.ColumnInfos.Single(ci => ci.GetRuntimeName().Equals("ID", StringComparison.CurrentCultureIgnoreCase)),
        ExtractionJoinType.Right, null
        );

    //setup a cic that uses the cache
    var cic = new CohortIdentificationConfiguration(CatalogueRepository, "MyCic");
    cic.CreateRootContainerIfNotExists();
    cic.QueryCachingServer_ID = edsCache.ID;
    cic.SaveToDatabase();

    //create a patient index table that shows all the times that they had a test in any HB (with the HB being part of the result set)
    var acPatIndex = new AggregateConfiguration(CatalogueRepository, cata, "My PatIndes");

    var eiChi = cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Chi"));
    eiChi.IsExtractionIdentifier = true;
    acPatIndex.CountSQL = null;
    eiChi.SaveToDatabase();

    acPatIndex.AddDimension(eiChi);
    acPatIndex.AddDimension(cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Date")));
    acPatIndex.AddDimension(cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Healthboard")));

    cic.EnsureNamingConvention(acPatIndex);

    var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cic, acPatIndex);

    Assert.IsTrue(acPatIndex.IsCohortIdentificationAggregate);
    Assert.IsTrue(acPatIndex.IsJoinablePatientIndexTable());

    // Compile the cohort; results for the joinable should land in the cache.
    var compiler = new CohortCompiler(cic);

    var runner = new CohortCompilerRunner(compiler, 50);
    var cancellation = new System.Threading.CancellationToken();
    runner.Run(cancellation);

    //they should not be executing and should be completed
    Assert.IsFalse(compiler.Tasks.Any(t => t.Value.IsExecuting));
    Assert.AreEqual(Phase.Finished, runner.ExecutionPhase);

    var manager = new CachedAggregateConfigurationResultsManager(edsCache);

    var cacheTableName = manager.GetLatestResultsTableUnsafe(acPatIndex, AggregateOperation.JoinableInceptionQuery);

    Assert.IsNotNull(cacheTableName, "No results were cached!");

    var cacheTable = To.ExpectTable(cacheTableName.GetRuntimeName());

    //chi, Date and TestCode
    Assert.AreEqual(3, cacheTable.DiscoverColumns().Length);

    //healthboard should be a string
    Assert.AreEqual(typeof(string), cacheTable.DiscoverColumn("Healthboard").DataType.GetCSharpDataType());

    /* Query Cache contains this:
     *
     * Chi          Date                          Healthboard
     * 0101010101   2001-01-01 00:00:00.0000000   T
     * 0202020202   2002-02-02 00:00:00.0000000   T
     */
    Assert.AreEqual(2, cacheTable.GetRowCount());

    //Now we could add a new AggregateConfiguration that uses the joinable!
}