Example #1
        public void PlanManagementTest()
        {
            var dbName = TestDatabaseNames.GetConsistentName("PlanManagementTests");

            var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);

            db.Create(true);

            BulkTestsData bulk = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), 100);

            bulk.SetupTestData();
            bulk.ImportAsCatalogue();

            var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);

            planManager.TargetDatabase = db;

            //no operations are as yet configured
            Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

            //create a table with the same name in the endpoint database to confirm that that's a problem
            db.CreateTable(bulk.tableInfo.GetRuntimeName(), new DatabaseColumnRequest[]
            {
                new DatabaseColumnRequest("fish", "varchar(100)")
            });

            //throws because table already exists
            Assert.Throws<Exception>(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

            db.ExpectTable(bulk.tableInfo.GetRuntimeName()).Drop();

            //back to being fine again
            Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

            //setup test rules for migrator
            CreateMigrationRules(planManager, bulk);

            //rules should pass
            Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

            var chi = bulk.GetColumnInfo("chi");

            Assert.Throws<Exception>(() =>
            {
                planManager.GetPlanForColumnInfo(chi).Plan = Plan.Drop;
                planManager.GetPlanForColumnInfo(chi).Check(new ThrowImmediatelyCheckNotifier());
            }, "Should not be able to drop primary key column");

            db.Drop();
        }
        public void SetUpCache()
        {
            queryCacheDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.Prefix + "QueryCache");

            MasterDatabaseScriptExecutor executor = new MasterDatabaseScriptExecutor(queryCacheDatabase);

            var p = new QueryCachingPatcher();

            executor.CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier());

            externalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, "QueryCacheForUnitTests", p);
            externalDatabaseServer.SetProperties(queryCacheDatabase);
        }
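Note: the pattern in SetUpCache above (drop any leftover database, create and patch it with MasterDatabaseScriptExecutor, then register it as an ExternalDatabaseServer) recurs in several fixtures below. The following is a minimal, hypothetical helper sketch of that pattern; the method name is an assumption, and it uses only calls already shown in these examples.

        //Hypothetical helper (not part of the RDMP test code above): creates a patched
        //query cache database and registers a reference to it in the Catalogue repository.
        private ExternalDatabaseServer CreateQueryCacheDatabase(DiscoveredDatabase db, string name)
        {
            //start from a clean slate if a previous run left the database behind
            if (db.Exists())
            {
                db.Drop();
            }

            //create the database and bring its schema up to date with the patcher's scripts
            var patcher = new QueryCachingPatcher();
            new MasterDatabaseScriptExecutor(db).CreateAndPatchDatabase(patcher, new ThrowImmediatelyCheckNotifier());

            //record a reference so other components (e.g. a CohortIdentificationConfiguration) can use it
            var server = new ExternalDatabaseServer(CatalogueRepository, name, patcher);
            server.SetProperties(db);
            return server;
        }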
Example #3
        public void CleanupOnStart()
        {
            _projectStub = Mock.Of<IProject>();
            _projectStub.ProjectNumber = -123;

            var cfg = Mock.Of<IExtractionConfiguration>();

            _commandStub = Mock.Of<IExtractCommand>(cmd => cmd.Configuration == cfg);

            var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("FictionalDatabase");

            if (db.Exists())
            {
                db.Drop();
            }
        }
        public void Setup()
        {
            DiscoveredQueryCachingDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(QueryCachingDatabaseName);

            if (DiscoveredQueryCachingDatabase.Exists())
            {
                DiscoveredQueryCachingDatabase.Drop();
            }

            MasterDatabaseScriptExecutor scripter = new MasterDatabaseScriptExecutor(DiscoveredQueryCachingDatabase);
            var p = new QueryCachingPatcher();

            scripter.CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier());

            QueryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, QueryCachingDatabaseName, p);
            QueryCachingDatabaseServer.SetProperties(DiscoveredQueryCachingDatabase);
        }
        public void TestCheckUpdateTrigger()
        {
            // set up a test database
            const string tableName = "TestTable";
            var          db        = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);

            var databaseName = db.GetRuntimeName();
            var table        = db.CreateTable(tableName, new[] { new DatabaseColumnRequest("Id", "int"), });

            var server = db.Server;

            using (var con = server.GetConnection())
            {
                con.Open();
                var cmd = server.GetCommand(
                    "CREATE TRIGGER dbo.[TestTable_OnUpdate] ON [dbo].[" + tableName +
                    "] AFTER DELETE AS RAISERROR('MESSAGE',16,10)", con);

                cmd.ExecuteNonQuery();
            }

            var dbInfo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName);

            var factory = new TriggerImplementerFactory(dbInfo.Server.DatabaseType);

            var triggerImplementer = factory.Create(table);
            var isEnabled          = triggerImplementer.GetTriggerStatus();

            Assert.AreEqual(TriggerStatus.Enabled, isEnabled);


            // disable the trigger and test correct reporting
            using (var con = new SqlConnection(dbInfo.Server.Builder.ConnectionString))
            {
                con.Open();
                var cmd =
                    new SqlCommand(
                        "USE [" + databaseName + "]; DISABLE TRIGGER TestTable_OnUpdate ON [" + databaseName + "]..[" +
                        tableName + "]", con);
                cmd.ExecuteNonQuery();
            }

            isEnabled = triggerImplementer.GetTriggerStatus();
            Assert.AreEqual(TriggerStatus.Disabled, isEnabled);
        }
Example #6
        public void Setup_IdentifierDump()
        {
            IdentifierDump_Database = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(IdentifierDump_DatabaseName);

            if (IdentifierDump_Database.Exists())
            {
                IdentifierDump_Database.Drop();
            }

            var scriptCreate = new MasterDatabaseScriptExecutor(IdentifierDump_Database);
            var p            = new IdentifierDumpDatabasePatcher();

            scriptCreate.CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier());

            //now create a new reference!
            IdentifierDump_ExternalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, IdentifierDump_DatabaseName, p);
            IdentifierDump_ExternalDatabaseServer.SetProperties(IdentifierDump_Database);

            CatalogueRepository.GetServerDefaults().SetDefault(PermissableDefaults.IdentifierDumpServer_ID, IdentifierDump_ExternalDatabaseServer);
        }
        protected override void OneTimeSetUp()
        {
            base.OneTimeSetUp();

            queryCacheDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.Prefix + "QueryCache");

            if (queryCacheDatabase.Exists())
            {
                base.DeleteTables(queryCacheDatabase);
            }

            MasterDatabaseScriptExecutor executor = new MasterDatabaseScriptExecutor(queryCacheDatabase);

            var p = new QueryCachingPatcher();

            executor.CreateAndPatchDatabase(p, new AcceptAllCheckNotifier());

            externalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, "QueryCacheForUnitTests", p);
            externalDatabaseServer.SetProperties(queryCacheDatabase);
        }
Example #8
        public void ServerDatabaseIsPresentAndCorrectButHasTablesInIt()
        {
            var server = new ExternalDatabaseServer(CatalogueRepository, "Fiction", null);

            server.Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name;
            //server.Database = "FictionalDatabase"; Ignored by the extractor!

            DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase("FictionalDatabase");

            var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("FictionalDatabase");

            using (var con = db.Server.GetConnection())
            {
                con.Open();

                db.Server.GetCommand("CREATE TABLE Bob(name varchar(10))", con).ExecuteNonQuery();
            }

            try
            {
                var destination = new ExecuteFullExtractionToDatabaseMSSql();
                destination.PreInitialize(_projectStub, new ThrowImmediatelyDataLoadEventListener());
                destination.PreInitialize(_commandStub, new ThrowImmediatelyDataLoadEventListener());
                destination.TargetDatabaseServer  = server;
                destination.TableNamingPattern    = "$d";
                destination.DatabaseNamingPattern = "FictionalDatabase";

                var tomemory = new ToMemoryCheckNotifier(new ThrowImmediatelyCheckNotifier());
                destination.Check(tomemory);

                Assert.AreEqual(CheckResult.Warning, tomemory.GetWorst());

                db.ExpectTable("Bob").Drop();
            }
            finally
            {
                server.DeleteInDatabase();
            }
        }
        public void TestConnection(bool explicitClose)
        {
            //drop it if it existed
            if (DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName).Exists())
            {
                DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName).Drop();
            }

            DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase(testDbName);
            Thread.Sleep(500);

            ThrowIfDatabaseLock();

            var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName);

            ThrowIfDatabaseLock();

            using (var con = db.Server.GetConnection())
            {
                con.Open();

                //we are currently connected so this should throw
                Assert.Throws<Exception>(ThrowIfDatabaseLock);
            }
            Thread.Sleep(500);

            if (explicitClose)
            {
                SqlConnection.ClearAllPools();
                Thread.Sleep(500);
                Assert.DoesNotThrow(ThrowIfDatabaseLock);//in this case we told .net to clear the pools which leaves the server free of locks/hanging connections
            }
            else
            {
                Assert.Throws<Exception>(ThrowIfDatabaseLock);//despite closing the connection and using the 'using' block, .NET still keeps a pooled connection open against the server
            }

            db.Drop();
        }
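Note: TestConnection above relies on ADO.NET connection pooling; disposing the SqlConnection only returns it to the pool, so the server still holds a session against the database until SqlConnection.ClearAllPools() is called. Below is a minimal, hypothetical sketch of the resulting "clear the pool before dropping" step, using only calls that appear in the test above.

        //Hypothetical helper (illustrative only): release pooled connections before dropping
        //a test database, mirroring the explicitClose branch of TestConnection above.
        private static void DropDatabaseClearingPools(DiscoveredDatabase db)
        {
            //closed connections linger in the pool and keep a session open on the server
            SqlConnection.ClearAllPools();

            if (db.Exists())
            {
                db.Drop();
            }
        }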
Example #10
        public void CreateANOVersionTest()
        {
            var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest");

            var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);

            db.Create(true);

            BulkTestsData bulk = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);

            bulk.SetupTestData();
            bulk.ImportAsCatalogue();

            var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);

            planManager.TargetDatabase = db;

            //setup test rules for migrator
            CreateMigrationRules(planManager, bulk);

            //rules should pass checks
            Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

            var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);

            engine.Execute();

            var anoCatalogue = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Folder.Path.StartsWith("\\ano"));

            Assert.IsTrue(anoCatalogue.Exists());

            db.Drop();

            var exports = CatalogueRepository.GetAllObjects<ObjectExport>().Count();
            var imports = CatalogueRepository.GetAllObjects<ObjectImport>().Count();

            Assert.AreEqual(exports, imports);
            Assert.IsTrue(exports > 0);
        }
        public void BeforeEachTest()
        {
            _catalogue = CatalogueRepository.GetAllObjects<Catalogue>("WHERE Name='BackfillSqlHelperTests'").SingleOrDefault();
            if (_catalogue != null)
            {
                // Previous test run has not exited cleanly
                foreach (var ti in _catalogue.GetTableInfoList(false))
                {
                    ti.DeleteInDatabase();
                }

                _catalogue.DeleteInDatabase();
            }

            _stagingDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(DatabaseName + "_STAGING");
            _liveDatabase    = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(DatabaseName);

            // ensure the test staging and live databases are empty
            _stagingDatabase.Create(true);
            _liveDatabase.Create(true);

            CleanCatalogueDatabase();
        }
Example #12
        public void ServerDatabaseIsPresentAndCorrect(bool alreadyExists)
        {
            var server = new ExternalDatabaseServer(CatalogueRepository, "Fiction", null);

            server.Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name;
            //server.Database = "FictionalDatabase"; Ignored by the extractor!

            DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase("FictionalDatabase");
            Assert.IsTrue(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("FictionalDatabase").Exists());

            try
            {
                var destination = new ExecuteFullExtractionToDatabaseMSSql();
                destination.PreInitialize(_projectStub, new ThrowImmediatelyDataLoadEventListener());
                destination.PreInitialize(_commandStub, new ThrowImmediatelyDataLoadEventListener());

                destination.TargetDatabaseServer = server;
                destination.TableNamingPattern   = "$d";

                if (alreadyExists)
                {
                    destination.DatabaseNamingPattern = "FictionalDatabase"; //database that exists
                }
                else
                {
                    destination.DatabaseNamingPattern = "Fictional$nDatabase";  //database does not exist (but server does)
                }
                var tomemory = new ToMemoryCheckNotifier(new ThrowImmediatelyCheckNotifier());
                destination.Check(tomemory);

                Assert.AreEqual(alreadyExists ? CheckResult.Warning : CheckResult.Success, tomemory.GetWorst());
            }
            finally
            {
                server.DeleteInDatabase();
            }
        }
Example #13
        public void EndToEndTest()
        {
            var cohortDatabaseNameWillBe = TestDatabaseNames.GetConsistentName("TbvCohort");

            _discoveredCohortDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseNameWillBe);

            //cleanup
            if (_discoveredCohortDatabase.Exists())
            {
                _discoveredCohortDatabase.Drop();
            }

            //create a normal catalogue
            CreateANormalCatalogue();

            //create a cohort database using wizard
            CreateNewCohortDatabaseWizard cohortDatabaseWizard = new CreateNewCohortDatabaseWizard(_discoveredCohortDatabase, CatalogueRepository, DataExportRepository, false);

            _externalCohortTable = cohortDatabaseWizard.CreateDatabase(
                new PrivateIdentifierPrototype(_nonTvfExtractionIdentifier)
                , new ThrowImmediatelyCheckNotifier());

            //create a table valued function
            CreateTvfCatalogue(cohortDatabaseNameWillBe);

            //Test 1
            TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException();

            PopulateCohortDatabaseWithRecordsFromNonTvfCatalogue();

            //Test 2
            TestWithParameterValueThatRowsAreReturned();

            //Test 3
            TestUsingTvfForAggregates();

            //Test 4
            TestAddingTvfToCIC();

            //Test 5
            TestDataExportOfTvf();

            //tear down
            DataExportRepository.GetAllObjects<ExtractableCohort>().Single().DeleteInDatabase();
            _externalCohortTable.DeleteInDatabase();

            _database.ExpectTable("NonTVFTable").Drop();
            _database.ExpectTableValuedFunction("GetTopXRandom").Drop();

            //delete global parameter
            ((AnyTableSqlParameter)_aggregate.GetAllParameters().Single()).DeleteInDatabase();
            //delete aggregate
            _aggregate.DeleteInDatabase();

            ((AnyTableSqlParameter)_cicAggregate.GetAllParameters().Single()).DeleteInDatabase();
            //delete aggregate
            _cicAggregate.DeleteInDatabase();

            //get rid of the cohort identification configuration
            _cic.DeleteInDatabase();
            _pipe.DeleteInDatabase();

            //get rid of the cohort database
            _discoveredCohortDatabase.Drop();

            _nonTvfCatalogue.DeleteInDatabase();
            _nonTvfTableInfo.DeleteInDatabase();

            _tvfCatalogue.DeleteInDatabase();
            _tvfTableInfo.DeleteInDatabase();
        }
Example #14
        public void RefreshCohort_WithCaching()
        {
            ExtractionPipelineUseCase            useCase;
            IExecuteDatasetExtractionDestination results;

            var pipe = new Pipeline(CatalogueRepository, "RefreshPipeWithCaching");

            var source    = new PipelineComponent(CatalogueRepository, pipe, typeof(CohortIdentificationConfigurationSource), 0);
            var args      = source.CreateArgumentsForClassIfNotExists<CohortIdentificationConfigurationSource>();
            var freezeArg = args.Single(a => a.Name.Equals("FreezeAfterSuccessfulImport"));

            freezeArg.SetValue(false);
            freezeArg.SaveToDatabase();

            var dest         = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicCohortDestination), 0);
            var argsDest     = dest.CreateArgumentsForClassIfNotExists<BasicCohortDestination>();
            var allocatorArg = argsDest.Single(a => a.Name.Equals("ReleaseIdentifierAllocator"));

            allocatorArg.SetValue(null);
            allocatorArg.SaveToDatabase();

            pipe.SourcePipelineComponent_ID      = source.ID;
            pipe.DestinationPipelineComponent_ID = dest.ID;
            pipe.SaveToDatabase();

            Execute(out useCase, out results);

            var oldcohort = _configuration.Cohort;

            //Create a query cache
            var p = new QueryCachingPatcher();
            ExternalDatabaseServer queryCacheServer = new ExternalDatabaseServer(CatalogueRepository, "TestCohortRefreshing_CacheTest", p);

            DiscoveredDatabase cachedb = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("TestCohortRefreshing_CacheTest");

            if (cachedb.Exists())
            {
                cachedb.Drop();
            }

            new MasterDatabaseScriptExecutor(cachedb).CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier());
            queryCacheServer.SetProperties(cachedb);

            //Create a Cohort Identification configuration (query) that will identify the cohort
            CohortIdentificationConfiguration cic = new CohortIdentificationConfiguration(RepositoryLocator.CatalogueRepository, "RefreshCohort.cs");

            try
            {
                //make it use the cache
                cic.QueryCachingServer_ID = queryCacheServer.ID;
                cic.SaveToDatabase();

                //give it a single table query to fetch distinct chi from test data
                var agg = cic.CreateNewEmptyConfigurationForCatalogue(_catalogue, null);

                //add the sub query as the only entry in the cic (in the root container)
                cic.CreateRootContainerIfNotExists();
                cic.RootCohortAggregateContainer.AddChild(agg, 1);

                //make the ExtractionConfiguration refresh cohort query be the cic
                _configuration.CohortIdentificationConfiguration_ID = cic.ID;
                _configuration.CohortRefreshPipeline_ID             = pipe.ID;
                _configuration.SaveToDatabase();

                //get a refreshing engine
                var engine = new CohortRefreshEngine(new ThrowImmediatelyDataLoadEventListener(), _configuration);
                engine.Execute();

                Assert.NotNull(engine.Request.NewCohortDefinition);

                var oldData = oldcohort.GetExternalData();

                Assert.AreEqual(oldData.ExternalDescription, engine.Request.NewCohortDefinition.Description);
                Assert.AreEqual(oldData.ExternalVersion + 1, engine.Request.NewCohortDefinition.Version);

                Assert.AreNotEqual(oldcohort.CountDistinct, engine.Request.CohortCreatedIfAny.CountDistinct);

                //now nuke all data in the catalogue so the cic returns nobody (except that the identifiers are cached eh?)
                DataAccessPortal.GetInstance().ExpectDatabase(_tableInfo, DataAccessContext.InternalDataProcessing).ExpectTable(_tableInfo.GetRuntimeName()).Truncate();

                var toMem = new ToMemoryDataLoadEventListener(false);

                //get a new engine
                engine = new CohortRefreshEngine(toMem, _configuration);

                //execute it
                var ex = Assert.Throws<Exception>(() => engine.Execute());

                Assert.IsTrue(ex.InnerException.InnerException.Message.Contains("CohortIdentificationCriteria execution resulted in an empty dataset"));

                //we expected the empty dataset message above
                //and the engine should also have cleared the cohort identifier cache
                Assert.AreEqual(1, toMem.EventsReceivedBySender.SelectMany(kvp => kvp.Value).Count(msg => msg.Message.Equals("Clearing Cohort Identifier Cache")));
            }
            finally
            {
                //make the ExtractionConfiguration not use the cic query
                _configuration.CohortRefreshPipeline_ID             = null;
                _configuration.CohortIdentificationConfiguration_ID = null;
                _configuration.SaveToDatabase();

                //delete the cic query
                cic.QueryCachingServer_ID = null;
                cic.SaveToDatabase();
                cic.DeleteInDatabase();

                //delete the caching database
                queryCacheServer.DeleteInDatabase();
                cachedb.Drop();
            }
        }
        public void TestActuallyCreatingIt(DatabaseType type)
        {
            var db = GetCleanedServer(type);

            //drop it
            db.Drop();

            CreateNewCohortDatabaseWizard wizard = new CreateNewCohortDatabaseWizard(db, CatalogueRepository, DataExportRepository, false);

            _extractionInfo2.IsExtractionIdentifier = true;
            _extractionInfo2.SaveToDatabase();

            var candidate = wizard.GetPrivateIdentifierCandidates().Single(c => c.RuntimeName.Equals("PrivateIdentifierB"));
            var ect       = wizard.CreateDatabase(
                candidate,
                new ThrowImmediatelyCheckNotifier());

            //database should exist
            DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseName);
            Assert.IsTrue(db.Exists());

            //did it create the correct type?
            Assert.AreEqual(type, ect.DatabaseType);

            //the ExternalCohortTable should pass tests
            ect.Check(new ThrowImmediatelyCheckNotifier());

            //now try putting someone in it
            //the project it will go under
            var project = new Project(DataExportRepository, "MyProject");

            project.ProjectNumber = 10;
            project.SaveToDatabase();

            //the request to put it under there
            var request = new CohortCreationRequest(project, new CohortDefinition(null, "My cohort", 1, 10, ect), DataExportRepository, "Blah");

            //the actual cohort data
            DataTable dt = new DataTable();

            dt.Columns.Add(_extractionInfo2.GetRuntimeName());
            dt.Rows.Add(101243); //_extractionInfo2 is of type int

            //the destination component that will put it there
            var dest = new BasicCohortDestination();

            dest.PreInitialize(request, new ThrowImmediatelyDataLoadEventListener());

            //tell it to use the guid allocator
            dest.ReleaseIdentifierAllocator = typeof(GuidReleaseIdentifierAllocator);

            dest.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            dest.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);

            var cohort = request.CohortCreatedIfAny;

            Assert.IsNotNull(cohort);

            var externalData = cohort.GetExternalData();

            Assert.AreEqual(10, externalData.ExternalProjectNumber);
            Assert.IsFalse(string.IsNullOrEmpty(externalData.ExternalDescription));


            Assert.AreEqual(DateTime.Now.Year, externalData.ExternalCohortCreationDate.Value.Year);
            Assert.AreEqual(DateTime.Now.Month, externalData.ExternalCohortCreationDate.Value.Month);
            Assert.AreEqual(DateTime.Now.Day, externalData.ExternalCohortCreationDate.Value.Day);
            Assert.AreEqual(DateTime.Now.Hour, externalData.ExternalCohortCreationDate.Value.Hour);

            cohort.AppendToAuditLog("Test");

            Assert.IsTrue(cohort.AuditLog.Contains("Test"));

            Assert.AreEqual(1, cohort.Count);
            Assert.AreEqual(1, cohort.CountDistinct);

            var cohortTable = cohort.FetchEntireCohort();

            Assert.AreEqual(1, cohortTable.Rows.Count);

            var helper = ect.GetQuerySyntaxHelper();

            Assert.AreEqual(101243, cohortTable.Rows[0][helper.GetRuntimeName(ect.PrivateIdentifierField)]);
            var aguid = cohortTable.Rows[0][helper.GetRuntimeName(ect.ReleaseIdentifierField)].ToString();

            Assert.IsFalse(string.IsNullOrWhiteSpace(aguid)); //should be a guid

            //test reversing the anonymisation of something
            var dtAno = new DataTable();

            dtAno.Columns.Add(cohort.GetReleaseIdentifier(true));
            dtAno.Columns.Add("Age");
            dtAno.Rows.Add(aguid, 23);
            dtAno.Rows.Add(aguid, 99);

            cohort.ReverseAnonymiseDataTable(dtAno, new ThrowImmediatelyDataLoadEventListener(), true);

            Assert.AreEqual(2, dtAno.Columns.Count);
            Assert.IsTrue(dtAno.Columns.Contains(cohort.GetPrivateIdentifier(true)));

            Assert.AreEqual("101243", dtAno.Rows[0][cohort.GetPrivateIdentifier(true)]);
            Assert.AreEqual("101243", dtAno.Rows[1][cohort.GetPrivateIdentifier(true)]);
        }
Example #16
        public void SetupExampleTables()
        {
            string sql =
                @"CREATE TABLE [dbo].[Tests](
	[chi] [varchar](10) NULL,
	[Date] [datetime] NULL,
	[hb_extract] [varchar](1) NULL,
	[TestId] [int] NOT NULL,
 CONSTRAINT [PK_Tests] PRIMARY KEY CLUSTERED 
(
	[TestId] ASC
)
) 

GO

CREATE TABLE [dbo].[Results](
	[TestId] [int] NOT NULL,
	[Measure] [varchar](10) NOT NULL,
	[Value] [int] NULL,
 CONSTRAINT [PK_Results] PRIMARY KEY CLUSTERED 
(
	[TestId] ASC,
	[Measure] ASC
)
)

GO

ALTER TABLE [dbo].[Results]  WITH CHECK ADD  CONSTRAINT [FK_Results_Tests] FOREIGN KEY([TestId])
REFERENCES [dbo].[Tests] ([TestId])
GO";
            var server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;

            using (var con = server.GetConnection())
            {
                con.Open();
                UsefulStuff.ExecuteBatchNonQuery(sql, con);
            }

            var importer1 = new TableInfoImporter(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("Tests"));
            var importer2 = new TableInfoImporter(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("Results"));

            importer1.DoImport(out t1, out c1);

            importer2.DoImport(out t2, out c2);

            var engineer1 = new ForwardEngineerCatalogue(t1, c1, true);
            var engineer2 = new ForwardEngineerCatalogue(t2, c2, true);

            engineer1.ExecuteForwardEngineering(out cata1, out cataItems1, out eis1);
            engineer2.ExecuteForwardEngineering(out cata2, out cataItems2, out eis2);

            new JoinInfo(CatalogueRepository,
                         c1.Single(e => e.GetRuntimeName().Equals("TestId")),
                         c2.Single(e => e.GetRuntimeName().Equals("TestId")),
                         ExtractionJoinType.Left, null);

            _anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOTes", "T");
            _anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 10;
            _anoTable.SaveToDatabase();
            _anoTable.PushToANOServerAsNewTable("int", new ThrowImmediatelyCheckNotifier());

            _comboCata = new Catalogue(CatalogueRepository, "Combo Catalogue");

            //pk
            var ciTestId  = new CatalogueItem(CatalogueRepository, _comboCata, "TestId");
            var colTestId = c1.Single(c => c.GetRuntimeName().Equals("TestId"));

            ciTestId.ColumnInfo_ID = colTestId.ID;
            ciTestId.SaveToDatabase();
            var eiTestId = new ExtractionInformation(CatalogueRepository, ciTestId, colTestId, colTestId.Name);

            //Measure
            var ciMeasure  = new CatalogueItem(CatalogueRepository, _comboCata, "Measuree");
            var colMeasure = c2.Single(c => c.GetRuntimeName().Equals("Measure"));

            ciMeasure.ColumnInfo_ID = colMeasure.ID;
            ciMeasure.SaveToDatabase();
            var eiMeasure = new ExtractionInformation(CatalogueRepository, ciMeasure, colMeasure, colMeasure.Name);

            //Date
            var ciDate = new CatalogueItem(CatalogueRepository, _comboCata, "Dat");

            var colDate = c1.Single(c => c.GetRuntimeName().Equals("Date"));

            ciDate.ColumnInfo_ID = colDate.ID;
            ciDate.SaveToDatabase();
            var eiDate = new ExtractionInformation(CatalogueRepository, ciDate, colDate, colDate.Name);

            var destDatabaseName = TestDatabaseNames.GetConsistentName("ANOMigrationTwoTableTests");

            _destinationDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(destDatabaseName);
            _destinationDatabase.Create(true);
        }
Example #17
        // This no longer copies between servers, but the original test didn't guarantee that would happen anyway
        public void CloneDatabaseAndTable()
        {
            string testLiveDatabaseName = TestDatabaseNames.GetConsistentName("TEST");

            var testDb = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testLiveDatabaseName);
            var raw    = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testLiveDatabaseName + "_RAW");

            foreach (DiscoveredDatabase db in new[] { raw, testDb })
            {
                if (db.Exists())
                {
                    foreach (DiscoveredTable table in db.DiscoverTables(true))
                    {
                        table.Drop();
                    }

                    db.Drop();
                }
            }

            DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase(testLiveDatabaseName);
            Assert.IsTrue(testDb.Exists());

            testDb.CreateTable("Table_1", new[] { new DatabaseColumnRequest("Id", "int") });


            //clone the builder
            var builder = new SqlConnectionStringBuilder(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder.ConnectionString)
            {
                InitialCatalog = testLiveDatabaseName
            };

            var dbConfiguration = new HICDatabaseConfiguration(new DiscoveredServer(builder), null, new ServerDefaults(CatalogueRepository));

            var cloner = new DatabaseCloner(dbConfiguration);

            try
            {
                var cloneDb = cloner.CreateDatabaseForStage(LoadBubble.Raw);

                //confirm database appeared
                Assert.IsTrue(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testLiveDatabaseName + "_RAW").Exists());

                //now create a catalogue and wire it up to the table TEST on the test database server
                Catalogue cata = SetupATestCatalogue(builder, testLiveDatabaseName, "Table_1");

                //now clone the catalogue data structures to MachineName
                foreach (TableInfo tableInfo in cata.GetTableInfoList(false))
                {
                    cloner.CreateTablesInDatabaseFromCatalogueInfo(new ThrowImmediatelyDataLoadEventListener(), tableInfo, LoadBubble.Raw);
                }

                Assert.IsTrue(raw.Exists());
                Assert.IsTrue(raw.ExpectTable("Table_1").Exists());
            }
            finally
            {
                cloner.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadEventListener());

                while (toCleanUp.Count > 0)
                {
                    try
                    {
                        toCleanUp.Pop().DeleteInDatabase();
                    }
                    catch (Exception e)
                    {
                        //always clean up everything
                        Console.WriteLine(e);
                    }
                }
            }
        }
Example #18
        public void CreateANOVersion_TestSkippingTables(bool tableInfoAlreadyExistsForSkippedTable, bool putPlanThroughSerialization)
        {
            var dbFrom = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.GetConsistentName("CreateANOVersion_TestSkippingTables_From"));
            var dbTo   = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.GetConsistentName("CreateANOVersion_TestSkippingTables_To"));

            dbFrom.Create(true);
            dbTo.Create(true);

            try
            {
                var tblFromHeads = dbFrom.CreateTable("Heads", new[]
                {
                    new DatabaseColumnRequest("SkullColor", "varchar(10)"),
                    new DatabaseColumnRequest("Vertebrae", "varchar(25)")
                });

                var cols = new[]
                {
                    new DatabaseColumnRequest("SpineColor", "varchar(10)"),
                    new DatabaseColumnRequest("Vertebrae", "varchar(25)")
                };

                var tblFromNeck = dbFrom.CreateTable("Necks", cols);

                //Necks table already exists in the destination so will be skipped for migration but still needs to be imported
                var tblToNeck = dbTo.CreateTable("Necks", cols);


                TableInfo    fromHeadsTableInfo;
                ColumnInfo[] fromHeadsColumnInfo;
                TableInfo    fromNeckTableInfo;
                ColumnInfo[] fromNeckColumnInfo;
                TableInfo    toNecksTableInfo  = null;
                ColumnInfo[] toNecksColumnInfo = null;

                TableInfoImporter i1 = new TableInfoImporter(CatalogueRepository, tblFromHeads);
                i1.DoImport(out fromHeadsTableInfo, out fromHeadsColumnInfo);

                TableInfoImporter i2 = new TableInfoImporter(CatalogueRepository, tblFromNeck);
                i2.DoImport(out fromNeckTableInfo, out fromNeckColumnInfo);

                //The table already exists, but does the Catalogue reference (TableInfo) to it exist?
                if (tableInfoAlreadyExistsForSkippedTable)
                {
                    TableInfoImporter i3 = new TableInfoImporter(CatalogueRepository, tblToNeck);
                    i3.DoImport(out toNecksTableInfo, out toNecksColumnInfo);
                }

                //Create a JoinInfo so the query builder knows how to connect the tables
                new JoinInfo(CatalogueRepository,
                             fromHeadsColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")),
                             fromNeckColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")), ExtractionJoinType.Inner, null
                             );

                var                     cataEngineer = new ForwardEngineerCatalogue(fromHeadsTableInfo, fromHeadsColumnInfo, true);
                Catalogue               cata;
                CatalogueItem[]         cataItems;
                ExtractionInformation[] extractionInformations;
                cataEngineer.ExecuteForwardEngineering(out cata, out cataItems, out extractionInformations);

                var cataEngineer2 = new ForwardEngineerCatalogue(fromNeckTableInfo, fromNeckColumnInfo, true);
                cataEngineer2.ExecuteForwardEngineering(cata);

                //4 ExtractionInformations in the source Catalogue (2 from Heads and 2 from Necks)
                Assert.AreEqual(cata.GetAllExtractionInformation(ExtractionCategory.Any).Count(), 4);

                //setup ANOTable on head
                var anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOSkullColor", "C");
                anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 10;
                anoTable.SaveToDatabase();
                anoTable.PushToANOServerAsNewTable("varchar(10)", new ThrowImmediatelyCheckNotifier());

                //////////////////The actual test!/////////////////
                var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata);

                //anonymise (ANO) the SkullColor column
                var scPlan = planManager.GetPlanForColumnInfo(fromHeadsColumnInfo.Single(col => col.GetRuntimeName().Equals("SkullColor")));
                scPlan.ANOTable = anoTable;
                scPlan.Plan     = Plan.ANO;

                if (putPlanThroughSerialization)
                {
                    var asString = JsonConvertExtensions.SerializeObject(planManager, RepositoryLocator);

                    planManager = (ForwardEngineerANOCataloguePlanManager)JsonConvertExtensions.DeserializeObject(asString, typeof(ForwardEngineerANOCataloguePlanManager), RepositoryLocator);
                }

                //not part of serialization
                planManager.TargetDatabase = dbTo;
                planManager.SkippedTables.Add(fromNeckTableInfo);//skip the Necks table because it already exists (ColumnInfos may or may not exist but the physical table definitely does)

                var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);

                if (!tableInfoAlreadyExistsForSkippedTable)
                {
                    var ex = Assert.Throws<Exception>(engine.Execute);
                    Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, "Found '0' ColumnInfos called"));
                    Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, @"\[Necks\]\.\[SpineColor\]"));

                    return;
                }
                else
                {
                    engine.Execute();
                }

                var newCata = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Name.Equals("ANOHeads"));
                Assert.IsTrue(newCata.Exists());

                var newCataItems = newCata.CatalogueItems;
                Assert.AreEqual(newCataItems.Count(), 4);

                //every CatalogueItem should have an ExtractionInformation
                //and all of those ExtractionInformations should point to the new table location
                Assert.IsTrue(newCataItems.All(ci => ci.ExtractionInformation.SelectSQL.Contains(dbTo.GetRuntimeName())));

                //these columns should all exist
                Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SkullColor")));
                Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SpineColor")));
                Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("Vertebrae"))); //actually there will be 2 copies of this one: one from Necks, one from Heads

                //new ColumnInfo should have a reference to the anotable
                Assert.IsTrue(newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo.ANOTable_ID == anoTable.ID);


                var newSpineColorColumnInfo = newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo;

                //table info already existed, make sure the new CatalogueItems point to the same columninfos / table infos
                Assert.IsTrue(newCataItems.Select(ci => ci.ColumnInfo).Contains(newSpineColorColumnInfo));
            }
            finally
            {
                dbFrom.Drop();
                dbTo.Drop();
            }
        }
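Note: PlanManagementTest and CreateANOVersion_TestSkippingTables above both drive ForwardEngineerANOCataloguePlanManager through the same steps: set a target database, choose a Plan for each ColumnInfo, check the plan, then execute the engine. The fragment below is a condensed, illustrative sketch of just those steps; the variable names (cata, dbTo, skullColorColumnInfo, anoTable) are stand-ins for the objects created in the tests above.

            //Condensed sketch of configuring and executing an ANO migration plan (illustrative only)
            var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata);
            planManager.TargetDatabase = dbTo;                                    //where the ANO version of the Catalogue will be created

            var scPlan = planManager.GetPlanForColumnInfo(skullColorColumnInfo);  //pick the column to anonymise
            scPlan.ANOTable = anoTable;                                           //substitute its values via this ANOTable
            scPlan.Plan     = Plan.ANO;

            planManager.Check(new ThrowImmediatelyCheckNotifier());               //verify the plan before running it
            new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager).Execute();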
        public void SQLServerDestination()
        {
            DiscoveredDatabase dbToExtractTo = null;

            var ci = new CatalogueItem(CatalogueRepository, _catalogue, "YearOfBirth");
            var columnToTransform = _columnInfos.Single(c => c.GetRuntimeName().Equals("DateOfBirth", StringComparison.CurrentCultureIgnoreCase));

            string transform = "YEAR(" + columnToTransform.Name + ")";

            var ei = new ExtractionInformation(CatalogueRepository, ci, columnToTransform, transform);

            ei.Alias = "YearOfBirth";
            ei.ExtractionCategory = ExtractionCategory.Core;
            ei.SaveToDatabase();

            //make it part of the ExtractionConfiguration
            var newColumn = new ExtractableColumn(DataExportRepository, _selectedDataSet.ExtractableDataSet, (ExtractionConfiguration)_selectedDataSet.ExtractionConfiguration, ei, 0, ei.SelectSQL);

            newColumn.Alias = ei.Alias;
            newColumn.SaveToDatabase();

            _extractableColumns.Add(newColumn);

            //recreate the extraction command so it gets updated with the new column too.
            _request = new ExtractDatasetCommand(_configuration, _extractableCohort, new ExtractableDatasetBundle(_extractableDataSet),
                                                 _extractableColumns, new HICProjectSalt(_project),
                                                 new ExtractionDirectory(@"C:\temp\", _configuration));

            try
            {
                _configuration.Name = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest";
                _configuration.SaveToDatabase();

                ExtractionPipelineUseCase            execute;
                IExecuteDatasetExtractionDestination result;

                var dbname = TestDatabaseNames.GetConsistentName(_project.Name + "_" + _project.ProjectNumber);
                dbToExtractTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbname);
                if (dbToExtractTo.Exists())
                {
                    dbToExtractTo.Drop();
                }

                base.Execute(out execute, out result);

                var destinationTable = dbToExtractTo.ExpectTable(_expectedTableName);
                Assert.IsTrue(destinationTable.Exists());

                var dt = destinationTable.GetDataTable();

                Assert.AreEqual(1, dt.Rows.Count);
                Assert.AreEqual(_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()].Trim(), dt.Rows[0]["ReleaseID"]);
                Assert.AreEqual(new DateTime(2001, 1, 1), dt.Rows[0]["DateOfBirth"]);
                Assert.AreEqual(2001, dt.Rows[0]["YearOfBirth"]);

                Assert.AreEqual(columnToTransform.Data_type, destinationTable.DiscoverColumn("DateOfBirth").DataType.SQLType);
                Assert.AreEqual("int", destinationTable.DiscoverColumn("YearOfBirth").DataType.SQLType);
            }
            finally
            {
                if (_extractionServer != null)
                {
                    _extractionServer.DeleteInDatabase();
                }

                if (dbToExtractTo != null)
                {
                    dbToExtractTo.Drop();
                }
            }
        }
Example #20
        public void CreateANOVersionTest_LookupsAndExtractionInformations()
        {
            var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest");

            var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);

            db.Create(true);

            BulkTestsData bulk = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);

            bulk.SetupTestData();
            bulk.ImportAsCatalogue();

            //Create a lookup table on the server
            var lookupTbl = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("z_sexLookup", new[]
            {
                new DatabaseColumnRequest("Code", "varchar(1)")
                {
                    IsPrimaryKey = true
                },
                new DatabaseColumnRequest("hb_Code", "varchar(1)")
                {
                    IsPrimaryKey = true
                },
                new DatabaseColumnRequest("Description", "varchar(100)")
            });

            //import a reference to the table
            TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, lookupTbl);

            ColumnInfo[] lookupColumnInfos;
            TableInfo    lookupTableInfo;

            importer.DoImport(out lookupTableInfo, out lookupColumnInfos);

            //Create a Lookup reference
            var ciSex = bulk.catalogue.CatalogueItems.Single(c => c.Name == "sex");
            var ciHb  = bulk.catalogue.CatalogueItems.Single(c => c.Name == "hb_extract");

            var eiChi = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "chi");

            eiChi.IsExtractionIdentifier = true;
            eiChi.SaveToDatabase();

            var eiCentury = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "century");

            eiCentury.HashOnDataRelease  = true;
            eiCentury.ExtractionCategory = ExtractionCategory.Internal;
            eiCentury.SaveToDatabase();

            //add a transform
            var eiPostcode = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "current_postcode");

            eiPostcode.SelectSQL = string.Format("LEFT(10,{0}.[current_postcode])", eiPostcode.ColumnInfo.TableInfo.Name);
            eiPostcode.Alias     = "MyMutilatedColumn";
            eiPostcode.SaveToDatabase();

            //add a combo transform
            var ciComboCol = new CatalogueItem(CatalogueRepository, bulk.catalogue, "ComboColumn");

            var colForename = bulk.columnInfos.Single(c => c.GetRuntimeName() == "forename");
            var colSurname  = bulk.columnInfos.Single(c => c.GetRuntimeName() == "surname");

            var eiComboCol = new ExtractionInformation(CatalogueRepository, ciComboCol, colForename, colForename + " + ' ' + " + colSurname);

            eiComboCol.Alias = "ComboColumn";
            eiComboCol.SaveToDatabase();

            var eiDataLoadRunId = bulk.extractionInformations.Single(ei => ei.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID));

            eiDataLoadRunId.DeleteInDatabase();


            var lookup = new Lookup(CatalogueRepository, lookupColumnInfos[2], ciSex.ColumnInfo, lookupColumnInfos[0], ExtractionJoinType.Left, null);

            //now let's make it worse: let's assume the sex code changes per healthboard, therefore the join to the lookup requires both fields (sex and hb_extract)
            var compositeLookup = new LookupCompositeJoinInfo(CatalogueRepository, lookup, ciHb.ColumnInfo, lookupColumnInfos[1]);

            //now let's add the _Desc field to the original Catalogue
            int orderToInsertDescriptionFieldAt = ciSex.ExtractionInformation.Order;

            //bump everyone down 1
            foreach (var toBumpDown in bulk.catalogue.CatalogueItems.Select(ci => ci.ExtractionInformation).Where(e => e != null && e.Order > orderToInsertDescriptionFieldAt))
            {
                toBumpDown.Order++;
                toBumpDown.SaveToDatabase();
            }

            var ciDescription = new CatalogueItem(CatalogueRepository, bulk.catalogue, "Sex_Desc");
            var eiDescription = new ExtractionInformation(CatalogueRepository, ciDescription, lookupColumnInfos[2], lookupColumnInfos[2].Name);

            eiDescription.Alias = "Sex_Desc";
            eiDescription.Order = orderToInsertDescriptionFieldAt + 1;
            eiDescription.ExtractionCategory = ExtractionCategory.Supplemental;
            eiDescription.SaveToDatabase();

            bulk.catalogue.ClearAllInjections();

            //check it worked
            QueryBuilder qb = new QueryBuilder(null, null);

            qb.AddColumnRange(bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any));

            //The query builder should be able to successfully create SQL
            Console.WriteLine(qb.SQL);

            //there should be 2 tables involved in the query [z_sexLookup] and [BulkData]
            Assert.AreEqual(2, qb.TablesUsedInQuery.Count);

            //the query builder should have identified the lookup
            Assert.AreEqual(lookup, qb.GetDistinctRequiredLookups().Single());

            //////////////////////////////////////////////////////////////////////////////////////The Actual Bit Being Tested////////////////////////////////////////////////////
            var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);

            planManager.TargetDatabase = db;

            //setup test rules for migrator
            CreateMigrationRules(planManager, bulk);

            //rules should pass checks
            Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

            var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);

            engine.Execute();
            //////////////////////////////////////////////////////////////////////////////////////End The Actual Bit Being Tested////////////////////////////////////////////////////

            var anoCatalogue = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Folder.Path.StartsWith("\\ano"));

            Assert.IsTrue(anoCatalogue.Exists());

            //The new Catalogue should have the same number of ExtractionInformations
            var eiSource      = bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();
            var eiDestination = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();

            Assert.AreEqual(eiSource.Length, eiDestination.Length, "Both the original and the ANO catalogue should have the same number of ExtractionInformations (extractable columns)");

            for (int i = 0; i < eiSource.Length; i++)
            {
                Assert.AreEqual(eiSource[i].Order, eiDestination[i].Order, "ExtractionInformations in the source and destination Catalogue should have the same order");

                Assert.AreEqual(eiSource[i].GetRuntimeName(),
                                eiDestination[i].GetRuntimeName().Replace("ANO", ""), "ExtractionInformations in the source and destination Catalogue should have the same names (excluding ANO prefix)");

                Assert.AreEqual(eiSource[i].ExtractionCategory, eiDestination[i].ExtractionCategory, "Old / New ANO ExtractionInformations did not match on ExtractionCategory");
                Assert.AreEqual(eiSource[i].IsExtractionIdentifier, eiDestination[i].IsExtractionIdentifier, "Old / New ANO ExtractionInformations did not match on IsExtractionIdentifier");
                Assert.AreEqual(eiSource[i].HashOnDataRelease, eiDestination[i].HashOnDataRelease, "Old / New ANO ExtractionInformations did not match on HashOnDataRelease");
                Assert.AreEqual(eiSource[i].IsPrimaryKey, eiDestination[i].IsPrimaryKey, "Old / New ANO ExtractionInformations did not match on IsPrimaryKey");
            }

            //check it worked
            QueryBuilder qbdestination = new QueryBuilder(null, null);

            qbdestination.AddColumnRange(anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));

            //The query builder should be able to successfully create SQL
            Console.WriteLine(qbdestination.SQL);

            var anoEiPostcode = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("MyMutilatedColumn"));

            //The transform on postcode should have been refactored to the new table name and preserve the scalar function LEFT...
            Assert.AreEqual(string.Format("LEFT(10,{0}.[current_postcode])", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiPostcode.SelectSQL);

            var anoEiComboCol = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("ComboColumn"));

            //The combo column transform should also have been refactored to the new table name, preserving the forename + surname concatenation
            Assert.AreEqual(string.Format("{0}.[forename] + ' ' + {0}.[surname]", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiComboCol.SelectSQL);

            //there should be 2 tables involved in the query [z_sexLookup] and [BulkData]
            Assert.AreEqual(2, qbdestination.TablesUsedInQuery.Count);

            //the query builder should have identified the lookup but it should be the new one not the old one
            Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Count(), "New query builder for ano catalogue did not correctly identify that there was a Lookup");
            Assert.AreNotEqual(lookup, qbdestination.GetDistinctRequiredLookups().Single(), "New query builder for ano catalogue identified the OLD Lookup!");

            Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins().Count(), "The new Lookup did not have the composite join key (sex/hb_extract)");
            Assert.AreNotEqual(compositeLookup, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins(), "New query builder for ano catalogue identified the OLD LookupCompositeJoinInfo!");

            db.Drop();

            var exports = CatalogueRepository.GetAllObjects<ObjectExport>().Count();
            var imports = CatalogueRepository.GetAllObjects<ObjectImport>().Count();

            Assert.AreEqual(exports, imports);
            Assert.IsTrue(exports > 0);
        }
        public void JoinablesWithCache()
        {
            string queryCachingDatabaseName = To.GetRuntimeName();

            _queryCachingDatabase = To;

            var builder = new CohortQueryBuilder(aggregate1, null);

            //make aggregate 2 a joinable
            var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate2);

            joinable2.AddUser(aggregate1);

            //make aggregate 2 have an additional column (dtCreated)
            var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName().Equals("dtCreated"));

            aggregate2.AddDimension(anotherCol);

            MasterDatabaseScriptExecutor scripter = new MasterDatabaseScriptExecutor(_queryCachingDatabase);

            scripter.CreateAndPatchDatabase(new QueryCachingPatcher(), new AcceptAllCheckNotifier());

            var queryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, queryCachingDatabaseName, null);

            queryCachingDatabaseServer.SetProperties(_queryCachingDatabase);

            //make the builder use the query cache we just set up
            builder.CacheServer = queryCachingDatabaseServer;
            try
            {
                var builderForCaching = new CohortQueryBuilder(aggregate2, null, true);

                var cacheDt = new DataTable();
                using (SqlConnection con = (SqlConnection)Database.Server.GetConnection())
                {
                    con.Open();
                    SqlDataAdapter da = new SqlDataAdapter(new SqlCommand(builderForCaching.SQL, con));
                    da.Fill(cacheDt);
                }

                var cacheManager = new CachedAggregateConfigurationResultsManager(queryCachingDatabaseServer);
                cacheManager.CommitResults(new CacheCommitJoinableInceptionQuery(aggregate2, builderForCaching.SQL, cacheDt, null, 30));

                try
                {
                    Console.WriteLine(builder.SQL);

                    using (var con = (SqlConnection)Database.Server.GetConnection())
                    {
                        con.Open();

                        var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader();

                        //can read at least one row
                        Assert.IsTrue(dbReader.Read());
                    }

                    string expectedTableAlias = "ix" + joinable2.ID;

                    //after adding the joinable, the builder should LEFT JOIN the cached inception query under the ix{ID} alias
                    Assert.AreEqual(
                        CollapseWhitespace(
                            string.Format(
                                @"/*cic_{2}_UnitTestAggregate1*/
SELECT
distinct
[" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData].[chi]
FROM 
[" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData]
LEFT Join (
	/*Cached:cic_{2}_UnitTestAggregate2*/
	select * from [{3}]..[JoinableInceptionQuery_AggregateConfiguration{1}]

){0}
on [" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData].[chi] = {0}.chi",
                                expectedTableAlias,                   //{0}
                                aggregate2.ID,                        //{1}
                                cohortIdentificationConfiguration.ID, //{2}
                                queryCachingDatabaseName)             //{3}
                            ), CollapseWhitespace(builder.SQL));
                }
                finally
                {
                    joinable2.Users[0].DeleteInDatabase();
                    joinable2.DeleteInDatabase();
                }
            }
            finally
            {
                queryCachingDatabaseServer.DeleteInDatabase();
                DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(queryCachingDatabaseName).Drop();
            }
        }
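        //The SQL comparison above goes through a CollapseWhitespace helper that is not shown in this
        //listing.  A minimal sketch, assuming the helper simply normalises every run of whitespace
        //(newlines, tabs, spaces) to a single space so expected and generated SQL compare ignoring layout:
        private static string CollapseWhitespace(string sql)
        {
            return System.Text.RegularExpressions.Regex.Replace(sql ?? string.Empty, @"\s+", " ").Trim();
        }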
Exemple #22
0
        public void SQLServerDestination(bool lookupsEtc)
        {
            DiscoveredDatabase dbToExtractTo = null;

            var ci = new CatalogueItem(CatalogueRepository, _catalogue, "YearOfBirth");

            _columnToTransform = _columnInfos.Single(c => c.GetRuntimeName().Equals("DateOfBirth", StringComparison.CurrentCultureIgnoreCase));

            string transform = "YEAR(" + _columnToTransform.Name + ")";


            if (_catalogue.GetAllExtractionInformation(ExtractionCategory.Any).All(ei => ei.GetRuntimeName() != "YearOfBirth"))
            {
                var ei = new ExtractionInformation(CatalogueRepository, ci, _columnToTransform, transform);
                ei.Alias = "YearOfBirth";
                ei.ExtractionCategory = ExtractionCategory.Core;
                ei.SaveToDatabase();

                //make it part of the ExtractionConfiguration
                var newColumn = new ExtractableColumn(DataExportRepository, _selectedDataSet.ExtractableDataSet, (ExtractionConfiguration)_selectedDataSet.ExtractionConfiguration, ei, 0, ei.SelectSQL);
                newColumn.Alias = ei.Alias;
                newColumn.SaveToDatabase();

                _extractableColumns.Add(newColumn);
            }

            if (lookupsEtc)
            {
                CreateLookupsEtc();
            }

            try
            {
                _configuration.Name = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest";
                _configuration.SaveToDatabase();

                var dbname = TestDatabaseNames.GetConsistentName(_project.Name + "_" + _project.ProjectNumber);
                dbToExtractTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbname);
                if (dbToExtractTo.Exists())
                {
                    dbToExtractTo.Drop();
                }

                base.ExecuteRunner();

                var destinationTable = dbToExtractTo.ExpectTable(_expectedTableName);
                Assert.IsTrue(destinationTable.Exists());

                var dt = destinationTable.GetDataTable();

                Assert.AreEqual(1, dt.Rows.Count);
                Assert.AreEqual(_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()].Trim(), dt.Rows[0]["ReleaseID"]);
                Assert.AreEqual(new DateTime(2001, 1, 1), dt.Rows[0]["DateOfBirth"]);
                Assert.AreEqual(2001, dt.Rows[0]["YearOfBirth"]);

                Assert.AreEqual(_columnToTransform.Data_type, destinationTable.DiscoverColumn("DateOfBirth").DataType.SQLType);
                Assert.AreEqual("int", destinationTable.DiscoverColumn("YearOfBirth").DataType.SQLType);

                if (lookupsEtc)
                {
                    AssertLookupsEtcExist(dbToExtractTo);
                }
            }
            finally
            {
                if (dbToExtractTo != null && dbToExtractTo.Exists())
                {
                    dbToExtractTo.Drop();
                }

                _pipeline?.DeleteInDatabase();
            }
        }
Exemple #23
0
        public void TestSingleJob(bool overrideRAW, bool sendDodgyCredentials)
        {
            if (sendDodgyCredentials && !overrideRAW)
            {
                throw new NotSupportedException("Cannot send dodgy credentials if you aren't overriding RAW");
            }

            ServerDefaults defaults   = new ServerDefaults(CatalogueRepository);
            var            oldDefault = defaults.GetDefaultFor(PermissableDefaults.RAWDataLoadServer);

            var testDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
            var testDir     = Directory.CreateDirectory(testDirPath);
            var server      = DiscoveredServerICanCreateRandomDatabasesAndTablesOn;

            var catalogueEntities           = new CatalogueEntities();
            var databaseHelper              = new DatabaseHelper();
            ExternalDatabaseServer external = null;

            try
            {
                // Set up the dataset's project directory and add the CSV file to ForLoading
                var loadDirectory = LoadDirectory.CreateDirectoryStructure(testDir, "TestDataset");
                File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "1.csv"),
                                  "Col1\r\n1\r\n2\r\n3\r\n4");

                databaseHelper.SetUp(server);

                // Create the Catalogue entities for the dataset
                catalogueEntities.Create(CatalogueRepository, databaseHelper.DatabaseToLoad, loadDirectory);

                if (overrideRAW)
                {
                    external = new ExternalDatabaseServer(CatalogueRepository, "RAW Server", null);
                    external.SetProperties(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("master"));

                    if (sendDodgyCredentials)
                    {
                        //deliberately invalid credentials; the original literals were redacted in this
                        //listing, so placeholders are used here - the username is chosen to match the
                        //login failure message asserted further down
                        external.Username = "IveGotaLovely";
                        external.Password = "NotTheRealPassword";
                    }
                    external.SaveToDatabase();

                    defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, external);
                }

                var options = new DleOptions();
                options.LoadMetadata = catalogueEntities.LoadMetadata.ID;
                options.Command      = CommandLineActivity.check;

                //run checks (with ignore errors if we are sending dodgy credentials)
                new RunnerFactory().CreateRunner(new ThrowImmediatelyActivator(RepositoryLocator), options).Run(
                    RepositoryLocator,
                    new ThrowImmediatelyDataLoadEventListener(),
                    sendDodgyCredentials ? (ICheckNotifier)new IgnoreAllErrorsCheckNotifier() : new AcceptAllCheckNotifier(),
                    new GracefulCancellationToken());

                //run load
                options.Command = CommandLineActivity.run;
                var runner = new RunnerFactory().CreateRunner(new ThrowImmediatelyActivator(RepositoryLocator), options);


                if (sendDodgyCredentials)
                {
                    var ex = Assert.Throws <Exception>(() => runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new AcceptAllCheckNotifier(), new GracefulCancellationToken()));
                    Assert.IsTrue(ex.InnerException.Message.Contains("Login failed for user 'IveGotaLovely'"), "Error message did not contain expected text");
                    return;
                }
                else
                {
                    runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new AcceptAllCheckNotifier(), new GracefulCancellationToken());
                }


                var archiveFile = loadDirectory.ForArchiving.EnumerateFiles("*.zip").OrderByDescending(f => f.FullName).FirstOrDefault();
                Assert.NotNull(archiveFile, "Archive file has not been created by the load.");
                Assert.IsFalse(loadDirectory.ForLoading.EnumerateFileSystemInfos().Any());
            }
            finally
            {
                //reset the original RAW server
                defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, oldDefault);

                if (external != null)
                {
                    external.DeleteInDatabase();
                }

                testDir.Delete(true);

                databaseHelper.Dispose();
                catalogueEntities.Dispose();
            }
        }
Exemple #24
0
        private void CreateCohortDatabase()
        {
            //{0} is the name of the Cohort database; in this version of the script the private and
            //release identifier columns are fixed at varchar(10) (CHI-length identifiers) rather than templated

            var database = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(CohortDatabaseName);

            if (database.Exists())
            {
                database.DiscoverTables(false).ToList().ForEach(t => t.Drop());
                database.Drop();
            }

            string sql = string.Format(@"
CREATE DATABASE {0} 
GO

USE {0}

CREATE TABLE [dbo].[Cohort](
       [PrivateID] [varchar](10) NOT NULL,
       [ReleaseID] [varchar](10) NULL,
       [cohortDefinition_id] [int] NOT NULL,
CONSTRAINT [PK_Cohort] PRIMARY KEY CLUSTERED 
(
       [PrivateID] ASC,
       [cohortDefinition_id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]
GO

CREATE TABLE [dbo].[CohortDefinition](
       [id] [int] IDENTITY(1,1) NOT NULL,
       [projectNumber] [int] NOT NULL,
       [version] [int] NOT NULL,
       [description] [varchar](4000) NOT NULL,
       [dtCreated] [date] NOT NULL,
CONSTRAINT [PK_CohortDefinition] PRIMARY KEY NONCLUSTERED 
(
       [id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]

GO
ALTER TABLE [dbo].[CohortDefinition] ADD  CONSTRAINT [DF_CohortDefinition_dtCreated]  DEFAULT (getdate()) FOR [dtCreated]
GO
ALTER TABLE [dbo].[Cohort]  WITH CHECK ADD  CONSTRAINT [FK_Cohort_CohortDefinition] FOREIGN KEY([cohortDefinition_id])
REFERENCES [dbo].[CohortDefinition] ([id])
GO
ALTER TABLE [dbo].[Cohort] CHECK CONSTRAINT [FK_Cohort_CohortDefinition]
GO
",
                                       //{0}
                                       CohortDatabaseName);

            using (var con = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.GetConnection())
            {
                con.Open();
                UsefulStuff.ExecuteBatchNonQuery(sql, con, timeout: 15);
                con.Close();
            }
        }
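        //A hedged illustration (not part of the original fixture) of how the two tables created above
        //relate: a CohortDefinition row is inserted first and its IDENTITY id is then used as the
        //cohortDefinition_id foreign key for PrivateID/ReleaseID pairs in Cohort.  The helper name and
        //identifier values are placeholders chosen for the sketch.
        private void InsertExampleCohortRow(DiscoveredDatabase cohortDatabase)
        {
            using (var con = (SqlConnection)cohortDatabase.Server.GetConnection())
            {
                con.Open();
                con.ChangeDatabase(cohortDatabase.GetRuntimeName());

                //create a definition row and capture the IDENTITY value it was assigned
                var definitionId = Convert.ToInt32(new SqlCommand(
                    @"INSERT INTO [dbo].[CohortDefinition](projectNumber,version,description)
                      VALUES (1,1,'example cohort');
                      SELECT SCOPE_IDENTITY();", con).ExecuteScalar());

                //add a single private/release identifier pair belonging to that definition
                var insert = new SqlCommand(
                    @"INSERT INTO [dbo].[Cohort](PrivateID,ReleaseID,cohortDefinition_id)
                      VALUES (@priv,@rel,@def)", con);
                insert.Parameters.AddWithValue("@priv", "1111111111"); //10 character private identifier
                insert.Parameters.AddWithValue("@rel", "R000000001");  //10 character release identifier
                insert.Parameters.AddWithValue("@def", definitionId);
                insert.ExecuteNonQuery();
            }
        }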