Example #1
        /// <summary>
        /// Gets an empty database on the test server of the appropriate DBMS
        /// </summary>
        /// <param name="type">The DBMS you want a server of (a valid connection string must exist in TestDatabases.txt)</param>
        /// <param name="dbnName">null for default test database name (recommended unless you are testing moving data from one database to another on the same test server)</param>
        /// <param name="server"></param>
        /// <param name="database"></param>
        /// <param name="justDropTablesIfPossible">Determines behaviour when the test database already exists.  False to drop and recreate it. True to just drop tables (faster)</param>
        /// <returns></returns>
        protected DiscoveredDatabase GetCleanedServer(DatabaseType type, string dbnName, out DiscoveredServer server, out DiscoveredDatabase database, bool justDropTablesIfPossible = false)
        {
            switch (type)
            {
            case DatabaseType.MicrosoftSQLServer:
                server = new DiscoveredServer(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder);
                break;

            case DatabaseType.MySql:
                server = _discoveredMySqlServer == null ? null : new DiscoveredServer(_discoveredMySqlServer.Builder);
                break;

            case DatabaseType.Oracle:
                server = _discoveredOracleServer == null ? null : new DiscoveredServer(_discoveredOracleServer.Builder);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(type));
            }

            if (server == null)
            {
                Assert.Inconclusive();
            }

            //the Microsoft SQL Server test server must exist; the others are optional (Inconclusive if missing)
            if (!server.Exists() && type != DatabaseType.MicrosoftSQLServer)
            {
                Assert.Inconclusive();
            }

            server.TestConnection();

            database = server.ExpectDatabase(dbnName);

            if (justDropTablesIfPossible && database.Exists())
            {
                foreach (var t in database.DiscoverTables(true))
                {
                    t.Drop();
                }
                foreach (var t in database.DiscoverTableValuedFunctions())
                {
                    t.Drop();
                }
            }
            else
            {
                database.Create(true);
            }

            server.ChangeDatabase(dbnName);

            Assert.IsTrue(database.Exists());

            return(database);
        }
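A usage sketch for the helper above (illustrative only: it assumes the test fixture derives from the class declaring GetCleanedServer and that a MicrosoftSQLServer connection string exists in TestDatabases.txt; the test name and database name are made up):

        [Test]
        public void Example_GetCleanedServer()
        {
            var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer,
                TestDatabaseNames.GetConsistentName("ExampleTests"), //hypothetical database name
                out DiscoveredServer server, out DiscoveredDatabase database);

            //the helper returns an existing, empty database
            Assert.IsTrue(db.Exists());
            Assert.IsEmpty(db.DiscoverTables(false));
        }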
Example #2
 public void DestroyStagingIfExists()
 {
     if (_stagingDatabase != null && _stagingDatabase.Exists())
     {
         _stagingDatabase.Drop();
     }
 }
Example #3
 public void DropDatabases()
 {
     if (_destinationDatabase.Exists())
     {
         _destinationDatabase.Drop();
     }
 }
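Examples #2 and #3 are the two halves of the reset idiom used throughout this page: probe with Exists(), then Drop() and/or Create(). A compact sketch of the full pattern (GetScratchDatabase is a hypothetical stand-in for however the DiscoveredDatabase is obtained):

 public void ResetDatabase()
 {
     DiscoveredDatabase db = GetScratchDatabase(); //hypothetical helper

     //drop any leftovers from a previous run, then recreate the database empty
     if (db.Exists())
     {
         db.Drop();
     }

     db.Create();
 }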
Example #4
        protected void CheckTablesExist(ICheckNotifier notifier)
        {
            try
            {
                if (!_remoteDatabase.Exists())
                    throw new Exception("Database " + _remoteDatabase + " did not exist on the remote server");

                //still worthwhile doing this in case we cannot connect to the server
                var tables = _remoteDatabase.DiscoverTables(true).Select(t => t.GetRuntimeName()).ToArray();
                
                //overrides table level checks
                if (!string.IsNullOrWhiteSpace(RemoteSelectSQL))
                    return;

                //user has just picked a table to copy exactly so we can precheck for it
                if (tables.Contains(RemoteTableName))
                    notifier.OnCheckPerformed(new CheckEventArgs(
                        "successfully found table " + RemoteTableName + " on server " + _remoteDatabase.Server + " on database " +
                        _remoteDatabase,
                        CheckResult.Success, null));
                else
                    notifier.OnCheckPerformed(new CheckEventArgs(
                        "Could not find table called '" + RemoteTableName + "' on server " + _remoteDatabase.Server + " on database " +
                        _remoteDatabase +Environment.NewLine+"(The following tables were found:"+string.Join(",",tables)+")",
                        CheckResult.Fail, null));
            }
            catch (Exception e)
            {
                notifier.OnCheckPerformed(new CheckEventArgs("Problem occurred when trying to enumerate tables on server " + _remoteDatabase.Server + " on database " +_remoteDatabase, CheckResult.Fail, e));
            }
        }
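The notifier argument follows the ICheckNotifier pattern used throughout these examples: the component reports each check and the notifier decides how to react (the ThrowImmediatelyCheckNotifier used in later examples throws on failures). A minimal logging notifier, sketched from the OnCheckPerformed calls shown on this page:

        //minimal ICheckNotifier sketch: logs every check and declines any proposed fix
        //(the interface shape is inferred from the calls in these examples)
        class ConsoleCheckNotifier : ICheckNotifier
        {
            public bool OnCheckPerformed(CheckEventArgs args)
            {
                Console.WriteLine(args.Result + ": " + args.Message);
                return false; //false = do not apply any proposed fix
            }
        }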
Example #5
        private void CreateCohortDatabase()
        {
            _cohortDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(CohortDatabaseName);

            if (_cohortDatabase.Exists())
            {
                DeleteTables(_cohortDatabase);
            }
            else
            {
                _cohortDatabase.Create();
            }

            string sql = string.Format(@"

CREATE TABLE [dbo].[Cohort](
       [PrivateID] [varchar](10) NOT NULL,
       [ReleaseID] [varchar](10) NULL,
       [cohortDefinition_id] [int] NOT NULL,
CONSTRAINT [PK_Cohort] PRIMARY KEY CLUSTERED 
(
       [PrivateID] ASC,
       [cohortDefinition_id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]
GO

CREATE TABLE [dbo].[CohortDefinition](
       [id] [int] IDENTITY(1,1) NOT NULL,
       [projectNumber] [int] NOT NULL,
       [version] [int] NOT NULL,
       [description] [varchar](4000) NOT NULL,
       [dtCreated] [date] NOT NULL,
CONSTRAINT [PK_CohortDefinition] PRIMARY KEY NONCLUSTERED 
(
       [id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]

GO
ALTER TABLE [dbo].[CohortDefinition] ADD  CONSTRAINT [DF_CohortDefinition_dtCreated]  DEFAULT (getdate()) FOR [dtCreated]
GO
ALTER TABLE [dbo].[Cohort]  WITH CHECK ADD  CONSTRAINT [FK_Cohort_CohortDefinition] FOREIGN KEY([cohortDefinition_id])
REFERENCES [dbo].[CohortDefinition] ([id])
GO
ALTER TABLE [dbo].[Cohort] CHECK CONSTRAINT [FK_Cohort_CohortDefinition]
GO
");

            using (var con = _cohortDatabase.Server.GetConnection())
            {
                con.Open();
                UsefulStuff.ExecuteBatchNonQuery(sql, con, timeout: 15);
                con.Close();
            }
        }
Example #6
        private DataTableUploadDestination PrepareDestination(IDataLoadEventListener listener, DataTable toProcess)
        {
            //see if the user has entered an extraction server/database
            if (TargetDatabaseServer == null)
            {
                throw new Exception("TargetDatabaseServer (the place you want to extract the project data to) property has not been set!");
            }

            try
            {
                if (!_destinationDatabase.Exists())
                {
                    _destinationDatabase.Create();
                }

                if (_request is ExtractGlobalsCommand)
                {
                    return(null);
                }

                var tblName = _toProcess.TableName;

                //See if table already exists on the server (likely to cause problems including duplication, schema changes in configuration etc)
                if (_destinationDatabase.ExpectTable(tblName).Exists())
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning,
                                                                "A table called " + tblName + " already exists on server " + TargetDatabaseServer +
                                                                ", data load might crash if it is populated and/or has an incompatible schema"));
                }
                else
                {
                    _tableDidNotExistAtStartOfLoad = true;
                }
            }
            catch (Exception e)
            {
                //Probably the database didn't exist or the credentials were wrong or something
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to inspect destination for already existing datatables", e));
            }

            _destination = new DataTableUploadDestination();

            PrimeDestinationTypesBasedOnCatalogueTypes(toProcess);

            _destination.AllowResizingColumnsAtUploadTime = true;
            _destination.AlterTimeout = AlterTimeout;

            _destination.PreInitialize(_destinationDatabase, listener);

            return(_destination);
        }
Example #7
        /// <summary>
        /// Sets up the databases <see cref="From"/> and <see cref="To"/> on the test database server of the given
        /// <paramref name="dbType"/>.  This method is automatically called with <see cref="DatabaseType.MicrosoftSQLServer"/>
        /// in <see cref="OneTimeSetUp()"/> (NUnit fires it automatically).
        /// </summary>
        /// <param name="dbType">The DBMS type of the test server to set up the databases on</param>
        protected void SetupFromTo(DatabaseType dbType)
        {
            To   = GetCleanedServer(dbType);
            From = To.Server.ExpectDatabase(To.GetRuntimeName() + Suffix);

            // ensure the test staging and live databases are empty
            if (!From.Exists())
            {
                From.Create();
            }
            else
            {
                DeleteTables(From);
            }
        }
Example #8
        public void CreateStaging(DiscoveredServer liveServer)
        {
            _stagingDatabase = liveServer.ExpectDatabase(GetDatabaseName(null, LoadBubble.Staging));

            if (!_stagingDatabase.Exists())
            {
                _stagingDatabase.Create();
            }

            //get rid of any old data from previous load
            foreach (var t in _stagingDatabase.DiscoverTables(false))
            {
                t.Truncate();
            }
        }
Example #9
        private void CreateScratchArea()
        {
            var scratchDatabaseName = TestDatabaseNames.GetConsistentName("ScratchArea");

            DiscoveredDatabaseICanCreateRandomTablesIn = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(scratchDatabaseName);

            //if it already exists drop it
            if (DiscoveredDatabaseICanCreateRandomTablesIn.Exists())
            {
                DiscoveredDatabaseICanCreateRandomTablesIn.Drop();
            }

            //create it
            DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase(scratchDatabaseName);
        }
Example #10
        public void Setup()
        {
            DiscoveredQueryCachingDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(QueryCachingDatabaseName);

            if (DiscoveredQueryCachingDatabase.Exists())
            {
                DiscoveredQueryCachingDatabase.Drop();
            }

            MasterDatabaseScriptExecutor scripter = new MasterDatabaseScriptExecutor(DiscoveredQueryCachingDatabase);
            var p = new QueryCachingPatcher();

            scripter.CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier());

            QueryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, QueryCachingDatabaseName, p);
            QueryCachingDatabaseServer.SetProperties(DiscoveredQueryCachingDatabase);
        }
Example #11
        public void Setup_IdentifierDump()
        {
            IdentifierDump_Database = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(IdentifierDump_DatabaseName);

            if (IdentifierDump_Database.Exists())
            {
                IdentifierDump_Database.Drop();
            }

            var scriptCreate = new MasterDatabaseScriptExecutor(IdentifierDump_Database);
            var p            = new IdentifierDumpDatabasePatcher();

            scriptCreate.CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier());

            //now create a new reference!
            IdentifierDump_ExternalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, IdentifierDump_DatabaseName, p);
            IdentifierDump_ExternalDatabaseServer.SetProperties(IdentifierDump_Database);

            CatalogueRepository.GetServerDefaults().SetDefault(PermissableDefaults.IdentifierDumpServer_ID, IdentifierDump_ExternalDatabaseServer);
        }
Example #12
        private bool CreateDatabaseIfNotExists(DiscoveredDatabase db)
        {
            if (db == null)
            {
                MessageBox.Show("Choose a database");
                return(false);
            }

            if (db.Exists())
            {
                return(true);
            }
            if (MessageBox.Show("Create database '" + db + "'", "Create", MessageBoxButtons.YesNo) != DialogResult.Yes)
            {
                return(false);
            }
            db.Create();

            return(true);
        }
Example #13
        protected override void OneTimeSetUp()
        {
            base.OneTimeSetUp();

            queryCacheDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.Prefix + "QueryCache");

            if (queryCacheDatabase.Exists())
            {
                base.DeleteTables(queryCacheDatabase);
            }

            MasterDatabaseScriptExecutor executor = new MasterDatabaseScriptExecutor(queryCacheDatabase);

            var p = new QueryCachingPatcher();

            executor.CreateAndPatchDatabase(p, new AcceptAllCheckNotifier());

            externalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, "QueryCacheForUnitTests", p);
            externalDatabaseServer.SetProperties(queryCacheDatabase);
        }
Example #14
        public void DiscoverState()
        {
            _unplannedChildren.Clear();

            if (!Database.Exists())
            {
                State = LoadDiagramState.NotFound;
                foreach (var plannedChild in _anticipatedChildren)
                {
                    plannedChild.SetStateNotFound();
                }

                return;
            }

            //database does exist
            State = LoadDiagramState.Found;

            //so check the children (tables) for state
            foreach (var plannedChild in _anticipatedChildren)
            {
                plannedChild.DiscoverState();
            }

            //also discover any unplanned tables if not live
            if (_bubble != LoadBubble.Live)
            {
                foreach (DiscoveredTable discoveredTable in Database.DiscoverTables(true))
                {
                    //it's an anticipated one
                    if (_anticipatedChildren.Any(c => c.TableName.Equals(discoveredTable.GetRuntimeName(), StringComparison.CurrentCultureIgnoreCase)))
                    {
                        continue;
                    }

                    //it's unplanned (maybe the user created it as part of their load script)
                    _unplannedChildren.Add(new UnplannedTable(discoveredTable));
                }
            }
        }
Example #15
        /// <summary>
        /// Creates the <see cref="BulkDataTable"/> in the <see cref="BulkDataDatabase"/> and uploads test data.  Use <see cref="ImportAsCatalogue"/> to get
        /// RDMP metadata objects pointing at the table.
        /// </summary>
        public void SetupTestData()
        {
            //make sure database exists
            if (!BulkDataDatabase.Exists())
            {
                BulkDataDatabase.Create();
            }

            //generate some people
            var people = new PersonCollection();

            people.GeneratePeople(5000, r);

            //generate the test data
            var dt = _dataGenerator.GetDataTable(people, ExpectedNumberOfRowsInTestData);

            var tbl = BulkDataDatabase.ExpectTable(BulkDataTable);

            if (tbl.Exists())
            {
                tbl.Drop();
            }

            //create the table, making sure chi has the correct data type and that (chi, dtCreated, hb_extract) form a sensible composite primary key
            Table = BulkDataDatabase.CreateTable(BulkDataTable, dt, new DatabaseColumnRequest[] {
                new DatabaseColumnRequest("chi", new DatabaseTypeRequest(typeof(string), 10))
                {
                    IsPrimaryKey = true
                },
                new DatabaseColumnRequest("dtCreated", new DatabaseTypeRequest(typeof(DateTime)))
                {
                    IsPrimaryKey = true
                },
                new DatabaseColumnRequest("hb_extract", new DatabaseTypeRequest(typeof(string), 1))
                {
                    IsPrimaryKey = true
                }
            });
        }
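A follow-up verification sketch for the method above (assumes access to the same BulkDataDatabase/BulkDataTable members; the test name is made up):

        [Test]
        public void BulkTestData_IsPopulated()
        {
            SetupTestData();

            var tbl = BulkDataDatabase.ExpectTable(BulkDataTable);

            Assert.IsTrue(tbl.Exists());
            Assert.Greater(tbl.GetDataTable().Rows.Count, 0); //test data was uploaded
        }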
Example #16
        public void SQLServerDestination()
        {
            DiscoveredDatabase dbToExtractTo = null;

            var ci = new CatalogueItem(CatalogueRepository, _catalogue, "YearOfBirth");
            var columnToTransform = _columnInfos.Single(c => c.GetRuntimeName().Equals("DateOfBirth", StringComparison.CurrentCultureIgnoreCase));

            string transform = "YEAR(" + columnToTransform.Name + ")";

            var ei = new ExtractionInformation(CatalogueRepository, ci, columnToTransform, transform);

            ei.Alias = "YearOfBirth";
            ei.ExtractionCategory = ExtractionCategory.Core;
            ei.SaveToDatabase();

            //make it part of the ExtractionConfiguration
            var newColumn = new ExtractableColumn(DataExportRepository, _selectedDataSet.ExtractableDataSet, (ExtractionConfiguration)_selectedDataSet.ExtractionConfiguration, ei, 0, ei.SelectSQL);

            newColumn.Alias = ei.Alias;
            newColumn.SaveToDatabase();

            _extractableColumns.Add(newColumn);

            //recreate the extraction command so it gets updated with the new column too.
            _request = new ExtractDatasetCommand(_configuration, _extractableCohort, new ExtractableDatasetBundle(_extractableDataSet),
                                                 _extractableColumns, new HICProjectSalt(_project),
                                                 new ExtractionDirectory(@"C:\temp\", _configuration));

            try
            {
                _configuration.Name = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest";
                _configuration.SaveToDatabase();

                ExtractionPipelineUseCase            execute;
                IExecuteDatasetExtractionDestination result;

                var dbname = TestDatabaseNames.GetConsistentName(_project.Name + "_" + _project.ProjectNumber);
                dbToExtractTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbname);
                if (dbToExtractTo.Exists())
                {
                    dbToExtractTo.Drop();
                }

                base.Execute(out execute, out result);

                var destinationTable = dbToExtractTo.ExpectTable(_expectedTableName);
                Assert.IsTrue(destinationTable.Exists());

                var dt = destinationTable.GetDataTable();

                Assert.AreEqual(1, dt.Rows.Count);
                Assert.AreEqual(_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()].Trim(), dt.Rows[0]["ReleaseID"]);
                Assert.AreEqual(new DateTime(2001, 1, 1), dt.Rows[0]["DateOfBirth"]);
                Assert.AreEqual(2001, dt.Rows[0]["YearOfBirth"]);

                Assert.AreEqual(columnToTransform.Data_type, destinationTable.DiscoverColumn("DateOfBirth").DataType.SQLType);
                Assert.AreEqual("int", destinationTable.DiscoverColumn("YearOfBirth").DataType.SQLType);
            }
            finally
            {
                if (_extractionServer != null)
                {
                    _extractionServer.DeleteInDatabase();
                }

                if (dbToExtractTo != null)
                {
                    dbToExtractTo.Drop();
                }
            }
        }
Example #17
        private void CopyCohortToDataServer(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            DataTable cohortDataTable = null;

            SetServer();

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to wait for Semaphore OneCrossServerExtractionAtATime to become available"));
            OneCrossServerExtractionAtATime.WaitOne(-1);
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Captured Semaphore OneCrossServerExtractionAtATime"));

            try
            {
                IExtractableCohort cohort = Request.ExtractableCohort;
                cohortDataTable = cohort.FetchEntireCohort();
            }
            catch (Exception e)
            {
                throw new Exception("An error occurred while trying to download the cohort from the Cohort server (in preparation for transfering it to the data server for linkage and extraction)", e);
            }

            //make sure the temporary database exists (it may not e.g. on MySQL, or when the user has specified a different database name)
            if (!_tempDb.Exists())
            {
                if (CreateAndDestroyTemporaryDatabaseIfNotExists)
                {
                    _tempDb.Create();
                    _hadToCreate = true;
                }
                else
                {
                    throw new Exception("Database '" + _tempDb + "' did not exist on server '" + _server + "' and CreateAndDestroyTemporaryDatabaseIfNotExists was false");
                }
            }
            else
            {
                _hadToCreate = false;
            }

            var tbl = _tempDb.ExpectTable(cohortDataTable.TableName);

            if (tbl.Exists())
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found existing table called '" + tbl + "' in '" + _tempDb + "'"));

                if (DropExistingCohortTableIfExists)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "About to drop existing table '" + tbl + "'"));

                    try
                    {
                        tbl.Drop();
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Dropped existing table '" + tbl + "'"));
                    }
                    catch (Exception ex)
                    {
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Warning dropping '" + tbl + "' failed", ex));
                    }
                }
                else
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "'" + _tempDb + "' contains a table called '" + tbl + "' and DropExistingCohortTableIfExists is false"));
                }
            }

            var destination = new DataTableUploadDestination();

            destination.PreInitialize(_tempDb, listener);
            destination.ProcessPipelineData(cohortDataTable, listener, cancellationToken);
            destination.Dispose(listener, null);



            if (!tbl.Exists())
            {
                throw new Exception("Table '" + tbl + "' did not exist despite DataTableUploadDestination completing Successfully!");
            }

            tablesToCleanup.Add(tbl);

            //the cohort table is now in the temporary database
            _haveCopiedCohortAndAdjustedSql = true;
        }
Example #18
        public void JoinablesWithCache()
        {
            const string queryCachingDatabaseName = "MyQueryCachingDatabase";
            var          builder = new CohortQueryBuilder(aggregate1, null);

            //make aggregate 2 a joinable
            var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate2);

            joinable2.AddUser(aggregate1);

            //make aggregate 2 have an additional column (dtCreated)
            var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName().Equals("dtCreated"));

            aggregate2.AddDimension(anotherCol);

            _queryCachingDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(queryCachingDatabaseName);

            if (_queryCachingDatabase.Exists())
            {
                _queryCachingDatabase.Drop(); //make sure it doesn't exist
            }
            MasterDatabaseScriptExecutor scripter = new MasterDatabaseScriptExecutor(_queryCachingDatabase);

            scripter.CreateAndPatchDatabase(new QueryCachingPatcher(), new AcceptAllCheckNotifier());

            var queryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, queryCachingDatabaseName, null);

            queryCachingDatabaseServer.SetProperties(_queryCachingDatabase);

            //make the builder use the query cache we just set up
            builder.CacheServer = queryCachingDatabaseServer;
            try
            {
                var builderForCaching = new CohortQueryBuilder(aggregate2, null, true);

                var cacheDt = new DataTable();
                using (SqlConnection con = (SqlConnection)DiscoveredDatabaseICanCreateRandomTablesIn.Server.GetConnection())
                {
                    con.Open();
                    SqlDataAdapter da = new SqlDataAdapter(new SqlCommand(builderForCaching.SQL, con));
                    da.Fill(cacheDt);
                }

                var cacheManager = new CachedAggregateConfigurationResultsManager(queryCachingDatabaseServer);
                cacheManager.CommitResults(new CacheCommitJoinableInceptionQuery(aggregate2, builderForCaching.SQL, cacheDt, null, 30));

                try
                {
                    Console.WriteLine(builder.SQL);

                    using (var con = (SqlConnection)DiscoveredDatabaseICanCreateRandomTablesIn.Server.GetConnection())
                    {
                        con.Open();

                        var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader();

                        //can read at least one row
                        Assert.IsTrue(dbReader.Read());
                    }

                    string expectedTableAlias = "ix" + joinable2.ID;

                    //after joinables
                    Assert.AreEqual(
                        CollapseWhitespace(
                            string.Format(
                                @"/*cic_{2}_UnitTestAggregate1*/
SELECT
distinct
[" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData].[chi]
FROM 
[" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData]
LEFT Join (
	/*Cached:cic_{2}_UnitTestAggregate2*/
	select * from [MyQueryCachingDatabase]..[JoinableInceptionQuery_AggregateConfiguration{1}]

){0}
on [" + TestDatabaseNames.Prefix + @"ScratchArea]..[BulkData].[chi] = {0}.chi", expectedTableAlias, aggregate2.ID, cohortIdentificationConfiguration.ID)),
                        CollapseWhitespace(builder.SQL));
                }
                finally
                {
                    joinable2.Users[0].DeleteInDatabase();
                    joinable2.DeleteInDatabase();
                }
            }
            finally
            {
                queryCachingDatabaseServer.DeleteInDatabase();
                DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(queryCachingDatabaseName).Drop();
            }
        }
Example #19
        protected override void RunSpecificChecks(ICheckNotifier notifier, bool isRunTime)
        {
            if (!_releaseData.ReleaseGlobals || _releaseData.ReleaseState == ReleaseState.DoingPatch)
            {
                notifier.OnCheckPerformed(new CheckEventArgs("You cannot untick globals or release a subset of datasets when releasing from a DB", CheckResult.Fail));
            }

            var foundConnection = String.Empty;
            var tables          = new List <string>();

            foreach (var cumulativeResult in _releaseData.ConfigurationsForRelease.SelectMany(x => x.Key.CumulativeExtractionResults))
            {
                if (cumulativeResult.DestinationDescription.Split('|').Length != 3)
                {
                    throw new Exception("The extraction did not generate a description that can be parsed. " +
                                        "Have you extracted to a mix of CSVs and DB tables?");
                }

                string candidate = cumulativeResult.DestinationDescription.Split('|')[0] + "|" +
                                   cumulativeResult.DestinationDescription.Split('|')[1];

                tables.Add(cumulativeResult.DestinationDescription.Split('|')[2]);

                if (String.IsNullOrEmpty(foundConnection)) // the first time we use the candidate as our connection...
                {
                    foundConnection = candidate;
                }

                if (foundConnection != candidate) // ...then we check that all other candidates point to the same DB
                {
                    throw new Exception("You are trying to extract from multiple servers or databases. This is not allowed! " +
                                        "Please re-run the extracts against the same database.");
                }

                foreach (var supplementalResult in cumulativeResult.SupplementalExtractionResults
                         .Where(x => x.IsReferenceTo(typeof(SupportingSQLTable)) ||
                                x.IsReferenceTo(typeof(TableInfo))))
                {
                    if (supplementalResult.DestinationDescription.Split('|').Length != 3)
                    {
                        throw new Exception("The extraction did not generate a description that can be parsed. " +
                                            "Have you extracted to a mix of CSVs and DB tables?");
                    }

                    candidate = supplementalResult.DestinationDescription.Split('|')[0] + "|" +
                                supplementalResult.DestinationDescription.Split('|')[1];

                    tables.Add(supplementalResult.DestinationDescription.Split('|')[2]);

                    if (foundConnection != candidate) // ...then we check that all other candidates point to the same DB
                    {
                        throw new Exception("You are trying to extract from multiple servers or databases. This is not allowed! " +
                                            "Please re-run the extracts against the same database.");
                    }
                }
            }

            foreach (var globalResult in _releaseData.ConfigurationsForRelease.SelectMany(x => x.Key.SupplementalExtractionResults)
                     .Where(x => x.IsReferenceTo(typeof(SupportingSQLTable)) ||
                            x.IsReferenceTo(typeof(TableInfo))))
            {
                if (globalResult.DestinationDescription.Split('|').Length != 3)
                {
                    throw new Exception("The extraction did not generate a description that can be parsed. " +
                                        "Have you extracted the Globals to CSVs rather than DB tables?");
                }

                string candidate = globalResult.DestinationDescription.Split('|')[0] + "|" +
                                   globalResult.DestinationDescription.Split('|')[1];

                tables.Add(globalResult.DestinationDescription.Split('|')[2]);

                if (String.IsNullOrEmpty(foundConnection)) // the first time we use the candidate as our connection...
                {
                    foundConnection = candidate;
                }

                if (foundConnection != candidate) // ...then we check that all other candidates point to the same DB
                {
                    throw new Exception("You are trying to extract from multiple servers or databases. This is not allowed! " +
                                        "Please re-run the extracts against the same database.");
                }
            }

            var externalServerId = int.Parse(foundConnection.Split('|')[0]);
            var dbName           = foundConnection.Split('|')[1];

            var externalServer = _catalogueRepository.GetObjectByID <ExternalDatabaseServer>(externalServerId);

            if (!String.IsNullOrWhiteSpace(externalServer.MappedDataPath))
            {
                _dataPathMap = new DirectoryInfo(externalServer.MappedDataPath);
            }
            else
            {
                throw new Exception("The selected Server (" + externalServer.Name + ") must have a Data Path in order to be used as an extraction destination.");
            }

            var server = DataAccessPortal.GetInstance().ExpectServer(externalServer, DataAccessContext.DataExport, setInitialDatabase: false);

            _database = server.ExpectDatabase(dbName);

            if (!_database.Exists())
            {
                throw new Exception("Database " + _database + " does not exist!");
            }

            foreach (var table in tables)
            {
                var foundTable = _database.ExpectTable(table);
                if (!foundTable.Exists())
                {
                    throw new Exception("Table " + table + " does not exist!");
                }
            }

            var spuriousTables = _database.DiscoverTables(false).Where(t => !tables.Contains(t.GetRuntimeName())).ToList();

            if (spuriousTables.Any() && !notifier.OnCheckPerformed(new CheckEventArgs("Spurious table(s): " + String.Join(",", spuriousTables) + " found in the DB." +
                                                                                      "These WILL BE released, you may want to check them before proceeding.",
                                                                                      CheckResult.Warning,
                                                                                      null,
                                                                                      "Are you sure you want to continue the release process?")))
            {
                if (!isRunTime)
                {
                    throw new Exception("Release aborted by user.");
                }
            }

            DirectoryInfo sourceFolder   = GetSourceFolder();
            var           dbOutputFolder = sourceFolder.CreateSubdirectory(ExtractionDirectory.MASTER_DATA_FOLDER_NAME);

            var databaseName = _database.GetRuntimeName();

            if (File.Exists(Path.Combine(dbOutputFolder.FullName, databaseName + ".mdf")) ||
                File.Exists(Path.Combine(dbOutputFolder.FullName, databaseName + "_log.ldf")))
            {
                if (notifier.OnCheckPerformed(new CheckEventArgs(String.Format("It seems that database {0} was already detached previously into {1} " +
                                                                               "but not released or cleaned from the extraction folder", databaseName, dbOutputFolder.FullName),
                                                                 CheckResult.Warning,
                                                                 null,
                                                                 "Do you want to delete it? You should check the contents first. Clicking 'No' will abort the Release.")))
                {
                    File.Delete(Path.Combine(dbOutputFolder.FullName, databaseName + ".mdf"));
                    File.Delete(Path.Combine(dbOutputFolder.FullName, databaseName + "_log.ldf"));
                    notifier.OnCheckPerformed(new CheckEventArgs("Cleaned non-empty existing db output folder folder: " + dbOutputFolder.FullName,
                                                                 CheckResult.Success));
                }
                else
                {
                    if (!isRunTime)
                    {
                        throw new Exception("Release aborted by user.");
                    }
                }
            }
        }
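The checks above repeatedly split DestinationDescription on '|'; from the way the parts are consumed, the format appears to be "<ExternalDatabaseServer ID>|<database name>|<table name>". A small parsing sketch of that assumption (the sample value is made up):

            //inferred format of DestinationDescription: "<ExternalDatabaseServer ID>|<database name>|<table name>"
            string destinationDescription = "3|MyExtractionDb|Biochemistry"; //illustrative value

            string[] parts = destinationDescription.Split('|');

            int    externalServerId = int.Parse(parts[0]); //ID of the ExternalDatabaseServer extracted to
            string databaseName     = parts[1];            //database on that server
            string tableName        = parts[2];            //table holding the extracted dataset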
Example #20
        internal void Create(DiscoveredDatabase db, ICheckNotifier notifier, PlatformDatabaseCreationOptions options)
        {
            if (db.Exists())
            {
                if (options.DropDatabases)
                {
                    db.Drop();
                }
                else
                {
                    throw new Exception("Database " + db.GetRuntimeName() + " already exists and allowDrop option was not specified");
                }
            }

            notifier.OnCheckPerformed(new CheckEventArgs("About to create " + db.GetRuntimeName(), CheckResult.Success));
            //create a new database for the datasets
            db.Create();

            notifier.OnCheckPerformed(new CheckEventArgs("Succesfully created " + db.GetRuntimeName(), CheckResult.Success));

            //fixed seed so everyone gets the same datasets
            var r = new Random(options.Seed);

            notifier.OnCheckPerformed(new CheckEventArgs("Generating people", CheckResult.Success));
            //people
            var people = new PersonCollection();

            people.GeneratePeople(options.NumberOfPeople, r);

            //datasets
            var biochem     = ImportCatalogue(Create <Biochemistry>(db, people, r, notifier, options.NumberOfRowsPerDataset, "chi", "Healthboard", "SampleDate", "TestCode"));
            var demography  = ImportCatalogue(Create <Demography>(db, people, r, notifier, options.NumberOfRowsPerDataset, "chi", "dtCreated", "hb_extract"));
            var prescribing = ImportCatalogue(Create <Prescribing>(db, people, r, notifier, options.NumberOfRowsPerDataset, "chi", "PrescribedDate", "Name")); //<- this is slow!
            var admissions  = ImportCatalogue(Create <HospitalAdmissions>(db, people, r, notifier, options.NumberOfRowsPerDataset, "chi", "AdmissionDate"));
            var carotid     = Create <CarotidArteryScan>(db, people, r, notifier, options.NumberOfRowsPerDataset, "RECORD_NUMBER");

            //the following should not be extractable
            ForExtractionInformations(demography,
                                      e => e.DeleteInDatabase(),
                                      "chi_num_of_curr_record",
                                      "surname",
                                      "forename",
                                      "current_address_L1",
                                      "current_address_L2",
                                      "current_address_L3",
                                      "current_address_L4",
                                      "birth_surname",
                                      "previous_surname",
                                      "midname",
                                      "alt_forename",
                                      "other_initials",
                                      "previous_address_L1",
                                      "previous_address_L2",
                                      "previous_address_L3",
                                      "previous_address_L4",
                                      "previous_postcode",
                                      "date_address_changed",
                                      "adr",
                                      "previous_gp_accept_date",
                                      "hic_dataLoadRunID");

            //the following should be special approval only
            ForExtractionInformations(demography,
                                      e => {
                e.ExtractionCategory = ExtractionCategory.SpecialApprovalRequired;
                e.SaveToDatabase();
            },
                                      "current_postcode",
                                      "current_gp",
                                      "previous_gp",
                                      "date_of_birth");


            CreateAdmissionsViews(db);
            var vConditions = ImportCatalogue(db.ExpectTable("vConditions"));
            var vOperations = ImportCatalogue(db.ExpectTable("vOperations"));

            CreateGraph(biochem, "Test Codes", "TestCode", false, null);
            CreateGraph(biochem, "Test Codes By Date", "SampleDate", true, "TestCode");

            CreateFilter(biochem, "Creatinine", "TestCode", "TestCode like '%CRE%'", @"Serum creatinine is a blood measurement.  It is an indicator of renal health.");
            CreateFilter(biochem, "Test Code", "TestCode", "TestCode like @code", "Filters any test code set");

            CreateExtractionInformation(demography, "Age", "date_of_birth", "FLOOR(DATEDIFF(DAY, date_of_birth, GETDATE()) / 365.25) As Age");
            var fAge = CreateFilter(demography, "Older at least x years", "Age", "FLOOR(DATEDIFF(DAY, date_of_birth, GETDATE()) / 365.25) >= @age", "Patients age is greater than or equal to the provided @age");

            SetParameter(fAge, "@age", "int", "16");

            CreateGraph(demography, "Patient Ages", "Age", false, null);

            CreateGraph(prescribing, "Approved Name", "ApprovedName", false, null);
            CreateGraph(prescribing, "Approved Name Over Time", "PrescribedDate", true, "ApprovedName");

            CreateGraph(prescribing, "Bnf", "FormattedBnfCode", false, null);
            CreateGraph(prescribing, "Bnf Over Time", "PrescribedDate", true, "FormattedBnfCode");

            CreateFilter(
                CreateGraph(vConditions, "Conditions frequency", "Field", false, "Condition"),
                "Common Conditions Only",
                @"(Condition in 
(select top 40 Condition from vConditions c
 WHERE Condition <> 'NULL' AND Condition <> 'Nul' 
 group by Condition order by count(*) desc))");

            CreateFilter(
                CreateGraph(vOperations, "Operation frequency", "Field", false, "Operation"),
                "Common Operation Only",
                @"(Operation in 
(select top 40 Operation from vOperations c
 WHERE Operation <> 'NULL' AND Operation <> 'Nul' 
 group by Operation order by count(*) desc))");

            //group these all into the same folder
            admissions.Folder = new CatalogueFolder(admissions, @"\admissions");
            admissions.SaveToDatabase();
            vConditions.Folder = new CatalogueFolder(vConditions, @"\admissions");
            vConditions.SaveToDatabase();
            vOperations.Folder = new CatalogueFolder(vOperations, @"\admissions");
            vOperations.SaveToDatabase();


            //Create cohort store database
            var wizard = new CreateNewCohortDatabaseWizard(db, _repos.CatalogueRepository, _repos.DataExportRepository, false);
            var externalCohortTable = wizard.CreateDatabase(new PrivateIdentifierPrototype("chi", "varchar(10)"), new ThrowImmediatelyCheckNotifier());

            //Find the pipeline for committing cohorts
            var cohortCreationPipeline = _repos.CatalogueRepository.GetAllObjects <Pipeline>().FirstOrDefault(p => p?.Source?.Class == typeof(CohortIdentificationConfigurationSource).FullName);

            if (cohortCreationPipeline == null)
            {
                throw new Exception("Could not find a cohort committing pipeline");
            }

            //A cohort creation query
            var f = CreateFilter(vConditions, "Lung Cancer Condition", "Condition", "Condition like 'C349'", "ICD-10-CM Diagnosis Code C34.9 Malignant neoplasm of unspecified part of bronchus or lung");

            var cic = CreateCohortIdentificationConfiguration((ExtractionFilter)f);

            var cohort = CommitCohortToNewProject(cic, externalCohortTable, cohortCreationPipeline, "Lung Cancer Project", "P1 Lung Cancer Patients", 123, out Project project);

            var cohortTable = cohort.ExternalCohortTable.DiscoverCohortTable();

            using (var con = cohortTable.Database.Server.GetConnection())
            {
                con.Open();
                //delete 10 percent of the records (so we can simulate a cohort refresh)
                var cmd = cohortTable.Database.Server.GetCommand(string.Format("DELETE TOP (10) PERCENT from {0}", cohortTable.GetFullyQualifiedName()), con);
                cmd.ExecuteNonQuery();
            }

            var ec1 = CreateExtractionConfiguration(project, cohort, "First Extraction (2016 - project 123)", true, notifier, biochem, prescribing, demography);
            var ec2 = CreateExtractionConfiguration(project, cohort, "Project 123 - 2017 Refresh", true, notifier, biochem, prescribing, demography, admissions);
            var ec3 = CreateExtractionConfiguration(project, cohort, "Project 123 - 2018 Refresh", true, notifier, biochem, prescribing, demography, admissions);

            ReleaseAllConfigurations(notifier, ec1, ec2, ec3);
        }
Example #21
        public ExternalCohortTable CreateDatabase(PrivateIdentifierPrototype privateIdentifierPrototype, ICheckNotifier notifier)
        {
            if (!_targetDatabase.Exists())
            {
                notifier.OnCheckPerformed(new CheckEventArgs("Did not find database " + _targetDatabase + " on server so creating it", CheckResult.Success));
                _targetDatabase.Create();
            }

            try
            {
                var definitionTable = _targetDatabase.CreateTable("CohortDefinition", new[]
                {
                    new DatabaseColumnRequest("id", new DatabaseTypeRequest(typeof(int)))
                    {
                        AllowNulls = false, IsAutoIncrement = true, IsPrimaryKey = true
                    },
                    new DatabaseColumnRequest("projectNumber", new DatabaseTypeRequest(typeof(int)))
                    {
                        AllowNulls = false
                    },
                    new DatabaseColumnRequest("version", new DatabaseTypeRequest(typeof(int)))
                    {
                        AllowNulls = false
                    },
                    new DatabaseColumnRequest("description", new DatabaseTypeRequest(typeof(string), 3000))
                    {
                        AllowNulls = false
                    },
                    new DatabaseColumnRequest("dtCreated", new DatabaseTypeRequest(typeof(DateTime)))
                    {
                        AllowNulls = false, Default = MandatoryScalarFunctions.GetTodaysDate
                    }
                });


                var idColumn   = definitionTable.DiscoverColumn("id");
                var foreignKey = new DatabaseColumnRequest(_definitionTableForeignKeyField, new DatabaseTypeRequest(typeof(int)), false)
                {
                    IsPrimaryKey = true
                };


                var cohortTable = _targetDatabase.CreateTable("Cohort", new []
                {
                    new DatabaseColumnRequest(privateIdentifierPrototype.RuntimeName, privateIdentifierPrototype.DataType, false)
                    {
                        IsPrimaryKey = true
                    },
                    new DatabaseColumnRequest(_releaseIdentifierFieldName, new DatabaseTypeRequest(typeof(string), 300))
                    {
                        AllowNulls = AllowNullReleaseIdentifiers
                    },
                    foreignKey
                }
                                                              ,
                                                              //foreign key between id and cohortDefinition_id
                                                              new Dictionary <DatabaseColumnRequest, DiscoveredColumn>()
                {
                    { foreignKey, idColumn }
                }, true);


                notifier.OnCheckPerformed(new CheckEventArgs("About to create pointer to the source", CheckResult.Success));
                var pointer = new ExternalCohortTable(_dataExportRepository, "TestExternalCohort", _targetDatabase.Server.DatabaseType)
                {
                    DatabaseType                   = _targetDatabase.Server.DatabaseType,
                    Server                         = _targetDatabase.Server.Name,
                    Database                       = _targetDatabase.GetRuntimeName(),
                    Username                       = _targetDatabase.Server.ExplicitUsernameIfAny,
                    Password                       = _targetDatabase.Server.ExplicitPasswordIfAny,
                    Name                           = _targetDatabase.GetRuntimeName(),
                    TableName                      = cohortTable.GetRuntimeName(),
                    PrivateIdentifierField         = privateIdentifierPrototype.RuntimeName,
                    ReleaseIdentifierField         = _releaseIdentifierFieldName,
                    DefinitionTableForeignKeyField = _definitionTableForeignKeyField,
                    DefinitionTableName            = definitionTable.GetRuntimeName()
                };

                pointer.SaveToDatabase();

                notifier.OnCheckPerformed(new CheckEventArgs("successfully created reference to cohort source in data export manager", CheckResult.Success));

                notifier.OnCheckPerformed(new CheckEventArgs("About to run post creation checks", CheckResult.Success));
                pointer.Check(notifier);

                notifier.OnCheckPerformed(new CheckEventArgs("Finished", CheckResult.Success));

                return(pointer);
            }
            catch (Exception e)
            {
                notifier.OnCheckPerformed(
                    new CheckEventArgs("Entire setup failed with exception (double click to find out why)",
                                       CheckResult.Fail, e));
                return(null);
            }
        }
Example #22
        public void SQLServerDestination(bool lookupsEtc)
        {
            DiscoveredDatabase dbToExtractTo = null;

            var ci = new CatalogueItem(CatalogueRepository, _catalogue, "YearOfBirth");

            _columnToTransform = _columnInfos.Single(c => c.GetRuntimeName().Equals("DateOfBirth", StringComparison.CurrentCultureIgnoreCase));

            string transform = "YEAR(" + _columnToTransform.Name + ")";


            if (_catalogue.GetAllExtractionInformation(ExtractionCategory.Any).All(ei => ei.GetRuntimeName() != "YearOfBirth"))
            {
                var ei = new ExtractionInformation(CatalogueRepository, ci, _columnToTransform, transform);
                ei.Alias = "YearOfBirth";
                ei.ExtractionCategory = ExtractionCategory.Core;
                ei.SaveToDatabase();

                //make it part of the ExtractionConfiguration
                var newColumn = new ExtractableColumn(DataExportRepository, _selectedDataSet.ExtractableDataSet, (ExtractionConfiguration)_selectedDataSet.ExtractionConfiguration, ei, 0, ei.SelectSQL);
                newColumn.Alias = ei.Alias;
                newColumn.SaveToDatabase();

                _extractableColumns.Add(newColumn);
            }

            if (lookupsEtc)
            {
                CreateLookupsEtc();
            }

            try
            {
                _configuration.Name = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest";
                _configuration.SaveToDatabase();

                var dbname = TestDatabaseNames.GetConsistentName(_project.Name + "_" + _project.ProjectNumber);
                dbToExtractTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbname);
                if (dbToExtractTo.Exists())
                {
                    dbToExtractTo.Drop();
                }

                base.ExecuteRunner();

                var destinationTable = dbToExtractTo.ExpectTable(_expectedTableName);
                Assert.IsTrue(destinationTable.Exists());

                var dt = destinationTable.GetDataTable();

                Assert.AreEqual(1, dt.Rows.Count);
                Assert.AreEqual(_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()].Trim(), dt.Rows[0]["ReleaseID"]);
                Assert.AreEqual(new DateTime(2001, 1, 1), dt.Rows[0]["DateOfBirth"]);
                Assert.AreEqual(2001, dt.Rows[0]["YearOfBirth"]);

                Assert.AreEqual(_columnToTransform.Data_type, destinationTable.DiscoverColumn("DateOfBirth").DataType.SQLType);
                Assert.AreEqual("int", destinationTable.DiscoverColumn("YearOfBirth").DataType.SQLType);

                if (lookupsEtc)
                {
                    AssertLookupsEtcExist(dbToExtractTo);
                }
            }
            finally
            {
                if (dbToExtractTo != null && dbToExtractTo.Exists())
                {
                    dbToExtractTo.Drop();
                }

                _pipeline?.DeleteInDatabase();
            }
        }
Example #23
        public void RefreshCohort_WithCaching()
        {
            ExtractionPipelineUseCase            useCase;
            IExecuteDatasetExtractionDestination results;

            var pipe = new Pipeline(CatalogueRepository, "RefreshPipeWithCaching");

            var source    = new PipelineComponent(CatalogueRepository, pipe, typeof(CohortIdentificationConfigurationSource), 0);
            var args      = source.CreateArgumentsForClassIfNotExists <CohortIdentificationConfigurationSource>();
            var freezeArg = args.Single(a => a.Name.Equals("FreezeAfterSuccessfulImport"));

            freezeArg.SetValue(false);
            freezeArg.SaveToDatabase();

            var dest         = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicCohortDestination), 0);
            var argsDest     = dest.CreateArgumentsForClassIfNotExists <BasicCohortDestination>();
            var allocatorArg = argsDest.Single(a => a.Name.Equals("ReleaseIdentifierAllocator"));

            allocatorArg.SetValue(null);
            allocatorArg.SaveToDatabase();

            pipe.SourcePipelineComponent_ID      = source.ID;
            pipe.DestinationPipelineComponent_ID = dest.ID;
            pipe.SaveToDatabase();

            Execute(out useCase, out results);

            var oldcohort = _configuration.Cohort;

            //Create a query cache
            var p = new QueryCachingPatcher();
            ExternalDatabaseServer queryCacheServer = new ExternalDatabaseServer(CatalogueRepository, "TestCohortRefreshing_CacheTest", p);

            DiscoveredDatabase cachedb = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("TestCohortRefreshing_CacheTest");

            if (cachedb.Exists())
            {
                cachedb.Drop();
            }

            new MasterDatabaseScriptExecutor(cachedb).CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier());
            queryCacheServer.SetProperties(cachedb);

            //Create a Cohort Identification configuration (query) that will identify the cohort
            CohortIdentificationConfiguration cic = new CohortIdentificationConfiguration(RepositoryLocator.CatalogueRepository, "RefreshCohort.cs");

            try
            {
                //make it use the cache
                cic.QueryCachingServer_ID = queryCacheServer.ID;
                cic.SaveToDatabase();

                //give it a single table query to fetch distinct chi from test data
                var agg = cic.CreateNewEmptyConfigurationForCatalogue(_catalogue, null);

                //add the sub query as the only entry in the cic (in the root container)
                cic.CreateRootContainerIfNotExists();
                cic.RootCohortAggregateContainer.AddChild(agg, 1);

                //make the ExtractionConfiguration refresh cohort query be the cic
                _configuration.CohortIdentificationConfiguration_ID = cic.ID;
                _configuration.CohortRefreshPipeline_ID             = pipe.ID;
                _configuration.SaveToDatabase();

                //get a refreshing engine
                var engine = new CohortRefreshEngine(new ThrowImmediatelyDataLoadEventListener(), _configuration);
                engine.Execute();

                Assert.NotNull(engine.Request.NewCohortDefinition);

                var oldData = oldcohort.GetExternalData();

                Assert.AreEqual(oldData.ExternalDescription, engine.Request.NewCohortDefinition.Description);
                Assert.AreEqual(oldData.ExternalVersion + 1, engine.Request.NewCohortDefinition.Version);

                Assert.AreNotEqual(oldcohort.CountDistinct, engine.Request.CohortCreatedIfAny.CountDistinct);

                //now nuke all data in the catalogue so the cic returns nobody (the identifiers are, however, still sitting in the query cache)
                DataAccessPortal.GetInstance().ExpectDatabase(_tableInfo, DataAccessContext.InternalDataProcessing).ExpectTable(_tableInfo.GetRuntimeName()).Truncate();

                var toMem = new ToMemoryDataLoadEventListener(false);

                //get a new engine
                engine = new CohortRefreshEngine(toMem, _configuration);

                //execute it
                var ex = Assert.Throws <Exception>(() => engine.Execute());

                Assert.IsTrue(ex.InnerException.InnerException.Message.Contains("CohortIdentificationCriteria execution resulted in an empty dataset"));

                //the exception above is expected; also confirm that the engine cleared the cohort identifier cache exactly once
                Assert.AreEqual(1, toMem.EventsReceivedBySender.SelectMany(kvp => kvp.Value).Count(msg => msg.Message.Equals("Clearing Cohort Identifier Cache")));
            }
            finally
            {
                //make the ExtractionConfiguration not use the cic query
                _configuration.CohortRefreshPipeline_ID             = null;
                _configuration.CohortIdentificationConfiguration_ID = null;
                _configuration.SaveToDatabase();

                //delete the cic query
                cic.QueryCachingServer_ID = null;
                cic.SaveToDatabase();
                cic.DeleteInDatabase();

                //delete the caching database
                queryCacheServer.DeleteInDatabase();
                cachedb.Drop();
            }
        }
Example #24
0
        public void EndToEndTest()
        {
            var cohortDatabaseNameWillBe = TestDatabaseNames.GetConsistentName("TbvCohort");

            _discoveredCohortDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseNameWillBe);

            //cleanup
            if (_discoveredCohortDatabase.Exists())
            {
                _discoveredCohortDatabase.Drop();
            }

            //create a normal catalogue
            CreateANormalCatalogue();

            //create a cohort database using wizard
            CreateNewCohortDatabaseWizard cohortDatabaseWizard = new CreateNewCohortDatabaseWizard(_discoveredCohortDatabase, CatalogueRepository, DataExportRepository, false);

            _externalCohortTable = cohortDatabaseWizard.CreateDatabase(
                new PrivateIdentifierPrototype(_nonTvfExtractionIdentifier)
                , new ThrowImmediatelyCheckNotifier());
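            //the wizard creates the cohort store schema in _discoveredCohortDatabase and returns the ExternalCohortTable pointer used below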

            //create a table valued function
            CreateTvfCatalogue(cohortDatabaseNameWillBe);

            //Test 1
            TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException();

            PopulateCohortDatabaseWithRecordsFromNonTvfCatalogue();

            //Test 2
            TestWithParameterValueThatRowsAreReturned();

            //Test 3
            TestUsingTvfForAggregates();

            //Test 4
            TestAddingTvfToCIC();

            //Test 5
            TestDataExportOfTvf();

            //tear down
            DataExportRepository.GetAllObjects <ExtractableCohort>().Single().DeleteInDatabase();
            _externalCohortTable.DeleteInDatabase();

            _database.ExpectTable("NonTVFTable").Drop();
            _database.ExpectTableValuedFunction("GetTopXRandom").Drop();

            //delete global parameter
            ((AnyTableSqlParameter)_aggregate.GetAllParameters().Single()).DeleteInDatabase();
            //delete aggregate
            _aggregate.DeleteInDatabase();

            ((AnyTableSqlParameter)_cicAggregate.GetAllParameters().Single()).DeleteInDatabase();
            //delete the cic aggregate
            _cicAggregate.DeleteInDatabase();

            //get rid of the cohort identification configuration
            _cic.DeleteInDatabase();
            _pipe.DeleteInDatabase();

            //get rid of the cohort database
            _discoveredCohortDatabase.Drop();

            _nonTvfCatalogue.DeleteInDatabase();
            _nonTvfTableInfo.DeleteInDatabase();

            _tvfCatalogue.DeleteInDatabase();
            _tvfTableInfo.DeleteInDatabase();
        }
        public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            if (toProcess == null)
            {
                return(null);
            }

            IDatabaseColumnRequestAdjuster adjuster = null;

            if (Adjuster != null)
            {
                var constructor = new ObjectConstructor();
                adjuster = (IDatabaseColumnRequestAdjuster)constructor.Construct(Adjuster);
            }
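            //if an Adjuster was supplied it is passed to CreateTable below so it can tweak the requested column definitions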

            //work out the table name for the table we are going to create
            if (TargetTableName == null)
            {
                if (string.IsNullOrWhiteSpace(toProcess.TableName))
                {
                    throw new Exception("Chunk did not have a TableName, did not know what to call the newly created table");
                }

                TargetTableName = QuerySyntaxHelper.MakeHeaderNameSane(toProcess.TableName);
            }

            ClearPrimaryKeyFromDataTableAndExplicitWriteTypes(toProcess);

            StartAuditIfExists(TargetTableName);

            if (_loggingDatabaseListener != null)
            {
                listener = new ForkDataLoadEventListener(listener, _loggingDatabaseListener);
            }

            EnsureTableHasDataInIt(toProcess);

            bool createdTable = false;

            if (_firstTime)
            {
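                //first chunk only: validate the destination, create (or accept) the target table and open the transacted bulk insert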
                bool tableAlreadyExistsButEmpty = false;

                if (!_database.Exists())
                {
                    throw new Exception("Database " + _database + " does not exist");
                }

                discoveredTable = _database.ExpectTable(TargetTableName);

                //table already exists
                if (discoveredTable.Exists())
                {
                    tableAlreadyExistsButEmpty = true;

                    if (!AllowLoadingPopulatedTables)
                    {
                        if (discoveredTable.IsEmpty())
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found table " + TargetTableName + " already, normally this would forbid you from loading it (data duplication / no primary key etc) but it is empty so we are happy to load it, it will not be created"));
                        }
                        else
                        {
                            throw new Exception("There is already a table called " + TargetTableName + " at the destination " + _database);
                        }
                    }

                    if (AllowResizingColumnsAtUploadTime)
                    {
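                        //seed a data type computer for each existing column; presumably these feed ResizeColumnsIfRequired for later chunks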
                        _dataTypeDictionary = discoveredTable.DiscoverColumns().ToDictionary(k => k.GetRuntimeName(), v => v.GetDataTypeComputer(), StringComparer.CurrentCultureIgnoreCase);
                    }
                }
                else
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Determined that the table name " + TargetTableName + " is unique at destination " + _database));
                }

                //create connection to destination
                if (!tableAlreadyExistsButEmpty)
                {
                    createdTable = true;

                    if (AllowResizingColumnsAtUploadTime)
                    {
                        _database.CreateTable(out _dataTypeDictionary, TargetTableName, toProcess, ExplicitTypes.ToArray(), true, adjuster);
                    }
                    else
                    {
                        _database.CreateTable(TargetTableName, toProcess, ExplicitTypes.ToArray(), true, adjuster);
                    }

                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Created table " + TargetTableName + " successfully."));
                }

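                //the upload runs on a single transacted connection; the catch block below abandons it on error (committing is not shown in this snippet)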
                _managedConnection = _server.BeginNewTransactedConnection();
                _bulkcopy          = discoveredTable.BeginBulkInsert(_managedConnection.ManagedTransaction);

                if (Culture != null)
                {
                    _bulkcopy.DateTimeDecider.Culture = Culture;
                }

                _firstTime = false;
            }

            try
            {
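                //later chunks can contain wider values than the columns created from the first chunk, so resize the columns if required before uploading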
                if (AllowResizingColumnsAtUploadTime && !createdTable)
                {
                    ResizeColumnsIfRequired(toProcess, listener);
                }

                //push the data
                swTimeSpentWritting.Start();

                _affectedRows += _bulkcopy.Upload(toProcess);

                swTimeSpentWritting.Stop();
                listener.OnProgress(this, new ProgressEventArgs("Uploading to " + TargetTableName, new ProgressMeasurement(_affectedRows, ProgressType.Records), swTimeSpentWritting.Elapsed));
            }
            catch (Exception e)
            {
                _managedConnection.ManagedTransaction.AbandonAndCloseConnection();

                if (LoggingServer != null)
                {
                    _dataLoadInfo.LogFatalError(GetType().Name, ExceptionHelper.ExceptionToListOfInnerMessages(e, true));
                }

                throw new Exception("Failed to write rows (in transaction) to table " + TargetTableName, e);
            }

            return(null);
        }
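A rough sketch of how a caller might drive the destination shown above, chunk by chunk. It assumes the method belongs to a DataTableUploadDestination-style pipeline component with the usual PreInitialize/Dispose members; the class name, the PreInitialize signature and the commit-on-Dispose behaviour are assumptions here, not taken from the snippet.

        //hypothetical driver for the destination shown above (class and member names are assumptions)
        public void UploadOneChunk(DiscoveredDatabase db)
        {
            var dest = new DataTableUploadDestination();
            dest.AllowResizingColumnsAtUploadTime = true;

            //tell the component which database to create the destination table in
            dest.PreInitialize(db, new ThrowImmediatelyDataLoadEventListener());

            //build a single chunk; the DataTable name becomes the destination table name
            var dt = new DataTable("MyTable");
            dt.Columns.Add("Chi", typeof(string));
            dt.Rows.Add("1111111111");

            //first call creates the table and opens the transacted bulk insert
            dest.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());

            //Dispose is assumed to commit the transaction (pass an exception instead to abort)
            dest.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);
        }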