Example #1
        public void TestAllowingEmptyDatasets(bool allowEmptyDatasetExtractions)
        {
            Pipeline p = SetupPipeline();

            TruncateDataTable();

            var host = new ExtractionPipelineUseCase(_request.Configuration.Project, _request, p, DataLoadInfo.Empty);

            var engine = host.GetEngine(p, new ThrowImmediatelyDataLoadEventListener());

            host.Source.AllowEmptyExtractions = allowEmptyDatasetExtractions;

            var token = new GracefulCancellationToken();

            if (allowEmptyDatasetExtractions)
            {
                var dt = host.Source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), token);
                Assert.IsNull(host.Source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), token));

                Assert.AreEqual(0, dt.Rows.Count);
                Assert.AreEqual(3, dt.Columns.Count);
            }
            else
            {
                var exception = Assert.Throws <Exception>(() => host.Source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), token));

                Assert.IsTrue(exception.Message.StartsWith("There is no data to load, query returned no rows, query was"));
            }

            p.DeleteInDatabase();
        }
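
The test above demonstrates the construction pattern that recurs throughout these examples: build an ExtractionPipelineUseCase from a request and a Pipeline, build its engine, then drive the Source directly. Below is a minimal sketch of that flow, assuming the same fixture members (_request, SetupPipeline()) as in the test; ReadFirstChunk is a hypothetical helper name.

        // Hypothetical helper showing the construct -> GetEngine -> GetChunk flow from Example #1.
        // _request and SetupPipeline() are assumed to come from the same test fixture as above.
        private DataTable ReadFirstChunk()
        {
            Pipeline p = SetupPipeline();

            var useCase = new ExtractionPipelineUseCase(_request.Configuration.Project, _request, p, DataLoadInfo.Empty);

            //building the engine wires the pipeline's source and destination up to the request
            var engine = useCase.GetEngine(p, new ThrowImmediatelyDataLoadEventListener());

            //tolerate empty extractions so GetChunk returns an empty DataTable instead of throwing
            useCase.Source.AllowEmptyExtractions = true;

            //the first call returns a batch (possibly empty); later calls return null when exhausted
            return useCase.Source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
        }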
Example #2
        protected override void ExecuteRun(object runnable, OverrideSenderIDataLoadEventListener listener)
        {
            var dataLoadInfo = StartAudit();

            var globalCommand  = runnable as ExtractGlobalsCommand;
            var datasetCommand = runnable as ExtractDatasetCommand;

            var logging = new ToLoggingDatabaseDataLoadEventListener(_logManager, dataLoadInfo);
            var fork    =
                datasetCommand != null ?
                new ForkDataLoadEventListener(logging, listener, new ElevateStateListener(datasetCommand)):
                new ForkDataLoadEventListener(logging, listener);

            if (globalCommand != null)
            {
                var useCase = new ExtractionPipelineUseCase(_activator, _project, _globalsCommand, _pipeline, dataLoadInfo)
                {
                    Token = Token
                };
                useCase.Execute(fork);
            }

            if (datasetCommand != null)
            {
                var executeUseCase = new ExtractionPipelineUseCase(_activator, _project, datasetCommand, _pipeline, dataLoadInfo)
                {
                    Token = Token
                };
                executeUseCase.Execute(fork);
            }

            logging.FinalizeTableLoadInfos();
            dataLoadInfo.CloseAndMarkComplete();
        }
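
The ForkDataLoadEventListener used above simply broadcasts every progress/notification event to each listener it wraps. A minimal sketch of building the logging + caller fork on its own, assuming the _logManager field and an open dataLoadInfo as in ExecuteRun (CreateForkedListener is a hypothetical helper name):

        // Combines database logging with whatever listener the caller supplied, as ExecuteRun does above.
        // Assumes the _logManager field and a dataLoadInfo returned by StartAudit().
        private IDataLoadEventListener CreateForkedListener(IDataLoadEventListener callerListener, DataLoadInfo dataLoadInfo)
        {
            var logging = new ToLoggingDatabaseDataLoadEventListener(_logManager, dataLoadInfo);

            //every OnNotify / OnProgress call is forwarded to both listeners
            return new ForkDataLoadEventListener(logging, callerListener);
        }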
Example #3

        protected void Execute(out ExtractionPipelineUseCase pipelineUseCase, out IExecuteDatasetExtractionDestination results)
        {
            DataLoadInfo d = new DataLoadInfo("Internal", _testDatabaseName, "IgnoreMe", "", true, new DiscoveredServer(UnitTestLoggingConnectionString));

            Pipeline pipeline = null;

            //because the extractable columns are likely to include the CHI column, it will be removed from the collection (swapped for a substitution identifier)
            var before = _extractableColumns.ToArray();

            try
            {
                pipeline        = SetupPipeline();
                pipelineUseCase = new ExtractionPipelineUseCase(_request.Configuration.Project, _request, pipeline, d);

                pipelineUseCase.Execute(new ThrowImmediatelyDataLoadEventListener());

                Assert.IsNotEmpty(pipelineUseCase.Source.Request.QueryBuilder.SQL);

                Assert.IsTrue(pipelineUseCase.ExtractCommand.State == ExtractCommandState.Completed);
            }
            finally
            {
                if (pipeline != null)
                {
                    pipeline.DeleteInDatabase();
                }
            }

            results             = pipelineUseCase.Destination;
            _extractableColumns = new List <IColumn>(before);
        }
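
A hedged sketch of how a test might consume the two out parameters this helper produces; the assertions mirror the ones made inside Execute itself and no additional members are assumed.

        // Hypothetical caller of the Execute(out ...) helper above.
        public void ExampleUsage()
        {
            ExtractionPipelineUseCase useCase;
            IExecuteDatasetExtractionDestination results;

            Execute(out useCase, out results);

            //'results' is simply the destination component the use case ran
            Assert.AreSame(useCase.Destination, results);

            //and the extraction command should have run to completion
            Assert.IsTrue(useCase.ExtractCommand.State == ExtractCommandState.Completed);
        }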
Example #4
        protected void Execute(out ExtractionPipelineUseCase pipelineUseCase, out IExecuteDatasetExtractionDestination results)
        {
            DataLoadInfo d = new DataLoadInfo("Internal", _testDatabaseName, "IgnoreMe", "", true, new DiscoveredServer(UnitTestLoggingConnectionString));

            Pipeline pipeline = null;

            try
            {
                pipeline        = SetupPipeline();
                pipelineUseCase = new ExtractionPipelineUseCase(_request.Configuration.Project, _request, pipeline, d);

                pipelineUseCase.Execute(new ThrowImmediatelyDataLoadEventListener());

                Assert.IsNotEmpty(pipelineUseCase.Source.Request.QueryBuilder.SQL);

                Assert.IsTrue(pipelineUseCase.ExtractCommand.State == ExtractCommandState.Completed);
            }
            finally
            {
                if (pipeline != null)
                {
                    pipeline.DeleteInDatabase();
                }
            }

            results = pipelineUseCase.Destination;
        }
Example #5
        /// <summary>
        /// Checks that all globals pass their respective checkers (<see cref="SupportingSQLTableChecker"/> and <see cref="SupportingDocumentsFetcher"/>) and that
        /// the <see cref="Pipeline"/> (if any) is capable of extracting the globals.
        /// </summary>
        /// <param name="notifier"></param>
        public void Check(ICheckNotifier notifier)
        {
            foreach (SupportingSQLTable table in _configuration.GetGlobals().OfType <SupportingSQLTable>())
            {
                new SupportingSQLTableChecker(table).Check(notifier);
            }

            foreach (SupportingDocument document in _configuration.GetGlobals().OfType <SupportingDocument>())
            {
                new SupportingDocumentsFetcher(document).Check(notifier);
            }

            if (_alsoCheckPipeline != null && _command != null)
            {
                var engine = new ExtractionPipelineUseCase(_configuration.Project, _command, _alsoCheckPipeline, DataLoadInfo.Empty)
                             .GetEngine(_alsoCheckPipeline, new FromCheckNotifierToDataLoadEventListener(notifier));
                engine.Check(notifier);
            }
        }
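
This Check implementation can be driven by any ICheckNotifier. A minimal sketch using RDMP's fail-fast notifier; the checker class name is not shown above, so the ICheckable parameter is a stand-in.

        // 'globalsChecker' stands in for an instance of the checker class shown above;
        // ThrowImmediatelyCheckNotifier turns any CheckResult.Fail into an exception.
        public void RunGlobalsChecks(ICheckable globalsChecker)
        {
            globalsChecker.Check(new ThrowImmediatelyCheckNotifier());
        }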
Example #6
        private void SetupPipelineSelectionExtraction()
        {
            //already set it up
            if (_extractionPipelineSelectionUI != null)
            {
                return;
            }

            //the use case is extracting a dataset
            var useCase = ExtractionPipelineUseCase.DesignTime();

            //the user is the DefaultPipeline_ID field of ExtractionConfiguration
            var user = new PipelineUser(typeof(ExtractionConfiguration).GetProperty("DefaultPipeline_ID"), _extractionConfiguration);

            //create the UI for this situation
            var factory = new PipelineSelectionUIFactory(Activator.RepositoryLocator.CatalogueRepository, user, useCase);

            _extractionPipelineSelectionUI = factory.Create(Activator, "Extraction Pipeline", DockStyle.Fill, pChooseExtractionPipeline);
            _extractionPipelineSelectionUI.CollapseToSingleLineMode();
        }
Example #7
        public WordDataWriter(ExtractionPipelineUseCase executer)
        {
            if (executer == null)
            {
                throw new NullReferenceException("Cannot write meta data without the accompanying ExtractionPipelineHost");
            }

            if (executer.Source.WasCancelled)
            {
                throw new NullReferenceException("Cannot write meta data since ExtractionPipelineHost reports that it was Cancelled");
            }

            Executer = executer;

            _destination = Executer.Destination;

            if (_destination == null)
            {
                throw new NotSupportedException(GetType().FullName + " only supports destinations which are " + typeof(ExecuteDatasetExtractionFlatFileDestination).FullName);
            }
        }
Example #8
        protected override void AdjustCommand(ExtractionOptions opts, CommandLineActivity activity)
        {
            base.AdjustCommand(opts, activity);

            var useCase = ExtractionPipelineUseCase.DesignTime();

            var compatible = useCase.FilterCompatiblePipelines(BasicActivator.RepositoryLocator.CatalogueRepository.GetAllObjects <Pipeline>()).ToArray();

            if (!compatible.Any())
            {
                throw new Exception("No compatible pipelines");
            }

            var pipe = BasicActivator.SelectOne("Extraction Pipeline", compatible, null, true);

            if (pipe == null)
            {
                throw new OperationCanceledException();
            }

            opts.Pipeline = pipe.ID;
        }
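
The same DesignTime() / FilterCompatiblePipelines pattern works non-interactively too. A minimal sketch that just takes the first compatible pipeline, assuming access to the catalogue repository; the method name is a placeholder.

        // Picks the first Pipeline compatible with the extraction use case, or throws if none exist.
        // 'repository' is assumed to be e.g. BasicActivator.RepositoryLocator.CatalogueRepository.
        private Pipeline GetFirstCompatibleExtractionPipeline(ICatalogueRepository repository)
        {
            var useCase = ExtractionPipelineUseCase.DesignTime();

            var compatible = useCase.FilterCompatiblePipelines(repository.GetAllObjects<Pipeline>()).ToArray();

            if (!compatible.Any())
            {
                throw new Exception("No compatible pipelines");
            }

            return compatible.First();
        }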
Example #9
        public override void SetDatabaseObject(IActivateItems activator, ExtractionConfiguration databaseObject)
        {
            base.SetDatabaseObject(activator, databaseObject);

            _extractionConfiguration = databaseObject;

            if (!_commonFunctionality.IsSetup)
            {
                _commonFunctionality.SetUp(RDMPCollection.None, tlvDatasets, activator, olvName, null, new RDMPCollectionCommonFunctionalitySettings()
                {
                    AddFavouriteColumn    = false,
                    AllowPinning          = false,
                    SuppressChildrenAdder = true,
                    SuppressActivate      = true,
                    AddCheckColumn        = false
                });
            }

            var checkedBefore = tlvDatasets.CheckedObjects;

            tlvDatasets.ClearObjects();

            _globals  = _extractionConfiguration.GetGlobals();
            _datasets = databaseObject.SelectedDataSets.ToArray();

            GetBundledStuff();

            //add the folders
            tlvDatasets.AddObjects(new object[] { _globalsFolder, _coreDatasetsFolder, _projectSpecificDatasetsFolder });

            //enable all to start with
            tlvDatasets.EnableObjects(tlvDatasets.Objects);

            tlvDatasets.DisableObjects(_globals);
            tlvDatasets.DisableObjects(_bundledStuff);

            //if there are no globals disable this option
            if (!_globals.Any())
            {
                tlvDatasets.DisableObject(_globalsFolder);
            }

            //if there are no project specific datasets
            if (_datasets.All(sds => sds.ExtractableDataSet.Project_ID == null))
            {
                tlvDatasets.DisableObject(_projectSpecificDatasetsFolder); //disable this option
            }
            //if all the datasets are project specific
            if (_datasets.All(sds => sds.ExtractableDataSet.Project_ID != null))
            {
                tlvDatasets.DisableObject(_coreDatasetsFolder);
            }

            //don't accept refresh while executing
            if (checkAndExecuteUI1.IsExecuting)
            {
                return;
            }

            if (_pipelineSelectionUI1 == null)
            {
                //create a new selection UI (pick an extraction pipeline UI)
                var useCase = ExtractionPipelineUseCase.DesignTime();
                var factory = new PipelineSelectionUIFactory(Activator.RepositoryLocator.CatalogueRepository, null, useCase);

                _pipelineSelectionUI1 = factory.Create("Extraction Pipeline", DockStyle.Fill);
                _pipelineSelectionUI1.CollapseToSingleLineMode();

                //if the configuration has a default then use that pipeline
                if (_extractionConfiguration.DefaultPipeline_ID != null)
                {
                    _pipelineSelectionUI1.Pipeline = _extractionConfiguration.DefaultPipeline;
                }

                _pipelineSelectionUI1.PipelineChanged += ResetChecksUI;

                _pipelinePanel = new ToolStripControlHost((Control)_pipelineSelectionUI1);

                helpIcon1.SetHelpText("Extraction", "It is a wise idea to click here if you don't know what this screen can do for you...", BuildHelpFlow());
            }

            CommonFunctionality.Add(new ToolStripLabel("Extraction Pipeline:"));
            CommonFunctionality.Add(_pipelinePanel);
            CommonFunctionality.AddHelpStringToToolStrip("Extraction Pipeline", "The sequence of components that will be executed in order to enable the datasets to be extracted. This will start with a source component that performs the linkage against the cohort followed by subsequent components (if any) and then a destination component that writes the final records (e.g. to database / csv file etc).");

            CommonFunctionality.AddToMenu(new ExecuteCommandRelease(activator).SetTarget(_extractionConfiguration));

            CommonFunctionality.Add(lblMaxConcurrent);
            CommonFunctionality.Add(tbMaxConcurrent);
            CommonFunctionality.AddHelpStringToToolStrip("Concurrent", "The maximum number of datasets to extract at once.  Once this number is reached the remainder will be queued and only started when one of the other extracting datasets completes.");

            checkAndExecuteUI1.SetItemActivator(activator);

            tlvDatasets.ExpandAll();

            if (_isFirstTime)
            {
                tlvDatasets.CheckAll();
                foreach (var disabledObject in tlvDatasets.DisabledObjects.OfType <ArbitraryFolderNode>())
                {
                    tlvDatasets.UncheckObject(disabledObject);
                }
            }
            else if (checkedBefore.Count > 0)
            {
                tlvDatasets.CheckObjects(checkedBefore);
            }

            _isFirstTime = false;
        }
Example #10
        public DataExportChildProvider(IRDMPPlatformRepositoryServiceLocator repositoryLocator, IChildProvider[] pluginChildProviders, ICheckNotifier errorsCheckNotifier, DataExportChildProvider previousStateIfKnown) : base(repositoryLocator.CatalogueRepository, pluginChildProviders, errorsCheckNotifier, previousStateIfKnown)
        {
            BlackListedSources   = previousStateIfKnown?.BlackListedSources ?? new List <ExternalCohortTable>();
            _errorsCheckNotifier = errorsCheckNotifier;
            dataExportRepository = repositoryLocator.DataExportRepository;

            AllProjectAssociatedCics = GetAllObjects <ProjectCohortIdentificationConfigurationAssociation>(dataExportRepository);

            _cicAssociations = new HashSet <int>(AllProjectAssociatedCics.Select(a => a.CohortIdentificationConfiguration_ID));

            CohortSources       = GetAllObjects <ExternalCohortTable>(dataExportRepository);
            ExtractableDataSets = GetAllObjects <ExtractableDataSet>(dataExportRepository);

            AddToDictionaries(new HashSet <object>(AllCohortIdentificationConfigurations.Where(cic => _cicAssociations.Contains(cic.ID))), new DescendancyList(AllProjectCohortIdentificationConfigurationsNode));
            AddToDictionaries(new HashSet <object>(AllCohortIdentificationConfigurations.Where(cic => !_cicAssociations.Contains(cic.ID))), new DescendancyList(AllFreeCohortIdentificationConfigurationsNode));

            _selectedDataSetsWithNoIsExtractionIdentifier = new HashSet <ISelectedDataSets>(dataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers());

            SelectedDataSets = GetAllObjects <SelectedDataSets>(dataExportRepository);
            ReportProgress("Fetching data export objects");

            var dsDictionary = ExtractableDataSets.ToDictionary(ds => ds.ID, d => d);

            foreach (SelectedDataSets s in SelectedDataSets)
            {
                s.InjectKnown(dsDictionary[s.ExtractableDataSet_ID]);
            }

            ReportProgress("Injecting SelectedDataSets");

            //This means that the ToString method in ExtractableDataSet doesn't need to go lookup catalogue info
            var catalogueIdDict = AllCatalogues.ToDictionary(c => c.ID, c2 => c2);

            foreach (ExtractableDataSet ds in ExtractableDataSets)
            {
                if (catalogueIdDict.TryGetValue(ds.Catalogue_ID, out Catalogue cata))
                {
                    ds.InjectKnown(cata);
                }
            }

            ReportProgress("Injecting ExtractableDataSet");

            AllPackages = GetAllObjects <ExtractableDataSetPackage>(dataExportRepository);

            Projects = GetAllObjects <Project>(dataExportRepository);
            ExtractionConfigurations = GetAllObjects <ExtractionConfiguration>(dataExportRepository);

            ReportProgress("Get Projects and Configurations");

            ExtractionConfigurationsByProject = ExtractionConfigurations.GroupBy(k => k.Project_ID).ToDictionary(gdc => gdc.Key, gdc => gdc.ToList());

            ReportProgress("Grouping Extractions by Project");

            AllGlobalExtractionFilterParameters = GetAllObjects <GlobalExtractionFilterParameter>(dataExportRepository);

            AllContainers = GetAllObjects <FilterContainer>(dataExportRepository).ToDictionary(o => o.ID, o => o);
            AllDeployedExtractionFilters = GetAllObjects <DeployedExtractionFilter>(dataExportRepository);
            _allParameters = GetAllObjects <DeployedExtractionFilterParameter>(dataExportRepository);

            ReportProgress("Getting Filters");

            //if we are using a database repository then we can make use of the caching class DataExportFilterManagerFromChildProvider
            //to speed up fetching filter contents
            var dbRepo = dataExportRepository as DataExportRepository;

            _dataExportFilterManager = dbRepo == null ? dataExportRepository.FilterManager : new DataExportFilterManagerFromChildProvider(dbRepo, this);

            ReportProgress("Building FilterManager");

            Cohorts            = GetAllObjects <ExtractableCohort>(dataExportRepository);
            _cohortsByOriginId = new Dictionary <int, HashSet <ExtractableCohort> >();

            foreach (ExtractableCohort c in Cohorts)
            {
                if (!_cohortsByOriginId.ContainsKey(c.OriginID))
                {
                    _cohortsByOriginId.Add(c.OriginID, new HashSet <ExtractableCohort>());
                }

                _cohortsByOriginId[c.OriginID].Add(c);
            }

            _configurationToDatasetMapping = new Dictionary <ExtractionConfiguration, List <SelectedDataSets> >();

            ReportProgress("Fetching Cohorts");

            GetCohortAvailability();

            ReportProgress("GetCohortAvailability");

            var configToSds = SelectedDataSets.GroupBy(k => k.ExtractionConfiguration_ID).ToDictionary(gdc => gdc.Key, gdc => gdc.ToList());

            foreach (ExtractionConfiguration configuration in ExtractionConfigurations)
            {
                if (configToSds.TryGetValue(configuration.ID, out List <SelectedDataSets> result))
                {
                    _configurationToDatasetMapping.Add(configuration, result);
                }
            }

            ReportProgress("Mapping configurations to datasets");

            RootCohortsNode = new AllCohortsNode();
            AddChildren(RootCohortsNode, new DescendancyList(RootCohortsNode));

            foreach (ExtractableDataSetPackage package in AllPackages)
            {
                AddChildren(package, new DescendancyList(package));
            }

            ReportProgress("Packages and Cohorts");

            foreach (Project p in Projects)
            {
                AddChildren(p, new DescendancyList(p));
            }

            ReportProgress("Projects");

            //work out all the Catalogues that are extractable (Catalogues are extractable if there is an ExtractableDataSet with the Catalogue_ID that matches them)
            var cataToEds = new Dictionary <int, ExtractableDataSet>(ExtractableDataSets.ToDictionary(k => k.Catalogue_ID));

            //inject extractability into Catalogues
            foreach (Catalogue catalogue in AllCatalogues)
            {
                if (cataToEds.TryGetValue(catalogue.ID, out ExtractableDataSet result))
                {
                    catalogue.InjectKnown(result.GetCatalogueExtractabilityStatus());
                }
                else
                {
                    catalogue.InjectKnown(new CatalogueExtractabilityStatus(false, false));
                }
            }

            ReportProgress("Catalogue extractability injection");

            try
            {
                AddPipelineUseCases(new Dictionary <string, PipelineUseCase>
                {
                    { "File Import", UploadFileUseCase.DesignTime() },
                    { "Extraction", ExtractionPipelineUseCase.DesignTime() },
                    { "Release", ReleaseUseCase.DesignTime() },
                    { "Cohort Creation", CohortCreationRequest.DesignTime() },
                    { "Caching", CachingPipelineUseCase.DesignTime() },
                    { "Aggregate Committing", CreateTableFromAggregateUseCase.DesignTime(repositoryLocator.CatalogueRepository) }
                });
            }
            catch (Exception ex)
            {
                _errorsCheckNotifier.OnCheckPerformed(new CheckEventArgs("Failed to build DesignTime PipelineUseCases", CheckResult.Fail, ex));
            }

            ReportProgress("Pipeline adding");
        }
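
A recurring pattern in the constructor above is fetching all objects once, indexing them by ID, and calling InjectKnown on each child so that later ToString()/lookups do not hit the database again. Below is a generic sketch of that shape; the type parameters and delegates are placeholders, not RDMP API.

        // Generic illustration of the 'fetch once, index by ID, inject into children' pattern used above.
        // Only the shape matches the RDMP code; InjectParents and its delegates are hypothetical.
        private static void InjectParents<TParent, TChild>(
            IEnumerable<TParent> parents,
            IEnumerable<TChild> children,
            Func<TParent, int> getId,
            Func<TChild, int> getParentId,
            Action<TChild, TParent> inject)
        {
            //index the parents once
            var byId = parents.ToDictionary(getId);

            //hand each child its parent so it never has to look the parent up itself
            foreach (var child in children)
            {
                if (byId.TryGetValue(getParentId(child), out var parent))
                {
                    inject(child, parent);
                }
            }
        }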
Example #11
        /// <summary>
        /// Checks the <see cref="SelectedDataSet"/> and reports success/failures to the <paramref name="notifier"/>
        /// </summary>
        /// <param name="notifier"></param>
        public void Check(ICheckNotifier notifier)
        {
            var       ds      = SelectedDataSet.ExtractableDataSet;
            var       config  = SelectedDataSet.ExtractionConfiguration;
            var       cohort  = config.Cohort;
            var       project = config.Project;
            const int timeout = 5;

            notifier.OnCheckPerformed(new CheckEventArgs("Inspecting dataset " + ds, CheckResult.Success));

            var selectedcols = new List <IColumn>(config.GetAllExtractableColumnsFor(ds));

            if (!selectedcols.Any())
            {
                notifier.OnCheckPerformed(
                    new CheckEventArgs(
                        "Dataset " + ds + " in configuration '" + config + "' has no selected columns",
                        CheckResult.Fail));

                return;
            }

            ICatalogue cata;

            try
            {
                cata = ds.Catalogue;
            }
            catch (Exception e)
            {
                notifier.OnCheckPerformed(new CheckEventArgs("Unable to find Catalogue for ExtractableDataSet", CheckResult.Fail, e));
                return;
            }

            if (cata.IsInternalDataset)
            {
                notifier.OnCheckPerformed(new CheckEventArgs($"Dataset '{ds}' is marked {nameof(ICatalogue.IsInternalDataset)} so should not be extracted", CheckResult.Fail));
            }

            var request = new ExtractDatasetCommand(config, cohort, new ExtractableDatasetBundle(ds),
                                                    selectedcols, new HICProjectSalt(project), new ExtractionDirectory(project.ExtractionDirectory, config))
            {
                TopX = 1
            };

            try
            {
                request.GenerateQueryBuilder();
            }
            catch (Exception e)
            {
                notifier.OnCheckPerformed(
                    new CheckEventArgs(
                        "Could not generate valid extraction SQL for dataset " + ds +
                        " in configuration " + config, CheckResult.Fail, e));
                return;
            }

            var  server       = request.GetDistinctLiveDatabaseServer();
            bool serverExists = server.Exists();

            notifier.OnCheckPerformed(new CheckEventArgs("Server " + server + " Exists:" + serverExists,
                                                         serverExists ? CheckResult.Success : CheckResult.Fail));

            var cohortServer = request.ExtractableCohort.ExternalCohortTable.Discover();

            if (cohortServer == null || !cohortServer.Exists())
            {
                notifier.OnCheckPerformed(new CheckEventArgs("Cohort server did not exist or was unreachable", CheckResult.Fail));
                return;
            }

            //when 2+ columns have the same Name it's a problem
            foreach (IGrouping <string, IColumn> grouping in request.ColumnsToExtract.GroupBy(c => c.GetRuntimeName()).Where(g => g.Count() > 1))
            {
                notifier.OnCheckPerformed(new CheckEventArgs($"There are { grouping.Count() } columns in the extract ({request.DatasetBundle?.DataSet}) called '{ grouping.Key }'", CheckResult.Fail));
            }

            //when 2+ columns have the same Order it's a problem because the output column order would be ambiguous
            foreach (IGrouping <int, IColumn> grouping in request.ColumnsToExtract.GroupBy(c => c.Order).Where(g => g.Count() > 1))
            {
                notifier.OnCheckPerformed(new CheckEventArgs($"There are { grouping.Count() } columns in the extract ({request.DatasetBundle?.DataSet}) that share the same Order '{ grouping.Key }'", CheckResult.Fail));
            }

            // Warn user if stuff is out of sync with the Catalogue version (changes have happened to the master but not propagated to the copy in this extraction)
            var outOfSync = selectedcols.OfType <ExtractableColumn>().Where(c => c.IsOutOfSync()).ToArray();

            if (outOfSync.Any())
            {
                notifier.OnCheckPerformed(new CheckEventArgs($"'{ds}' columns out of sync with CatalogueItem version(s): { Environment.NewLine + string.Join(',', outOfSync.Select(o => o.ToString() + Environment.NewLine)) }" +
                                                             $"{ Environment.NewLine } Extraction Configuration: '{config}' ", CheckResult.Warning));
            }

            var nonSelectedCore = cata.GetAllExtractionInformation(ExtractionCategory.Core)
                                  .Union(cata.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific))
                                  .Where(ei => !ei.IsExtractionIdentifier &&
                                         !selectedcols.OfType <ExtractableColumn>().Any(ec => ec.CatalogueExtractionInformation_ID == ei.ID))
                                  .ToArray();

            if (nonSelectedCore.Any())
            {
                notifier.OnCheckPerformed(new CheckEventArgs($"'{ds}' Core columns not selected for extractions: { Environment.NewLine + string.Join(',', nonSelectedCore.Select(o => o.ToString() + Environment.NewLine)) }" +
                                                             $"{ Environment.NewLine } Extraction Configuration: '{config}' ", CheckResult.Warning));
            }

            //Make sure cohort and dataset are on same server before checking (can still get around this at runtime by using ExecuteCrossServerDatasetExtractionSource)
            if (!cohortServer.Server.Name.Equals(server.Name, StringComparison.CurrentCultureIgnoreCase) || !cohortServer.Server.DatabaseType.Equals(server.DatabaseType))
            {
                notifier.OnCheckPerformed(new CheckEventArgs(
                                              $"Cohort is on server '{cohortServer.Server.Name}' ({cohortServer.Server.DatabaseType}) but dataset '{request.DatasetBundle?.DataSet}' is on '{server.Name}' ({server.DatabaseType})"
                                              , CheckResult.Warning));
            }
            else
            {
                //Try to fetch TOP 1 data
                try
                {
                    using (var con = server.BeginNewTransactedConnection())
                    {
                        //in case the user somehow manages to write a filter/transform that nukes data or something

                        DbCommand cmd;

                        try
                        {
                            cmd = server.GetCommand(request.QueryBuilder.SQL, con);
                            cmd.CommandTimeout = timeout;
                            notifier.OnCheckPerformed(
                                new CheckEventArgs(
                                    "/*About to send Request SQL :*/" + Environment.NewLine + request.QueryBuilder.SQL,
                                    CheckResult.Success));
                        }
                        catch (QueryBuildingException e)
                        {
                            notifier.OnCheckPerformed(new CheckEventArgs("Failed to assemble query for dataset " + ds,
                                                                         CheckResult.Fail, e));
                            return;
                        }

                        try
                        {
                            using (var r = cmd.ExecuteReader())
                            {
                                if (r.Read())
                                {
                                    notifier.OnCheckPerformed(new CheckEventArgs("Read at least 1 row successfully from dataset " + ds,
                                                                                 CheckResult.Success));
                                }
                                else
                                {
                                    notifier.OnCheckPerformed(new CheckEventArgs("Dataset " + ds + " is completely empty (when linked with the cohort). " +
                                                                                 "Extraction may fail if the Source does not allow empty extractions",
                                                                                 CheckResult.Warning));
                                }
                            }
                        }
                        catch (Exception e)
                        {
                            if (server.GetQuerySyntaxHelper().IsTimeout(e))
                            {
                                notifier.OnCheckPerformed(new CheckEventArgs(ErrorCodes.ExtractTimeoutChecking, e, timeout));
                            }
                            else
                            {
                                notifier.OnCheckPerformed(new CheckEventArgs("Failed to execute the query (See below for query)", CheckResult.Fail, e));
                            }
                        }

                        con.ManagedTransaction.AbandonAndCloseConnection();
                    }
                }
                catch (Exception e)
                {
                    notifier.OnCheckPerformed(new CheckEventArgs("Failed to execute Top 1 on dataset " + ds, CheckResult.Fail, e));
                }
            }

            var fetchOptions = _checkGlobals ? FetchOptions.ExtractableGlobalsAndLocals : FetchOptions.ExtractableLocals;

            foreach (var supportingDocument in cata.GetAllSupportingDocuments(fetchOptions))
            {
                new SupportingDocumentsFetcher(supportingDocument).Check(notifier);
            }

            //check catalogue locals
            foreach (SupportingSQLTable table in cata.GetAllSupportingSQLTablesForCatalogue(fetchOptions))
            {
                new SupportingSQLTableChecker(table).Check(notifier);
            }

            if (_alsoCheckPipeline != null)
            {
                var engine = new ExtractionPipelineUseCase(_activator, request.Project, request, _alsoCheckPipeline, DataLoadInfo.Empty)
                             .GetEngine(_alsoCheckPipeline, new FromCheckNotifierToDataLoadEventListener(notifier));
                engine.Check(notifier);
            }
        }
Example #12
        /// <summary>
        /// Checks the <see cref="SelectedDataSet"/> and reports success/failures to the <paramref name="notifier"/>
        /// </summary>
        /// <param name="notifier"></param>
        public void Check(ICheckNotifier notifier)
        {
            var       ds      = SelectedDataSet.ExtractableDataSet;
            var       config  = SelectedDataSet.ExtractionConfiguration;
            var       cohort  = config.Cohort;
            var       project = config.Project;
            const int timeout = 5;

            notifier.OnCheckPerformed(new CheckEventArgs("Inspecting dataset " + ds, CheckResult.Success));

            var selectedcols = new List <IColumn>(config.GetAllExtractableColumnsFor(ds));

            if (!selectedcols.Any())
            {
                notifier.OnCheckPerformed(
                    new CheckEventArgs(
                        "Dataset " + ds + " in configuration '" + config + "' has no selected columns",
                        CheckResult.Fail));

                return;
            }

            var request = new ExtractDatasetCommand(config, cohort, new ExtractableDatasetBundle(ds),
                                                    selectedcols, new HICProjectSalt(project), new ExtractionDirectory(project.ExtractionDirectory, config))
            {
                TopX = 1
            };

            try
            {
                request.GenerateQueryBuilder();
            }
            catch (Exception e)
            {
                notifier.OnCheckPerformed(
                    new CheckEventArgs(
                        "Could not generate valid extraction SQL for dataset " + ds +
                        " in configuration " + config, CheckResult.Fail, e));
                return;
            }

            var  server       = request.GetDistinctLiveDatabaseServer();
            bool serverExists = server.Exists();

            notifier.OnCheckPerformed(new CheckEventArgs("Server " + server + " Exists:" + serverExists,
                                                         serverExists ? CheckResult.Success : CheckResult.Fail));

            var cohortServer = request.ExtractableCohort.ExternalCohortTable.Discover();

            if (cohortServer == null || !cohortServer.Exists())
            {
                notifier.OnCheckPerformed(new CheckEventArgs("Cohort server did not exist or was unreachable", CheckResult.Fail));
                return;
            }

            foreach (IGrouping <string, IColumn> grouping in request.ColumnsToExtract.GroupBy(c => c.GetRuntimeName()).Where(g => g.Count() > 1))
            {
                notifier.OnCheckPerformed(new CheckEventArgs("There are " + grouping.Count() + " columns in the extract called '" + grouping.Key + "'", CheckResult.Fail));
            }

            //Make sure cohort and dataset are on same server before checking (can still get around this at runtime by using ExecuteCrossServerDatasetExtractionSource)
            if (!cohortServer.Server.Name.Equals(server.Name, StringComparison.CurrentCultureIgnoreCase) || !cohortServer.Server.DatabaseType.Equals(server.DatabaseType))
            {
                notifier.OnCheckPerformed(new CheckEventArgs(
                                              string.Format("Cohort is on server '{0}' ({1}) but dataset is on '{2}' ({3})",
                                                            cohortServer.Server.Name,
                                                            cohortServer.Server.DatabaseType,
                                                            server.Name, server.DatabaseType), CheckResult.Warning));
            }
            else
            {
                //Try to fetch TOP 1 data
                try
                {
                    using (var con = server.BeginNewTransactedConnection())
                    {
                        //in case the user somehow manages to write a filter/transform that nukes data or something

                        DbCommand cmd;

                        try
                        {
                            cmd = server.GetCommand(request.QueryBuilder.SQL, con);
                            cmd.CommandTimeout = timeout;
                            notifier.OnCheckPerformed(
                                new CheckEventArgs(
                                    "/*About to send Request SQL :*/" + Environment.NewLine + request.QueryBuilder.SQL,
                                    CheckResult.Success));
                        }
                        catch (QueryBuildingException e)
                        {
                            notifier.OnCheckPerformed(new CheckEventArgs("Failed to assemble query for dataset " + ds,
                                                                         CheckResult.Fail, e));
                            return;
                        }

                        try
                        {
                            using (var r = cmd.ExecuteReader())
                            {
                                if (r.Read())
                                {
                                    notifier.OnCheckPerformed(new CheckEventArgs("Read at least 1 row successfully from dataset " + ds,
                                                                                 CheckResult.Success));
                                }
                                else
                                {
                                    notifier.OnCheckPerformed(new CheckEventArgs("Dataset " + ds + " is completely empty (when linked with the cohort). " +
                                                                                 "Extraction may fail if the Source does not allow empty extractions",
                                                                                 CheckResult.Warning));
                                }
                            }
                        }
                        catch (Exception e)
                        {
                            if (server.GetQuerySyntaxHelper().IsTimeout(e))
                            {
                                notifier.OnCheckPerformed(new CheckEventArgs("Failed to read rows after " + timeout + "s", CheckResult.Warning, e));
                            }
                            else
                            {
                                notifier.OnCheckPerformed(new CheckEventArgs("Failed to execute the query (See below for query)", CheckResult.Fail, e));
                            }
                        }

                        con.ManagedTransaction.AbandonAndCloseConnection();
                    }
                }
                catch (Exception e)
                {
                    notifier.OnCheckPerformed(new CheckEventArgs("Failed to execute Top 1 on dataset " + ds, CheckResult.Fail, e));
                }
            }

            var cata         = ds.Catalogue;
            var fetchOptions = _checkGlobals ? FetchOptions.ExtractableGlobalsAndLocals : FetchOptions.ExtractableLocals;

            foreach (var supportingDocument in cata.GetAllSupportingDocuments(fetchOptions))
            {
                new SupportingDocumentsFetcher(supportingDocument).Check(notifier);
            }

            //check catalogue locals
            foreach (SupportingSQLTable table in cata.GetAllSupportingSQLTablesForCatalogue(fetchOptions))
            {
                new SupportingSQLTableChecker(table).Check(notifier);
            }

            if (_alsoCheckPipeline != null)
            {
                var engine = new ExtractionPipelineUseCase(request.Project, request, _alsoCheckPipeline, DataLoadInfo.Empty)
                             .GetEngine(_alsoCheckPipeline, new FromCheckNotifierToDataLoadEventListener(notifier));
                engine.Check(notifier);
            }
        }