Example No. 1
        protected IPipelineRunner GetConfigureAndExecuteControl(ICohortCreationRequest request, string description)
        {
            var catalogueRepository = BasicActivator.RepositoryLocator.CatalogueRepository;

            var pipelineRunner = BasicActivator.GetPipelineRunner(request, Pipeline);

            pipelineRunner.PipelineExecutionFinishedsuccessfully += (o, args) => OnCohortCreatedSuccessfully(pipelineRunner, request);

            //add in the logging server
            var loggingServer = catalogueRepository.GetServerDefaults().GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);

            if (loggingServer != null)
            {
                var logManager = new LogManager(loggingServer);
                logManager.CreateNewLoggingTaskIfNotExists(ExtractableCohort.CohortLoggingTask);

                //create a db listener
                var toDbListener = new ToLoggingDatabaseDataLoadEventListener(this, logManager, ExtractableCohort.CohortLoggingTask, description);

                //make all messages go to both the db and the UI
                pipelineRunner.SetAdditionalProgressListener(toDbListener);

                //after executing the pipeline finalise the db listener table info records
                pipelineRunner.PipelineExecutionFinishedsuccessfully += (s, e) => toDbListener.FinalizeTableLoadInfos();
            }

            return pipelineRunner;
        }
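The pattern these examples share, reduced to its essentials: resolve the default live logging server, create a LogManager, make sure the logging task exists, send events to a ToLoggingDatabaseDataLoadEventListener, and always call FinalizeTableLoadInfos once the run ends. A minimal sketch follows, assuming a catalogueRepository is already in scope and using a placeholder task name, description and runPipeline delegate; only the RDMP calls visible in the examples on this page are used.

// Minimal sketch of the shared logging pattern (task name, description and runPipeline are placeholders)
var loggingServer = catalogueRepository.GetServerDefaults()
                                       .GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);

if (loggingServer != null)
{
    var logManager = new LogManager(loggingServer);
    logManager.CreateNewLoggingTaskIfNotExists("MyLoggingTask");   // hypothetical task name

    var toDbListener = new ToLoggingDatabaseDataLoadEventListener(this, logManager, "MyLoggingTask", "example run");

    try
    {
        runPipeline(toDbListener);   // placeholder for whatever work raises the load events
    }
    finally
    {
        // close off the TableLoadInfo records created during the run
        toDbListener.FinalizeTableLoadInfos();
    }
}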
Example No. 2
        protected override void ExecuteRun(object runnable, OverrideSenderIDataLoadEventListener listener)
        {
            var dataLoadInfo = StartAudit();

            var globalCommand  = runnable as ExtractGlobalsCommand;
            var datasetCommand = runnable as ExtractDatasetCommand;

            var logging = new ToLoggingDatabaseDataLoadEventListener(_logManager, dataLoadInfo);
            var fork    =
                datasetCommand != null ?
                new ForkDataLoadEventListener(logging, listener, new ElevateStateListener(datasetCommand)):
                new ForkDataLoadEventListener(logging, listener);

            if (globalCommand != null)
            {
                var useCase = new ExtractionPipelineUseCase(_activator, _project, _globalsCommand, _pipeline, dataLoadInfo)
                {
                    Token = Token
                };
                useCase.Execute(fork);
            }

            if (datasetCommand != null)
            {
                var executeUseCase = new ExtractionPipelineUseCase(_activator, _project, datasetCommand, _pipeline, dataLoadInfo)
                {
                    Token = Token
                };
                executeUseCase.Execute(fork);
            }

            logging.FinalizeTableLoadInfos();
            dataLoadInfo.CloseAndMarkComplete();
        }
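Example No. 2 also illustrates the role of ForkDataLoadEventListener, which appears throughout this page: it wraps any number of IDataLoadEventListener instances and rebroadcasts every event to each of them, so a single run can feed both the logging database and the caller's listener. A rough illustration, with toDb and uiListener as placeholder listeners:

// Hedged sketch: both listeners receive the same OnNotify/OnProgress calls
var fork = new ForkDataLoadEventListener(toDb, uiListener);

fork.OnNotify(this,
    new NotifyEventArgs(ProgressEventType.Information, "Starting extraction"));   // reaches toDb and uiListener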
Example No. 3
 private void EndAuditIfExists()
 {
     //user is auditing
     if (_loggingDatabaseListener != null)
     {
         _loggingDatabaseListener.FinalizeTableLoadInfos();
     }
 }
Example No. 4
        public int Run(IRDMPPlatformRepositoryServiceLocator repositoryLocator, IDataLoadEventListener listener, ICheckNotifier checkNotifier, GracefulCancellationToken token)
        {
            const string dataLoadTask = "caching";

            CacheProgress cp = repositoryLocator.CatalogueRepository.GetObjectByID<CacheProgress>(_options.CacheProgress);

            var defaults      = repositoryLocator.CatalogueRepository.GetServerDefaults();
            var loggingServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);

            if (loggingServer == null)
            {
                throw new NotSupportedException("No default logging server specified, you must specify one in ");
            }

            var logManager = new LogManager(loggingServer);

            logManager.CreateNewLoggingTaskIfNotExists(dataLoadTask);

            switch (_options.Command)
            {
            case CommandLineActivity.run:

                //Setup dual listeners for the Cache process, one ticks the lifeline once per message and one logs to the logging database
                var toLog        = new ToLoggingDatabaseDataLoadEventListener(this, logManager, dataLoadTask, "Caching " + cp);
                var forkListener = new ForkDataLoadEventListener(toLog, listener);
                try
                {
                    var cachingHost = new CachingHost(repositoryLocator.CatalogueRepository);
                    cachingHost.RetryMode         = _options.RetryMode;
                    cachingHost.CacheProgressList = new ICacheProgress[] { cp }.ToList();     //run the cp

                    //By default caching host will block
                    cachingHost.TerminateIfOutsidePermissionWindow = true;

                    cachingHost.Start(forkListener, token);
                }
                finally
                {
                    //finish everything
                    toLog.FinalizeTableLoadInfos();
                }

                break;

            case CommandLineActivity.check:
                var checkable = new CachingPreExecutionChecker(cp);
                checkable.Check(checkNotifier);
                break;

            default:
                throw new ArgumentOutOfRangeException();
            }

            return 0;
        }
Example No. 5
        public override void GenerateReport(ICatalogue c, IDataLoadEventListener listener, CancellationToken cancellationToken)
        {
            SetupLogging(c.CatalogueRepository);

            var toDatabaseLogger = new ToLoggingDatabaseDataLoadEventListener(this, _logManager, _loggingTask, "DQE evaluation of " + c);

            var forker = new ForkDataLoadEventListener(listener, toDatabaseLogger);

            try
            {
                _catalogue = c;
                var dqeRepository = new DQERepository(c.CatalogueRepository);

                byPivotCategoryCubesOverTime.Add("ALL", new PeriodicityCubesOverTime("ALL"));
                byPivotRowStatesOverDataLoadRunId.Add("ALL", new DQEStateOverDataLoadRunId("ALL"));

                Check(new FromDataLoadEventListenerToCheckNotifier(forker));

                var sw = Stopwatch.StartNew();
                using (var con = _server.GetConnection())
                {
                    con.Open();

                    var cmd = _server.GetCommand(_queryBuilder.SQL, con);
                    cmd.CommandTimeout = 500000;

                    var t = cmd.ExecuteReaderAsync(cancellationToken);
                    t.Wait(cancellationToken);

                    if (cancellationToken.IsCancellationRequested)
                    {
                        throw new OperationCanceledException("User cancelled DQE while fetching data");
                    }

                    var r = t.Result;

                    int progress = 0;

                    while (r.Read())
                    {
                        cancellationToken.ThrowIfCancellationRequested();

                        progress++;
                        int dataLoadRunIDOfCurrentRecord = 0;
                        //to start with assume we will pass the results for the 'unknown batch' (where data load run ID is null or not available)

                        //if the DataReader is likely to have a data load run ID column
                        if (_containsDataLoadID)
                        {
                            //get data load run id
                            int? runID = dqeRepository.ObjectToNullableInt(r[_dataLoadRunFieldName]);

                            //if it has a value use it (otherwise it is null so use 0 - ugh I know, it's a primary key constraint issue)
                            if (runID != null)
                            {
                                dataLoadRunIDOfCurrentRecord = (int)runID;
                            }
                        }

                        string pivotValue = null;

                        //if the user has a pivot category configured
                        if (_pivotCategory != null)
                        {
                            pivotValue = GetStringValueForPivotField(r[_pivotCategory], forker);

                            if (!haveComplainedAboutNullCategories && string.IsNullOrWhiteSpace(pivotValue))
                            {
                                forker.OnNotify(this,
                                                new NotifyEventArgs(ProgressEventType.Warning,
                                                                    "Found a null/empty value for pivot category '" + _pivotCategory +
                                                                    "', this record will ONLY be recorded under ALL and not it's specific category, you will not be warned of further nulls because there are likely to be many if there are any"));
                                haveComplainedAboutNullCategories = true;
                                pivotValue = null;
                            }
                        }

                        //always increase the "ALL" category
                        ProcessRecord(dqeRepository, dataLoadRunIDOfCurrentRecord, r,
                                      byPivotCategoryCubesOverTime["ALL"], byPivotRowStatesOverDataLoadRunId["ALL"]);

                        //if there is a value in the current record for the pivot column
                        if (pivotValue != null)
                        {
                            //if it is a novel pivot value
                            if (!byPivotCategoryCubesOverTime.ContainsKey(pivotValue))
                            {
                                //we will need to expand the dictionaries
                                if (byPivotCategoryCubesOverTime.Keys.Count > MaximumPivotValues)
                                {
                                    throw new OverflowException(
                                              "Encountered more than " + MaximumPivotValues + " values for the pivot column " + _pivotCategory +
                                              " this will result in crazy space usage since it is a multiplicative scale of DQE tesseracts");
                                }

                                //expand both the time periodicity and the state results
                                byPivotRowStatesOverDataLoadRunId.Add(pivotValue,
                                                                      new DQEStateOverDataLoadRunId(pivotValue));
                                byPivotCategoryCubesOverTime.Add(pivotValue, new PeriodicityCubesOverTime(pivotValue));
                            }

                            //now we are sure that the dictionaries have the category field we can increment it
                            ProcessRecord(dqeRepository, dataLoadRunIDOfCurrentRecord, r,
                                          byPivotCategoryCubesOverTime[pivotValue], byPivotRowStatesOverDataLoadRunId[pivotValue]);
                        }

                        if (progress % 5000 == 0)
                        {
                            forker.OnProgress(this,
                                              new ProgressEventArgs("Processing " + _catalogue,
                                                                    new ProgressMeasurement(progress, ProgressType.Records), sw.Elapsed));
                        }
                    }
                    //final value
                    forker.OnProgress(this,
                                      new ProgressEventArgs("Processing " + _catalogue,
                                                            new ProgressMeasurement(progress, ProgressType.Records), sw.Elapsed));
                    con.Close();
                }
                sw.Stop();

                foreach (var state in byPivotRowStatesOverDataLoadRunId.Values)
                {
                    state.CalculateFinalValues();
                }

                //now commit results
                using (var con = dqeRepository.BeginNewTransactedConnection())
                {
                    try
                    {
                        //mark down that we are beginning an evaluation on this the day of our lord etc...
                        Evaluation evaluation = new Evaluation(dqeRepository, _catalogue);

                        foreach (var state in byPivotRowStatesOverDataLoadRunId.Values)
                        {
                            state.CommitToDatabase(evaluation, _catalogue, con.Connection, con.Transaction);
                        }

                        if (_timePeriodicityField != null)
                        {
                            foreach (PeriodicityCubesOverTime periodicity in byPivotCategoryCubesOverTime.Values)
                            {
                                periodicity.CommitToDatabase(evaluation);
                            }
                        }

                        con.ManagedTransaction.CommitAndCloseConnection();
                    }
                    catch (Exception)
                    {
                        con.ManagedTransaction.AbandonAndCloseConnection();
                        throw;
                    }
                }

                forker.OnNotify(this,
                                new NotifyEventArgs(ProgressEventType.Information,
                                                    "CatalogueConstraintReport completed successfully  and committed results to DQE server"));
            }
            catch (Exception e)
            {
                if (!(e is OperationCanceledException))
                {
                    forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Fatal Crash", e));
                }
                else
                {
                    forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DQE Execution Cancelled", e));
                }
            }
            finally
            {
                toDatabaseLogger.FinalizeTableLoadInfos();
            }
        }
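Example No. 5 also passes the fork through FromDataLoadEventListenerToCheckNotifier at the Check(...) call, adapting the listener so that check results are reported as notify events and therefore reach the logging database as well. A brief sketch, with checkable standing in for any object exposing a Check(ICheckNotifier) method:

// Hedged sketch: route check output through the same forked listeners
ICheckNotifier notifier = new FromDataLoadEventListenerToCheckNotifier(forker);
checkable.Check(notifier);   // check messages are forwarded to forker as notify events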