Example #1
        protected virtual ICacheLayout CreateCacheLayout(ICacheProgress cacheProgress, IDataLoadEventListener listener)
        {
            var pipelineFactory = new CachingPipelineUseCase(cacheProgress);
            var destination     = pipelineFactory.CreateDestinationOnly(listener);

            return(destination.CreateCacheLayout());
        }
Example #2
        private IDataFlowPipelineEngine CreateRetryCachingEngine(ICacheProgress cacheProgress)
        {
            var cachingPipelineEngineFactory = new CachingPipelineUseCase(cacheProgress, true, new FailedCacheFetchRequestProvider(cacheProgress));
            var engine = cachingPipelineEngineFactory.GetEngine(_listener);

            _engineMap.Add(engine, cacheProgress.LoadProgress);
            return(engine);
        }
Example #3
        private IDataFlowPipelineEngine CreateCachingEngine(ICacheProgress cacheProgress)
        {
            var cachingPipelineEngineFactory = new CachingPipelineUseCase(cacheProgress);
            var engine = cachingPipelineEngineFactory.GetEngine(_listener);

            _engineMap.Add(engine, cacheProgress.LoadProgress);
            return(engine);
        }
Example #4
        private void SetupPipelineUI()
        {
            if (_pipelineSelectionUI == null)
            {
                var user    = new PipelineUser(_cacheProgress);
                var useCase = CachingPipelineUseCase.DesignTime();

                var selectionFactory = new PipelineSelectionUIFactory(Activator.RepositoryLocator.CatalogueRepository, user, useCase);
                _pipelineSelectionUI = (Control)selectionFactory.Create("Cache Pipeline", DockStyle.Fill, pPipeline);
            }
        }
Example #5
        public ICacheLayout CreateCacheLayout(ILoadProgress loadProgress, ILoadMetadata metadata)
        {
            AssertThatThereIsACacheDataProvider(metadata, metadata.ProcessTasks.Where(p=>!p.IsDisabled));

            var cp = loadProgress.CacheProgress;

            var factory = new CachingPipelineUseCase(cp);
            var destination = factory.CreateDestinationOnly(new ThrowImmediatelyDataLoadEventListener());

            return destination.CreateCacheLayout();
        }
Example #6
        public Task Fetch(DateTime startDate, DateTime endDate, GracefulCancellationToken token, IDataLoadEventListener listener, bool ignorePermissionWindow = false)
        {
            var dateToRetrieve      = new DateTime(startDate.Year, startDate.Month, startDate.Day);
            var initialFetchRequest = new BackfillCacheFetchRequest(_catalogueRepository, dateToRetrieve)
            {
                CacheProgress = _cacheProgress,
                ChunkPeriod   = _cacheProgress.ChunkPeriod
            };

            var requestProvider = (startDate == endDate)
                ? (ICacheFetchRequestProvider) new SingleDayCacheFetchRequestProvider(initialFetchRequest)
                : new MultiDayCacheFetchRequestProvider(initialFetchRequest, endDate);

            var factory = new CachingPipelineUseCase(_cacheProgress, ignorePermissionWindow, requestProvider);

            var engine = factory.GetEngine(listener);

            return(Task.Factory.StartNew(() => engine.ExecutePipeline(token)));
        }
Example #7
        /// <summary>
        /// Retrieves the destination component from the caching pipeline associated with the ICacheProgress object. The destination component is required to be an ICacheFileSystemDestination.
        /// </summary>
        /// <param name="cacheProgress"></param>
        /// <returns></returns>
        /// <exception cref="InvalidOperationException">Caching pipeline is not configured properly/doesn't exist</exception>
        private static ICacheFileSystemDestination GetCacheDestinationPipelineComponent(ICacheProgress cacheProgress)
        {
            if (cacheProgress.Pipeline_ID == null)
            {
                throw new InvalidOperationException("This CacheProgress does not have a caching pipeline, please configure one.");
            }

            var factory = new CachingPipelineUseCase(cacheProgress);
            ICacheFileSystemDestination destination;

            try
            {
                destination = factory.CreateDestinationOnly(new ThrowImmediatelyDataLoadEventListener());
            }
            catch (Exception e)
            {
                throw new Exception("We identified that your cache uses pipeline " + cacheProgress.Pipeline + " but we could not instantiate the Pipeline's Destination instance, make sure the pipeline is intact in PipelineDiagramUI.  See inner exception for details", e);
            }

            return(destination);
        }
Example #8
        public DataExportChildProvider(IRDMPPlatformRepositoryServiceLocator repositoryLocator, IChildProvider[] pluginChildProviders, ICheckNotifier errorsCheckNotifier, DataExportChildProvider previousStateIfKnown) : base(repositoryLocator.CatalogueRepository, pluginChildProviders, errorsCheckNotifier, previousStateIfKnown)
        {
            BlackListedSources   = previousStateIfKnown?.BlackListedSources ?? new List <ExternalCohortTable>();
            _errorsCheckNotifier = errorsCheckNotifier;
            dataExportRepository = repositoryLocator.DataExportRepository;

            AllProjectAssociatedCics = GetAllObjects <ProjectCohortIdentificationConfigurationAssociation>(dataExportRepository);

            _cicAssociations = new HashSet <int>(AllProjectAssociatedCics.Select(a => a.CohortIdentificationConfiguration_ID));

            CohortSources       = GetAllObjects <ExternalCohortTable>(dataExportRepository);
            ExtractableDataSets = GetAllObjects <ExtractableDataSet>(dataExportRepository);

            AddToDictionaries(new HashSet <object>(AllCohortIdentificationConfigurations.Where(cic => _cicAssociations.Contains(cic.ID))), new DescendancyList(AllProjectCohortIdentificationConfigurationsNode));
            AddToDictionaries(new HashSet <object>(AllCohortIdentificationConfigurations.Where(cic => !_cicAssociations.Contains(cic.ID))), new DescendancyList(AllFreeCohortIdentificationConfigurationsNode));

            _selectedDataSetsWithNoIsExtractionIdentifier = new HashSet <ISelectedDataSets>(dataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers());

            SelectedDataSets = GetAllObjects <SelectedDataSets>(dataExportRepository);
            ReportProgress("Fetching data export objects");

            var dsDictionary = ExtractableDataSets.ToDictionary(ds => ds.ID, d => d);

            foreach (SelectedDataSets s in SelectedDataSets)
            {
                s.InjectKnown(dsDictionary[s.ExtractableDataSet_ID]);
            }

            ReportProgress("Injecting SelectedDataSets");

            //This means that the ToString method in ExtractableDataSet doesn't need to go lookup catalogue info
            var catalogueIdDict = AllCatalogues.ToDictionary(c => c.ID, c2 => c2);

            foreach (ExtractableDataSet ds in ExtractableDataSets)
            {
                if (catalogueIdDict.TryGetValue(ds.Catalogue_ID, out Catalogue cata))
                {
                    ds.InjectKnown(cata);
                }
            }

            ReportProgress("Injecting ExtractableDataSet");

            AllPackages = GetAllObjects <ExtractableDataSetPackage>(dataExportRepository);

            Projects = GetAllObjects <Project>(dataExportRepository);
            ExtractionConfigurations = GetAllObjects <ExtractionConfiguration>(dataExportRepository);

            ReportProgress("Get Projects and Configurations");

            ExtractionConfigurationsByProject = ExtractionConfigurations.GroupBy(k => k.Project_ID).ToDictionary(gdc => gdc.Key, gdc => gdc.ToList());

            ReportProgress("Grouping Extractions by Project");

            AllGlobalExtractionFilterParameters = GetAllObjects <GlobalExtractionFilterParameter>(dataExportRepository);

            AllContainers = GetAllObjects <FilterContainer>(dataExportRepository).ToDictionary(o => o.ID, o => o);
            AllDeployedExtractionFilters = GetAllObjects <DeployedExtractionFilter>(dataExportRepository);
            _allParameters = GetAllObjects <DeployedExtractionFilterParameter>(dataExportRepository);

            ReportProgress("Getting Filters");

            //if we are using a database repository then we can make use of the caching class DataExportFilterManagerFromChildProvider to speed up
            //filter contents
            var dbRepo = dataExportRepository as DataExportRepository;

            _dataExportFilterManager = dbRepo == null ? dataExportRepository.FilterManager : new DataExportFilterManagerFromChildProvider(dbRepo, this);

            ReportProgress("Building FilterManager");

            Cohorts            = GetAllObjects <ExtractableCohort>(dataExportRepository);
            _cohortsByOriginId = new Dictionary <int, HashSet <ExtractableCohort> >();

            foreach (ExtractableCohort c in Cohorts)
            {
                if (!_cohortsByOriginId.ContainsKey(c.OriginID))
                {
                    _cohortsByOriginId.Add(c.OriginID, new HashSet <ExtractableCohort>());
                }

                _cohortsByOriginId[c.OriginID].Add(c);
            }

            _configurationToDatasetMapping = new Dictionary <ExtractionConfiguration, List <SelectedDataSets> >();

            ReportProgress("Fetching Cohorts");

            GetCohortAvailability();

            ReportProgress("GetCohortAvailability");

            var configToSds = SelectedDataSets.GroupBy(k => k.ExtractionConfiguration_ID).ToDictionary(gdc => gdc.Key, gdc => gdc.ToList());

            foreach (ExtractionConfiguration configuration in ExtractionConfigurations)
            {
                if (configToSds.TryGetValue(configuration.ID, out List <SelectedDataSets> result))
                {
                    _configurationToDatasetMapping.Add(configuration, result);
                }
            }

            ReportProgress("Mapping configurations to datasets");

            RootCohortsNode = new AllCohortsNode();
            AddChildren(RootCohortsNode, new DescendancyList(RootCohortsNode));

            foreach (ExtractableDataSetPackage package in AllPackages)
            {
                AddChildren(package, new DescendancyList(package));
            }

            ReportProgress("Packages and Cohorts");

            foreach (Project p in Projects)
            {
                AddChildren(p, new DescendancyList(p));
            }

            ReportProgress("Projects");

            //work out all the Catalogues that are extractable (Catalogues are extractable if there is an ExtractableDataSet with the Catalogue_ID that matches them)
            var cataToEds = new Dictionary <int, ExtractableDataSet>(ExtractableDataSets.ToDictionary(k => k.Catalogue_ID));

            //inject extractability into Catalogues
            foreach (Catalogue catalogue in AllCatalogues)
            {
                if (cataToEds.TryGetValue(catalogue.ID, out ExtractableDataSet result))
                {
                    catalogue.InjectKnown(result.GetCatalogueExtractabilityStatus());
                }
                else
                {
                    catalogue.InjectKnown(new CatalogueExtractabilityStatus(false, false));
                }
            }

            ReportProgress("Catalogue extractability injection");

            try
            {
                AddPipelineUseCases(new Dictionary <string, PipelineUseCase>
                {
                    { "File Import", UploadFileUseCase.DesignTime() },
                    { "Extraction", ExtractionPipelineUseCase.DesignTime() },
                    { "Release", ReleaseUseCase.DesignTime() },
                    { "Cohort Creation", CohortCreationRequest.DesignTime() },
                    { "Caching", CachingPipelineUseCase.DesignTime() },
                    { "Aggregate Committing", CreateTableFromAggregateUseCase.DesignTime(repositoryLocator.CatalogueRepository) }
                });
            }
            catch (Exception ex)
            {
                _errorsCheckNotifier.OnCheckPerformed(new CheckEventArgs("Failed to build DesignTime PipelineUseCases", CheckResult.Fail, ex));
            }

            ReportProgress("Pipeline adding");
        }
Example #9
        private IDataFlowPipelineEngine CreateCachingEngine(ICacheProgress cacheProgress)
        {
            var cachingPipelineEngineFactory = new CachingPipelineUseCase(cacheProgress);

            return(cachingPipelineEngineFactory.GetEngine(_listener));
        }
Example #10
        private void FetchCacheData(ICheckNotifier notifier)
        {
            if (_cacheProgress != null)
            {
                DateTime[] availableFiles;

                try
                {
                    var cacheFileSystem = new CachingPipelineUseCase(_cacheProgress).CreateDestinationOnly(new FromCheckNotifierToDataLoadEventListener(notifier));

                    var layout = cacheFileSystem.CreateCacheLayout();
                    availableFiles    = layout.GetSortedDateQueue(new ThrowImmediatelyDataLoadEventListener()).ToArray();
                    ResolvedCachePath = layout.GetLoadCacheDirectory(new FromCheckNotifierToDataLoadEventListener(notifier));
                }
                catch (Exception e)
                {
                    throw new Exception(
                              "Failed to generate cache layout/population information because the CacheProgress does not have a stable/working Pipeline Destination.  See Inner Exception for specifics",
                              e);
                }

                CachePeriodictiyData = new DataTable();

                CachePeriodictiyData.Columns.Add("YearMonth");
                CachePeriodictiyData.Columns.Add("Year", typeof(int));
                CachePeriodictiyData.Columns.Add("Month", typeof(int));

                CachePeriodictiyData.Columns.Add("Fetch Failures", typeof(int));
                CachePeriodictiyData.Columns.Add("Files In Cache", typeof(int));

                var allFailures =
                    _cacheProgress.CacheFetchFailures
                    .Where(f => f.ResolvedOn == null)
                    .Select(f => f.FetchRequestStart)
                    .ToArray();
                Array.Sort(allFailures);

                bool anyFailures  = allFailures.Any();
                bool anySuccesses = availableFiles.Any();

                //Make sure main data table has room on its X axis for the cache failures and loaded files
                if (anyFailures)
                {
                    ExtendXAxisTill(allFailures.Max());
                    ExtendXAxisBackwardsTill(allFailures.Min());
                }

                if (anySuccesses)
                {
                    ExtendXAxisTill(availableFiles.Max());
                    ExtendXAxisBackwardsTill(availableFiles.Min());
                }

                //now clone the data table's Year/Month axis but populate it with available/failure counts instead of the Catalogue record counts
                foreach (DataRow originRow in CataloguesPeriodictiyData.Rows)
                {
                    int year  = Convert.ToInt32(originRow["Year"]);
                    int month = Convert.ToInt32(originRow["Month"]);

                    var newRow = CachePeriodictiyData.Rows.Add();

                    newRow["YearMonth"] = originRow["YearMonth"];
                    newRow["Year"]      = originRow["Year"];
                    newRow["Month"]     = originRow["Month"];

                    int totalFailuresForMonth = anyFailures
                        ? allFailures.Count(f => f.Year == year && f.Month == month)
                        : 0;
                    int totalAvailableForMonth = anySuccesses
                        ? availableFiles.Count(f => f.Year == year && f.Month == month)
                        : 0;

                    newRow["Fetch Failures"] = totalFailuresForMonth;
                    newRow["Files In Cache"] = totalAvailableForMonth;
                }
            }
            else
            {
                notifier.OnCheckPerformed(
                    new CheckEventArgs("There is no Cache configured for LoadProgress '" + _loadProgress + "' (Not nessesarily a problem e.g. if you have a RemoteTableAttacher or some other load module that uses LoadProgress directly, short cutting the need for a cache)",
                                       CheckResult.Warning));
            }
        }
Example #11
        public void Check(ICheckNotifier notifier)
        {
            try
            {
                if (_cacheProgress.Pipeline_ID == null)
                {
                    throw new Exception("CacheProgress " + _cacheProgress.ID + " doesn't have a caching pipeline!");
                }

                IPipeline pipeline = null;
                try
                {
                    pipeline = _cacheProgress.Pipeline;
                }
                catch (Exception e)
                {
                    notifier.OnCheckPerformed(new CheckEventArgs("Error when trying to load Pipeline ID = " + _cacheProgress.Pipeline_ID.Value, CheckResult.Fail, e));
                }

                if (pipeline == null)
                {
                    notifier.OnCheckPerformed(new CheckEventArgs("Could not run Pipeline checks due to previous errors", CheckResult.Fail));
                }
                else
                {
                    var checker = new PipelineChecker(pipeline);
                    checker.Check(notifier);
                }

                if (_cacheProgress.CacheFillProgress == null && _cacheProgress.LoadProgress.OriginDate == null)
                {
                    //if we don't know what dates to request
                    notifier.OnCheckPerformed(
                        new CheckEventArgs(
                            "Both the CacheFillProgress and the LoadProgress.OriginDate are null, this means we don't know where the cache has filled up to and we don't know when the dataset is supposed to start.  This means it is impossible to know what dates to fetch",
                            CheckResult.Fail));
                }

                if (_cacheProgress.PermissionWindow_ID != null && !_cacheProgress.PermissionWindow.WithinPermissionWindow(DateTime.UtcNow))
                {
                    notifier.OnCheckPerformed(new CheckEventArgs(
                                                  "Current time is " + DateTime.UtcNow +
                                                  " which is not a permitted time according to the configured PermissionWindow " + _cacheProgress.PermissionWindow.Description +
                                                  " of the CacheProgress " + _cacheProgress,
                                                  CheckResult.Warning));
                }

                var shortfall = _cacheProgress.GetShortfall();

                if (shortfall <= TimeSpan.Zero)
                {
                    if (_cacheProgress.CacheLagPeriod == null)
                    {
                        notifier.OnCheckPerformed(
                            new CheckEventArgs(
                                "CacheProgress reports that it has loaded up till " + _cacheProgress.CacheFillProgress +
                                " which is in the future.  So we don't need to load this cache.", CheckResult.Warning));
                    }
                    else
                    {
                        notifier.OnCheckPerformed(
                            new CheckEventArgs(
                                "CacheProgress reports that it has loaded up till " + _cacheProgress.CacheFillProgress +
                                " but there is a lag period of " + _cacheProgress.CacheLagPeriod +
                                " which means we are not due to load any cached data yet.", CheckResult.Warning));
                    }
                }

                var factory = new CachingPipelineUseCase(_cacheProgress);
                IDataFlowPipelineEngine engine = null;
                try
                {
                    engine = factory.GetEngine(new FromCheckNotifierToDataLoadEventListener(notifier));
                }
                catch (Exception e)
                {
                    notifier.OnCheckPerformed(new CheckEventArgs("Could not create IDataFlowPipelineEngine", CheckResult.Fail, e));
                }

                if (engine != null)
                {
                    engine.Check(notifier);
                }
            }
            catch (Exception e)
            {
                notifier.OnCheckPerformed(
                    new CheckEventArgs(
                        "Entire checking process for cache progress " + _cacheProgress +
                        " crashed, see Exception for details", CheckResult.Fail, e));
            }
        }
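
Across the examples above the usage pattern is the same: construct a CachingPipelineUseCase from an ICacheProgress, then either ask it for the full engine via GetEngine(...) or for just the destination component via CreateDestinationOnly(...). The sketch below is not taken from the source; it only restates that pattern and assumes that cacheProgress (ICacheProgress), listener (IDataLoadEventListener) and token (GracefulCancellationToken) are already available in scope.

            // Sketch only: cacheProgress, listener and token are assumed to exist in the calling context
            var useCase = new CachingPipelineUseCase(cacheProgress);

            // Either build and run the full caching pipeline engine (as in Examples #3, #6 and #9)...
            IDataFlowPipelineEngine engine = useCase.GetEngine(listener);
            engine.ExecutePipeline(token);

            // ...or instantiate only the destination to work with the cache layout (Examples #1, #5, #7 and #10)
            ICacheFileSystemDestination destination = useCase.CreateDestinationOnly(listener);
            ICacheLayout layout = destination.CreateCacheLayout();

            // Retry variant from Example #2: ignore the permission window and replay failed fetch requests
            var retryUseCase = new CachingPipelineUseCase(cacheProgress, true, new FailedCacheFetchRequestProvider(cacheProgress));

            // Design-time variant from Examples #4 and #8, used where no concrete CacheProgress is being executed
            var designTimeUseCase = CachingPipelineUseCase.DesignTime();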