public NotificationImportService(INotificationMapper notificationMapper,
    INotificationRepository notificationRepository,
    INotificationImportRepository notificationImportRepository,
    IImportLogger logger,
    IHub sentryHub,
    IMigratedNotificationsMarker migratedNotificationsMarker,
    ISpecimenImportService specimenImportService,
    IImportValidator importValidator,
    IClusterImportService clusterImportService,
    ICultureAndResistanceService cultureAndResistanceService,
    IDrugResistanceProfileService drugResistanceProfileService,
    ICaseManagerImportService caseManagerImportService)
{
    sentryHub.ConfigureScope(s => { s.SetTag("context", "migration"); });
    _notificationMapper = notificationMapper;
    _notificationRepository = notificationRepository;
    _notificationImportRepository = notificationImportRepository;
    _logger = logger;
    _migratedNotificationsMarker = migratedNotificationsMarker;
    _specimenImportService = specimenImportService;
    _importValidator = importValidator;
    _clusterImportService = clusterImportService;
    _cultureAndResistanceService = cultureAndResistanceService;
    _drugResistanceProfileService = drugResistanceProfileService;
    _caseManagerImportService = caseManagerImportService;
}
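Constructors like the one above rely on the DI container to supply every collaborator. A minimal sketch of how the registrations might look with Microsoft.Extensions.DependencyInjection; the lifetimes and the pairings other than ImportValidator and NotificationMapper are assumptions, not the project's actual Startup code.

using Microsoft.Extensions.DependencyInjection;

public void ConfigureServices(IServiceCollection services)
{
    // Scoped lifetimes are an assumption; the project may use different lifetimes.
    services.AddScoped<IImportValidator, ImportValidator>();
    services.AddScoped<INotificationMapper, NotificationMapper>();
    services.AddScoped<NotificationImportService>();
    // ...remaining collaborators (repositories, import logger, Sentry IHub) registered similarly.
}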
public NotificationMapperTest()
{
    _referenceDataRepositoryMock.Setup(repo => repo.GetTbServiceFromHospitalIdAsync(It.IsAny<Guid>()))
        .Returns((Guid guid) => Task.FromResult(_hospitalToTbServiceCodeDict[guid]));
    _referenceDataRepositoryMock.Setup(repo => repo.GetTreatmentOutcomeForTypeAndSubType(
            TreatmentOutcomeType.Died, TreatmentOutcomeSubType.Unknown))
        .ReturnsAsync(new TreatmentOutcome
        {
            TreatmentOutcomeType = TreatmentOutcomeType.Died,
            TreatmentOutcomeSubType = TreatmentOutcomeSubType.Unknown
        });
    _postcodeService.Setup(service => service.FindPostcodeAsync(It.IsAny<string>()))
        .ReturnsAsync((string postcode) => new PostcodeLookup { Postcode = postcode.Replace(" ", "").ToUpper() });

    // Needs to happen after the mocking, as the constructor uses a method from reference data repo
    var importLogger = new ImportLogger();
    _notificationMapper = new NotificationMapper(
        _migrationRepository,
        _referenceDataRepositoryMock.Object,
        importLogger,
        _postcodeService.Object);
    _importValidator = new ImportValidator(importLogger, _referenceDataRepositoryMock.Object);
}
public static async Task<HttpResponseMessage> Run(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)] HttpRequestMessage req,
    [Inject] IImportValidator importValidator,
    [Inject] ILogger<IImportValidator> logger)
{
    logger.LogInformation("Azure Function ValidateSourceData processed a request.");

    try
    {
        // Cheap structural check on the source data before any heavier processing
        bool isDataSourceValid = importValidator.TryValidateDataSource(out string failureInfo);
        if (!isDataSourceValid)
        {
            logger.LogWarning("Enlir Import Data not in Expected Format: \n" + failureInfo);
            throw new Exception("Enlir Import Data not in Expected Format: \n" + failureInfo);
        }

        var response = req.CreateResponse(HttpStatusCode.OK, true);
        return response;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, $"Error in Azure Function ValidateSourceData: {ex.Message}");
        var response = req.CreateResponse(HttpStatusCode.InternalServerError, ex.Message);
        return response;
    }
}
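The only member this function needs from IImportValidator is TryValidateDataSource(out string), which returns false and fills failureInfo when the source data fails its structural checks. Below is a minimal sketch of a validator satisfying that shape; the interface declaration, class name, and sheet-name check are illustrative, inferred from this call site only, and other examples on this page use a different IImportValidator with its own members.

using System;
using System.Collections.Generic;
using System.Linq;

public interface IImportValidator
{
    bool TryValidateDataSource(out string failureInfo);
}

// Illustrative implementation: checks that all expected worksheets are present.
public class RequiredSheetsValidator : IImportValidator
{
    private readonly IReadOnlyCollection<string> _requiredSheets;
    private readonly Func<IEnumerable<string>> _loadSheetNames; // placeholder for however the source exposes its sheet names

    public RequiredSheetsValidator(IReadOnlyCollection<string> requiredSheets,
        Func<IEnumerable<string>> loadSheetNames)
    {
        _requiredSheets = requiredSheets;
        _loadSheetNames = loadSheetNames;
    }

    public bool TryValidateDataSource(out string failureInfo)
    {
        var missing = _requiredSheets.Except(_loadSheetNames()).ToList();
        failureInfo = missing.Any()
            ? "Missing expected sheets: " + string.Join(", ", missing)
            : string.Empty;
        return missing.Count == 0;
    }
}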
public FileSystemSource(ImportSettings settings, IImportValidator validator)
{
    log.Info("Initializing data source: File system: " + settings.SourceFolder);
    m_settings = settings;
    m_validator = validator;
    MetaDataProvider = new NullUserInformationManager();
    m_filenameConverter = new FileNameConverter
    {
        MaximumFileNameLenght = validator.MaximumFileNameLength,
        IllegalCharacters = validator.IllegalCharacters
    };
}
public NotificationMapperTest()
{
    _caseManagerImportService
        .Setup(serv => serv.ImportOrUpdateLegacyUser(It.IsAny<string>(), It.IsAny<PerformContext>(), It.IsAny<int>()))
        .Returns(() => Task.CompletedTask);
    _referenceDataRepositoryMock.Setup(repo => repo.GetUserByUsernameAsync(It.IsAny<string>()))
        .Returns((string username) => Task.FromResult(_usernameToUserDict[username]));
    _referenceDataRepositoryMock.Setup(repo => repo.GetTbServiceFromHospitalIdAsync(It.IsAny<Guid>()))
        .Returns((Guid guid) => Task.FromResult(_hospitalToTbServiceCodeDict[guid]));
    _referenceDataRepositoryMock.Setup(repo => repo.GetAllTbServicesAsync())
        .Returns(() => Task.FromResult<IList<TBService>>(new List<TBService>
        {
            new TBService { Code = WestonGeneralCode, PHECCode = PhecResult }
        }));
    _referenceDataRepositoryMock.Setup(repo => repo.GetAllTreatmentOutcomes())
        .ReturnsAsync(ntbs_service.Models.SeedData.TreatmentOutcomes.GetTreatmentOutcomes().ToList);
    _referenceDataRepositoryMock.Setup(repo => repo.GetTreatmentOutcomeForTypeAndSubType(
            It.IsAny<TreatmentOutcomeType>(), It.IsAny<TreatmentOutcomeSubType>()))
        .ReturnsAsync(ntbs_service.Models.SeedData.TreatmentOutcomes.GetTreatmentOutcomes()
            .FirstOrDefault(o => o.TreatmentOutcomeType == TreatmentOutcomeType.Died
                                 && o.TreatmentOutcomeSubType == TreatmentOutcomeSubType.Unknown));
    _postcodeService.Setup(service => service.FindPostcodeAsync(It.IsAny<string>()))
        .ReturnsAsync((string postcode) => new PostcodeLookup { Postcode = postcode.Replace(" ", "").ToUpper() });

    // Needs to happen after the mocking, as the constructor uses a method from reference data repo
    _treatmentEventMapper = new TreatmentEventMapper(_caseManagerImportService.Object, _referenceDataRepositoryMock.Object);
    _notificationMapper = new NotificationMapper(
        _migrationRepository,
        _referenceDataRepositoryMock.Object,
        _importLoggerMock.Object,
        _postcodeService.Object,
        _caseManagerImportService.Object,
        _treatmentEventMapper);
    _importValidator = new ImportValidator(_importLoggerMock.Object, _referenceDataRepositoryMock.Object);
}
public NotificationImportService(INotificationMapper notificationMapper,
    INotificationRepository notificationRepository,
    INotificationImportRepository notificationImportRepository,
    IImportLogger logger,
    IHub sentryHub,
    IMigrationRepository migrationRepository,
    IMigratedNotificationsMarker migratedNotificationsMarker,
    ISpecimenService specimenService,
    IImportValidator importValidator)
{
    sentryHub.ConfigureScope(s => { s.SetTag("context", "migration"); });
    _notificationMapper = notificationMapper;
    _notificationRepository = notificationRepository;
    _notificationImportRepository = notificationImportRepository;
    _logger = logger;
    _migrationRepository = migrationRepository;
    _migratedNotificationsMarker = migratedNotificationsMarker;
    _specimenService = specimenService;
    _importValidator = importValidator;
}
public void Execute()
{
    log.Info("Configuring application");

    log.Info("Initializing Sharepoint Connection");
    IImportDestination importDestination = new ImportDestinationFactory(m_settings).Create();

    log.Info("Initializing Validation");
    IImportValidator validator = importDestination.GetValidator();
    if (!validator.IsValid)
    {
        log.Error("The document library " + m_settings.DocumentLibrary + " does not exist");
        return;
    }

    IImportSource source = new FileSystemSourceFactory(m_settings).CreateWith(validator);
    var importFileProcessor = new PostImportFileProcessor(m_settings);

    importDestination.ItemProcessed += (s, args) => log.Info("END Processing " + args.Item.OriginalFullName);
    if (m_settings.MoveFiles)
    {
        importDestination.ItemProcessed += importFileProcessor.MoveItem;
    }

    ImportItem importItem = source.LoadItems();
    DisplayImportStatistics(importItem);

    if (m_settings.Mode == ImportMode.Execute)
    {
        log.Info("Start Import");
        importDestination.Import(importItem);
        log.Info("Import finished");
    }
}
public IImportSource CreateWith(IImportValidator validator)
{
    return new FileSystemSource(m_settings, validator);
}
public void Run()
{
    try
    {
        Stopwatch stopwatchFull = Stopwatch.StartNew();

        _logger.LogInformation($"{nameof(Application)}.{nameof(Run)} execution invoked");

        _importValidator = _serviceProvider.GetService<IImportValidator>();
        _typeListValidator = _serviceProvider.GetService<ITypeListValidator>();
        _importManager = _serviceProvider.GetService<IImportManager>();
        _importStorageProvider = _serviceProvider.GetService<IImportStorageProvider>();
        _transformManager = _serviceProvider.GetService<ITransformManager>();
        _transformStorageProvider = _serviceProvider.GetService<ITransformStorageProvider>();
        _mergeManager = _serviceProvider.GetService<IMergeManager>();
        _mergeStorageProvider = _serviceProvider.GetService<IMergeStorageProvider>();

        // File-based import and transform stages run below
        string formattedDateString = DateTime.UtcNow.ToString(DateFormatSpecifier);

        // Import
        Stopwatch stopwatchImport = Stopwatch.StartNew();

        // Before we take the overhead of downloading all the import data,
        // check that the data has the right overall structure
        bool isDataSourceValid = _importValidator.TryValidateDataSource(out string failureInfo);
        if (!isDataSourceValid)
        {
            _logger.LogWarning("Enlir Import Data not in Expected Format: \n" + failureInfo);
            throw new ValidationException("Enlir Import Data not in Expected Format: \n" + failureInfo);
        }

        ImportResultsContainer importResultsContainer = _importManager.ImportAll();
        string importStoragePath = _importStorageProvider.StoreImportResults(importResultsContainer, formattedDateString);
        stopwatchImport.Stop();

        // Cheat data setup for testing - comment the import stage out and uncomment these
        // lines to rerun from previously stored results instead of doing a full run:
        //string importStoragePath = @"D:\Temp\FFRKApi\ImportResults-2018-12-21_09-48-46.json";
        //string transformStoragePath = @"D:\Docs\Personal\FFRKLinqQuery\TransformResults-Latest.json";
        //string formattedDateString = "2018-12-21_09-48-46";
        //string importContents = File.ReadAllText(importStoragePath);
        //ImportResultsContainer importResultsContainer = JsonConvert.DeserializeObject<ImportResultsContainer>(importContents);

        // Now that we have the import data, check whether our TypeLists (used to convert string data
        // into ids) are still accurate. If the source data has changed its list of values for any type,
        // we need to stop and correct the TypeLists.
        IEnumerable<TypeListDifferences> typeListDifferences = _typeListValidator.TryValidateTypeLists(importResultsContainer);
        if (typeListDifferences.Any(t => t.IsIdListDifferentFromSource))
        {
            _logger.LogWarning("Enlir TypeList Data differs from coded TypeLists.");

            // Write validation failure data to a log file for easy perusal
            string typeListDifferencesLogPath = $"{AppContext.BaseDirectory}\\TypeListDifferencesLog.json";
            string typeListDifferencesLogContents = JsonConvert.SerializeObject(typeListDifferences);
            File.WriteAllText(typeListDifferencesLogPath, typeListDifferencesLogContents);
            _logger.LogWarning("Enlir TypeList differences written to file: " + typeListDifferencesLogPath);

            throw new ValidationException("Enlir Type List Data differs from coded TypeLists");
        }

        // Transform
        Stopwatch stopwatchTransform = Stopwatch.StartNew();
        TransformResultsContainer transformResultsContainer = _transformManager.TransformAll(importStoragePath);
        string transformStoragePath = _transformStorageProvider.StoreTransformResults(transformResultsContainer, formattedDateString);
        stopwatchTransform.Stop();

        // Merge
        Stopwatch stopwatchMerge = Stopwatch.StartNew();
        MergeResultsContainer mergeResultsContainer = _mergeManager.MergeAll(transformStoragePath);
        string mergeStoragePath = _mergeStorageProvider.StoreMergeResults(mergeResultsContainer, formattedDateString);
        stopwatchMerge.Stop();

        // Test that the stored merge results can be read back
        MergeResultsContainer testMergeResultsContainer = _mergeStorageProvider.RetrieveMergeResults(mergeStoragePath);

        stopwatchFull.Stop();

        // Use TotalSeconds rather than Seconds so runs longer than a minute are reported correctly
        _logger.LogInformation("Import Completed in {ImportTime} seconds", stopwatchImport.Elapsed.TotalSeconds);
        _logger.LogInformation("Transform Completed in {TransformTime} seconds", stopwatchTransform.Elapsed.TotalSeconds);
        _logger.LogInformation("Merge Completed in {MergeTime} seconds", stopwatchMerge.Elapsed.TotalSeconds);
        _logger.LogInformation("Full Run Completed in {FullRunTime} seconds", stopwatchFull.Elapsed.TotalSeconds);

        double aggregateTime = stopwatchImport.Elapsed.TotalSeconds + stopwatchTransform.Elapsed.TotalSeconds + stopwatchMerge.Elapsed.TotalSeconds;
        _logger.LogInformation("Full Run Aggregate Time in {AggregateTime} seconds", aggregateTime);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, ex.Message);
        _logger.LogInformation("Error in Top Level Application execution. Validate, Import, Transform, and Merge operations were NOT successfully completed. Previously existing data is unchanged.");
        throw;
    }
}
public ImportValidatorTest()
{
    _importValidator = new ImportValidator(_importLoggerMock.Object, _referenceDataRepositoryMock.Object);
}
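For consumers that only need the data-source check (as in the Azure Function and Run examples above), IImportValidator can also be stubbed directly with Moq, which binds out parameters to the value held at Setup time. A minimal sketch, assuming the TryValidateDataSource(out string) signature shown earlier; the test name and failure message are illustrative, not from the project.

using Moq;
using Xunit;

[Fact]
public void TryValidateDataSource_ReportsFailureInfo_WhenSourceIsInvalid()
{
    // The value of failureInfo at Setup time is what the mock assigns to the out argument
    var failureInfo = "Missing expected sheet";
    var validatorMock = new Mock<IImportValidator>();
    validatorMock
        .Setup(v => v.TryValidateDataSource(out failureInfo))
        .Returns(false);

    bool isValid = validatorMock.Object.TryValidateDataSource(out string reportedFailure);

    Assert.False(isValid);
    Assert.Equal("Missing expected sheet", reportedFailure);
}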