public async Task TestFundingReportGeneration(string sourceFileName, string collectionName, int expectedZipEntryCount)
{
    // Arrange: pin "now" so any timestamp-based behaviour in the report is deterministic.
    var dateTime = DateTime.UtcNow;

    var supplementaryDataWrapper = new SupplementaryDataWrapper()
    {
        SupplementaryDataLooseModels = GetSupplementaryDataLooseModels(),
        SupplementaryDataModels = GetSupplementaryDataModels(),
        ValidErrorModels = new List<ValidationErrorModel>()
    };

    Mock<IDateTimeProvider> dateTimeProviderMock = new Mock<IDateTimeProvider>();
    dateTimeProviderMock.Setup(x => x.GetNowUtc()).Returns(dateTime);
    dateTimeProviderMock.Setup(x => x.ConvertUtcToUk(It.IsAny<DateTime>())).Returns(dateTime);

    // VerifyAll() at the end asserts the report wrote exactly this CSV.
    var csvServiceMock = new Mock<ICsvFileService>();
    csvServiceMock
        .Setup(x => x.WriteAsync<FundingReportModel, FundingReportMapper>(
            It.IsAny<List<FundingReportModel>>(),
            $"(unknown).csv",
            It.IsAny<string>(),
            It.IsAny<CancellationToken>(),
            null,
            null))
        .Returns(Task.CompletedTask);

    Mock<IReferenceDataService> referenceDataService = new Mock<IReferenceDataService>();
    referenceDataService.Setup(m => m.GetLarsVersion(It.IsAny<CancellationToken>())).ReturnsAsync("123456");
    referenceDataService.Setup(m => m.GetOrganisationVersion(It.IsAny<CancellationToken>())).ReturnsAsync("234567");
    referenceDataService.Setup(m => m.GetPostcodeVersion(It.IsAny<CancellationToken>())).ReturnsAsync("345678");
    referenceDataService.Setup(m => m.GetProviderName(It.IsAny<int>(), It.IsAny<CancellationToken>())).Returns("Foo College");
    referenceDataService.Setup(m => m.GetDeliverableUnitCosts(It.IsAny<string>(), It.IsAny<IList<string>>()))
        .Returns(new List<DeliverableUnitCost>());

    // CLEANUP: removed unused locals present in the original test (filename,
    // testStream, valueProvider, and the versionInfo mock) — none were consumed
    // by the system under test. Also fixed the "fundigReport" typo.
    var fundingReport = new FundingReport(
        dateTimeProviderMock.Object,
        csvServiceMock.Object,
        referenceDataService.Object);

    SourceFileModel sourceFile = GetEsfSourceFileModel();
    sourceFile.FileName = sourceFileName;

    var esfJobContextMock = new Mock<IEsfJobContext>();
    esfJobContextMock.Setup(x => x.UkPrn).Returns(10005752);
    esfJobContextMock.Setup(x => x.JobId).Returns(1);
    esfJobContextMock.Setup(x => x.BlobContainerName).Returns("TestContainer");
    esfJobContextMock.Setup(x => x.CollectionYear).Returns(1819);
    esfJobContextMock.Setup(x => x.CollectionName).Returns(collectionName);

    // Act.
    await fundingReport.GenerateReport(esfJobContextMock.Object, sourceFile, supplementaryDataWrapper, CancellationToken.None);

    // Assert: all csv-service expectations were met.
    csvServiceMock.VerifyAll();
}
public async Task ExecuteTasks(
    IReadOnlyList<ITaskItem> tasks,
    SourceFileModel sourceFileModel,
    SupplementaryDataWrapper supplementaryDataWrapper,
    CancellationToken cancellationToken)
{
    // Runs each top-level task item in order; a task item's sub-tasks run
    // either concurrently (when it supports parallel execution) or
    // sequentially, honouring cancellation between sub-tasks.
    foreach (ITaskItem taskItem in tasks)
    {
        if (taskItem.SupportsParallelExecution)
        {
            // BUG FIX: the previous implementation passed an async lambda to
            // Parallel.ForEach, which compiles to async void — the loop
            // returned before the handlers finished and any exceptions were
            // unobservable. Starting all handlers and awaiting Task.WhenAll
            // keeps the concurrency while actually waiting for completion and
            // surfacing failures.
            var runningTasks = new List<Task>();
            foreach (var task in taskItem.Tasks)
            {
                runningTasks.Add(HandleTask(supplementaryDataWrapper, task, sourceFileModel, cancellationToken));
            }

            await Task.WhenAll(runningTasks);
        }
        else
        {
            foreach (var task in taskItem.Tasks)
            {
                if (cancellationToken.IsCancellationRequested)
                {
                    break;
                }

                await HandleTask(supplementaryDataWrapper, task, sourceFileModel, cancellationToken);
            }
        }
    }
}
public ContextFile(ContextProject projectContext, SourceFileModel fileModel)
{
    // Capture the owning project and the file this context describes.
    ProjectContext = projectContext;
    FileModel = fileModel;

    // Import/use tracking is scoped to this file's context.
    ImportUseContext = new ContextImportUse(this);

    // The generated class name keys this context.
    _KeyContext = FileModel.GeneratedClassName;
}
private void InitPorjectModel(SourceProjectModel projectModel, string srcPath)
{
    // Configures the supplied project model as a single-file ZLOGO library
    // (DLL) project whose entry class is named after the source file.
    // CLEANUP: removed the large blocks of commented-out dead code.
    string srcFileTypeName = Path.GetFileNameWithoutExtension(srcPath);

    projectModel.BinaryFileKind = PEFileKinds.Dll;
    projectModel.ProjectPackageName = "ZLOGOEmit";
    projectModel.EntryClassName = srcFileTypeName;
    projectModel.BinaryFileNameNoEx = srcFileTypeName;

    // The logo engine package plus its dll (sitting next to the executable)
    // are the only references this project needs.
    projectModel.AddRefPackage("ZLogoEngine");
    projectModel.AddRefDll(new FileInfo(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ZLogoEngine.dll")));
    projectModel.NeedSave = true;

    // Load the source eagerly; PreCode is injected ahead of the user's code.
    string sourceCode = File.ReadAllText(srcPath);
    SourceFileModel fileModel = new SourceFileModel(srcPath, srcPath, projectModel.EntryClassName, projectModel.ProjectPackageName, projectModel.EntryClassName, sourceCode, 1);
    fileModel.PreSourceCode = PreCode;
    projectModel.AddFile(fileModel);
}
public async Task Execute(
    SourceFileModel sourceFile,
    SupplementaryDataWrapper supplementaryDataWrapper,
    CancellationToken cancellationToken)
{
    // Pure passthrough to the reporting controller; kept async/await so this
    // frame appears in exception stack traces.
    await _reportingController.ProduceReports(supplementaryDataWrapper, sourceFile, cancellationToken);
}
public FileAST Parse(SourceFileModel fileModel)
{
    // Lexes the file's sections into per-line token collections, then runs
    // the two-stage parse: raw structure first, AST second.
    // CLEANUP: removed the commented-out debug dump and the dead remnants of
    // the older single-pass parser.
    ContextFile fileContext = new ContextFile(this.projectContext, fileModel);
    List<LineTokenCollection> tokens = Scan(fileContext, fileModel);

    FileRawParser rawParser = new FileRawParser();
    FileRaw fileRaw = rawParser.Parse(tokens, fileContext);

    FileASTParser astParser = new FileASTParser();
    FileAST fileAST = astParser.Parse(fileRaw, fileContext);
    return fileAST;
}
public static BoundSourceFile ToBoundSourceFile(SourceFileModel sourceFile)
{
    // Maps the storage-layer SourceFileModel onto the object-model
    // BoundSourceFile, substituting empty collections for missing span data.
    Contract.Requires(sourceFile != null);
    Contract.Ensures(Contract.Result<BoundSourceFile>() != null);

    return new BoundSourceFile
    {
        SourceFile = new SourceFile
        {
            Content = sourceFile.Content,
            Info = new SourceFileInfo
            {
                Language = sourceFile.Language,
                Path = sourceFile.Path,
                WebAddress = sourceFile.WebAddress,
                RepoRelativePath = sourceFile.RepoRelativePath
            }
        },
        Uid = sourceFile.Uid,
        ProjectId = sourceFile.ProjectId,
        ExcludeFromSearch = sourceFile.ExcludeFromSearch,
        // BUG FIX: the original carried a "TODO: Possible NRE, Spans could be
        // null" — apply the same empty-span fallback already used for
        // References below.
        ClassificationSpans = IndexableListAdapter.GetSpanList(sourceFile.Classifications) ?? IndexableSpans.Empty<ClassificationSpan>(),
        Definitions = sourceFile.Definitions?.Select(s => ToObjectModel(s)).ToList() ?? new List<DefinitionSpan>(0),
        References = IndexableListAdapter.GetSpanList(sourceFile.References) ?? IndexableSpans.Empty<ReferenceSpan>()
    };
}
public async Task RunTasks(
    IJobContextMessage jobContextMessage,
    IReadOnlyList<ITaskItem> tasks,
    CancellationToken cancellationToken)
{
    // Validation (when requested) runs first; a file-level failure
    // short-circuits the pipeline with a stored result plus an error report.
    var wrapper = new SupplementaryDataWrapper();
    var sourceFileModel = new SourceFileModel();

    bool validationRequested = tasks.SelectMany(t => t.Tasks).Contains(Constants.ValidationTask);
    if (validationRequested)
    {
        sourceFileModel = _fileHelper.GetSourceFileData(jobContextMessage);
        wrapper = await _fileValidationService.GetFile(sourceFileModel, cancellationToken);

        // Only run the file validators when reading the file itself succeeded.
        if (!wrapper.ValidErrorModels.Any())
        {
            wrapper = _fileValidationService.RunFileValidators(sourceFileModel, wrapper);
        }

        if (wrapper.ValidErrorModels.Any())
        {
            await _storageController.StoreValidationOnly(sourceFileModel, wrapper, cancellationToken);
            await _reportingController.FileLevelErrorReport(wrapper, sourceFileModel, cancellationToken);
            return;
        }
    }

    await _taskHelper.ExecuteTasks(tasks, sourceFileModel, wrapper, cancellationToken);
}
public SupplementaryDataWrapper RunFileValidators(
    SourceFileModel sourceFileModel,
    SupplementaryDataWrapper wrapper)
{
    // File-level validation: the first validator that fails on any row rejects
    // the whole file with a single error, so we return immediately.
    foreach (var looseModel in wrapper.SupplementaryDataLooseModels)
    {
        foreach (var fileValidator in _validators)
        {
            bool passed = fileValidator.Execute(sourceFileModel, looseModel);
            if (passed)
            {
                continue;
            }

            var error = new ValidationErrorModel
            {
                RuleName = fileValidator.ErrorName,
                ErrorMessage = fileValidator.ErrorMessage,
                IsWarning = false
            };
            wrapper.ValidErrorModels.Add(error);
            return wrapper;
        }
    }

    return wrapper;
}
public async Task<SupplementaryDataWrapper> GetFile(
    SourceFileModel sourceFileModel,
    CancellationToken cancellationToken)
{
    // Reads the supplementary-data file. A ValidationException from the reader
    // means the headers/format are wrong and is surfaced as a file-level error.
    IList<SupplementaryDataLooseModel> esfRecords = new List<SupplementaryDataLooseModel>();
    IList<ValidationErrorModel> errors = new List<ValidationErrorModel>();

    try
    {
        esfRecords = await _fileHelper.GetESFRecords(sourceFileModel, cancellationToken);

        bool nothingToProcess = esfRecords == null || !esfRecords.Any();
        if (nothingToProcess)
        {
            _logger.LogInfo("No ESF records to process");
        }
    }
    catch (ValidationException ex)
    {
        _logger.LogError($"The file format is incorrect, key: {sourceFileModel.FileName}", ex);
        errors.Add(new ValidationErrorModel
        {
            RuleName = "Fileformat_01",
            ErrorMessage = "The file format is incorrect. Please check the field headers are as per the Guidance document.",
            IsWarning = false
        });
    }

    var wrapper = new SupplementaryDataWrapper
    {
        SupplementaryDataLooseModels = esfRecords,
        ValidErrorModels = errors
    };
    return wrapper;
}
public Task Execute(
    SourceFileModel sourceFile,
    SupplementaryDataWrapper wrapper,
    CancellationToken cancellationToken)
{
    // Validation runs synchronously; return a completed task to satisfy the
    // task-based handler contract.
    _controller.ValidateData(wrapper, sourceFile, cancellationToken);
    return Task.CompletedTask;
}
public async Task <bool> StoreData(
    SourceFileModel sourceFile,
    SupplementaryDataWrapper wrapper,
    CancellationToken cancellationToken)
{
    // Persists one source file's supplementary data inside a single SQL
    // transaction: clear existing rows for the provider/contract, insert the
    // file header, then the validation errors and the data rows.
    // Returns true only if the transaction committed.
    bool successfullyCommitted = false;
    using (SqlConnection connection = new SqlConnection(_dbConfiguration.ESFNonEFConnectionString))
    {
        SqlTransaction transaction = null;
        try
        {
            await connection.OpenAsync(cancellationToken);
            if (cancellationToken.IsCancellationRequested)
            {
                return(false);
            }

            transaction = connection.BeginTransaction();

            var ukPrn = Convert.ToInt32(sourceFile.UKPRN);

            // Remove any previously stored data for this provider + contract
            // reference before re-inserting.
            var storeClear = new StoreClear(connection, transaction);
            await storeClear.ClearAsync(ukPrn, sourceFile.ConRefNumber, cancellationToken);

            // The file-details insert yields the id that the child rows
            // (validation errors, data rows) hang off.
            int fileId = await _storeFileDetails.StoreAsync(connection, transaction, cancellationToken, sourceFile);
            await _storeValidation.StoreAsync(connection, transaction, fileId, wrapper.ValidErrorModels, cancellationToken);
            await _store.StoreAsync(connection, transaction, fileId, wrapper.SupplementaryDataModels, cancellationToken);

            transaction.Commit();
            successfullyCommitted = true;
        }
        catch (Exception ex)
        {
            // Best-effort persistence: failures are logged, not rethrown;
            // the caller sees them only as a false return value.
            _logger.LogError("Failed to persist to DEDs", ex);
        }
        finally
        {
            // Roll back anything partially written if the commit never happened.
            if (!successfullyCommitted)
            {
                try
                {
                    transaction?.Rollback();
                }
                catch (Exception ex2)
                {
                    _logger.LogError("Failed to rollback DEDs persist transaction", ex2);
                }
            }
        }
    }

    return(successfullyCommitted);
}
private SourceFileModel readFileModel(string sourcFolder, string sourceName, string packageName)
{
    // Builds a file model whose class name is the file name with its extension
    // stripped; the source text is loaded eagerly.
    string srcfileFullPath = Path.Combine(sourcFolder, sourceName);
    string className = Path.GetFileNameWithoutExtension(srcfileFullPath);
    string sourceCode = File.ReadAllText(srcfileFullPath);

    return new SourceFileModel(sourceName, srcfileFullPath, className, packageName, className, sourceCode, 1);
}
public void Setup()
{
    // Fresh fixtures per test: empty memory, an empty source file, and a file
    // service primed from that (empty) file.
    mem = new Memory();

    src = new SourceFileModel();
    src.SourceFile = "";

    fil = new FileService();
    fil.CreateFileList(src);

    com = new ApplicationService(mem, src);
}
public async Task Execute(
    SourceFileModel sourceFile,
    SupplementaryDataWrapper supplementaryDataWrapper,
    CancellationToken cancellationToken)
{
    // A false return means the storage controller handled (and logged) its own
    // failure; we record the outcome here and do not rethrow.
    bool stored = await _storageController.StoreData(sourceFile, supplementaryDataWrapper, cancellationToken);
    if (!stored)
    {
        _logger.LogError("Failed to save data to the data store.");
    }
}
private void InitPorjectModel(SourceProjectModel projectModel, string srcPath)
{
    // Configure a single-file console-application project: the standard and
    // Excel packages plus whatever dlls the source path dictates.
    projectModel.AddRefPackage("Z标准包");
    projectModel.AddRefPackage("ZExcel开发包");
    projectModel.ProjectPackageName = "ZLangSingleFile";
    projectModel.BinaryFileKind = PEFileKinds.ConsoleApplication;
    projectModel.RefDllList = GetRefDllList(srcPath);

    // Read the source eagerly and register it as the project's single file.
    string sourceCode = File.ReadAllText(srcPath);
    var fileModel = new SourceFileModel(srcPath, srcPath, projectModel.EntryClassName, projectModel.ProjectPackageName, projectModel.EntryClassName, sourceCode, 1);
    projectModel.AddFile(fileModel);
}
private FundingSummaryHeaderModel PopulateReportHeader(
    SourceFileModel sourceFile,
    IEnumerable<ILRFileDetailsModel> fileData,
    int ukPrn,
    CancellationToken cancellationToken)
{
    // Builds the column-oriented header rows of the funding summary report:
    // each row starts with the supplementary-data values and gains one
    // value + spacer pair per ILR file/year.
    var ukPrnRow = new List<string> { ukPrn.ToString(), string.Empty, string.Empty };
    var contractReferenceNumberRow = new List<string> { sourceFile.ConRefNumber, string.Empty, string.Empty, "ILR File :" };

    // Strip any leading "folder/" prefix from the supplementary-data filename.
    var supplementaryDataFileRow = new List<string>
    {
        sourceFile.FileName.Contains("/")
            ? sourceFile.FileName.Substring(sourceFile.FileName.IndexOf("/", StringComparison.Ordinal) + 1)
            : sourceFile.FileName,
        string.Empty,
        string.Empty,
        "Last ILR File Update :"
    };

    // BUG FIX: timestamps were formatted with "hh" (12-hour clock with no
    // AM/PM designator), making e.g. 13:05 indistinguishable from 01:05.
    // Use 24-hour "HH", consistent with the rest of the codebase.
    var lastSupplementaryDataFileUpdateRow = new List<string>
    {
        sourceFile.SuppliedDate?.ToString("dd/MM/yyyy HH:mm:ss"),
        string.Empty,
        string.Empty,
        "File Preparation Date :"
    };

    foreach (var model in fileData)
    {
        var preparationDate = FileNameHelper.GetPreparedDateFromILRFileName(model.FileName);
        var secondYear = FileNameHelper.GetSecondYearFromReportYear(model.Year);

        ukPrnRow.Add(string.Empty);
        ukPrnRow.Add($"{model.Year}/{secondYear}");

        contractReferenceNumberRow.Add(model.FileName.Substring(model.FileName.Contains("/") ? model.FileName.IndexOf("/", StringComparison.Ordinal) + 1 : 0));
        contractReferenceNumberRow.Add(string.Empty);

        supplementaryDataFileRow.Add(model.LastSubmission?.ToString("dd/MM/yyyy HH:mm:ss"));
        supplementaryDataFileRow.Add(string.Empty);

        lastSupplementaryDataFileUpdateRow.Add(preparationDate);
        lastSupplementaryDataFileUpdateRow.Add(string.Empty);
    }

    return new FundingSummaryHeaderModel
    {
        ProviderName = _referenceDataCache.GetProviderName(ukPrn, cancellationToken),
        Ukprn = ukPrnRow.ToArray(),
        ContractReferenceNumber = contractReferenceNumberRow.ToArray(),
        SupplementaryDataFile = supplementaryDataFileRow.ToArray(),
        LastSupplementaryDataFileUpdate = lastSupplementaryDataFileUpdateRow.ToArray()
    };
}
public async Task GenerateReport(
    SourceFileModel sourceFile,
    SupplementaryDataWrapper wrapper,
    ZipArchive archive,
    CancellationToken cancellationToken)
{
    // Renders the validation errors to CSV, persists a copy to external
    // storage, and adds the same content to the report zip.
    string csv = GetCsv(wrapper.ValidErrorModels);

    var jobId = sourceFile.JobId ?? 0;
    var suppliedDate = sourceFile.SuppliedDate ?? DateTime.MinValue;
    string externalFileName = GetExternalFilename(sourceFile.UKPRN, jobId, suppliedDate);
    string fileName = GetFilename(sourceFile.UKPRN, jobId, suppliedDate);

    await _storage.SaveAsync($"{externalFileName}.csv", csv, cancellationToken);
    await WriteZipEntry(archive, $"{fileName}.csv", csv);
}
public async Task TestAimAndDeliverableReportGeneration()
{
    // Arrange: pin "now" so any timestamp-based behaviour in the report is deterministic.
    var dateTime = DateTime.UtcNow;

    var dateTimeProviderMock = new Mock<IDateTimeProvider>();
    dateTimeProviderMock.Setup(x => x.GetNowUtc()).Returns(dateTime);
    dateTimeProviderMock.Setup(x => x.ConvertUtcToUk(It.IsAny<DateTime>())).Returns(dateTime);

    var models = AimAndDeliverableBuilder.BuildAimAndDeliverableModels();

    var aimAndDeliverableService1819Mock = new Mock<IAimAndDeliverableService1819>();
    aimAndDeliverableService1819Mock
        .Setup(m => m.GetAimAndDeliverableModel(It.IsAny<int>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(models);

    var aimAndDeliverableService1920Mock = new Mock<IAimAndDeliverableService1920>();
    aimAndDeliverableService1920Mock
        .Setup(m => m.GetAimAndDeliverableModel(It.IsAny<int>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(models);

    // VerifyAll() at the end asserts the report wrote exactly this CSV.
    var csvServiceMock = new Mock<ICsvFileService>();
    csvServiceMock
        .Setup(x => x.WriteAsync<AimAndDeliverableModel, AimAndDeliverableMapper>(models, $"(unknown).csv", It.IsAny<string>(), It.IsAny<CancellationToken>(), null, null))
        .Returns(Task.CompletedTask);

    var aimAndDeliverableReport = new AimAndDeliverableReport(
        dateTimeProviderMock.Object,
        csvServiceMock.Object,
        aimAndDeliverableService1819Mock.Object,
        aimAndDeliverableService1920Mock.Object);

    var esfJobContextMock = new Mock<IEsfJobContext>();
    esfJobContextMock.Setup(x => x.UkPrn).Returns(10005752);
    esfJobContextMock.Setup(x => x.JobId).Returns(2);
    esfJobContextMock.Setup(x => x.BlobContainerName).Returns("TestBlob");
    esfJobContextMock.Setup(x => x.CollectionYear).Returns(1819);

    SourceFileModel sourceFile = GetEsfSourceFileModel();

    // Act. (CLEANUP: removed the unused `filename` and `reportName` locals
    // from the original — neither was asserted against.)
    await aimAndDeliverableReport.GenerateReport(esfJobContextMock.Object, sourceFile, null, CancellationToken.None);

    // Assert: all csv-service expectations were met.
    csvServiceMock.VerifyAll();
}
public async Task GenerateReport(
    SourceFileModel sourceFile,
    SupplementaryDataWrapper wrapper,
    ZipArchive archive,
    CancellationToken cancellationToken)
{
    // Serialises the validation result report, saving it to external storage
    // and adding the same JSON to the report zip.
    var report = GetValidationReport(wrapper.SupplementaryDataModels, wrapper.ValidErrorModels);

    var jobId = sourceFile.JobId ?? 0;
    var suppliedDate = sourceFile.SuppliedDate ?? DateTime.MinValue;
    var fileName = GetFilename(sourceFile.UKPRN, jobId, suppliedDate);
    var externalFilename = GetExternalFilename(sourceFile.UKPRN, jobId, suppliedDate);

    var json = _jsonSerializationService.Serialize(report);
    await SaveJson(externalFilename, json, cancellationToken);
    await WriteZipEntry(archive, $"{fileName}.json", json);
}
public void AddSourceFile(SourceFileModel sourceFile)
{
    // Emits a per-file header (path + checksum), then each formatted source
    // line, then a separating blank line.
    AddParagraph($"File: {sourceFile.RelativePath}", _fileHeaderFont);
    AddParagraph($"MD5 Checksum: {sourceFile.Checksum}", _fileHeaderFont);

    var lineNumber = 0;
    foreach (var line in sourceFile.Lines)
    {
        // FormatSourceLine receives the zero-based index of the line.
        AddParagraph(FormatSourceLine(lineNumber, line), _sourceLineFont);
        lineNumber++;
    }

    AddBlankLine();
}
public void Create()
{
    // Wires up the full simulator object graph. Construction order matters:
    // ports feed the RAM model, RAM + PC stack feed the memory service, and
    // the memory service feeds every operation group.
    //TODO: set this to timer port when it is implemented
    PortA = new Port();
    PortB = new Port();

    // RAM observes both ports; the PC stack has a fixed hardware capacity.
    RAM = new RAMModel(PortA, PortB);
    PCStack = new ObservableStack <short>(new Stack <short>(MemoryConstants.PC_STACK_CAPACITY));
    Memory = new MemoryService(RAM, PCStack);

    SourceFile = new SourceFileModel();
    FileService = new FileService();
    DialogService = new DialogService();

    // Every operation group shares the single memory service instance.
    OperationHelpers = new OperationHelpers(Memory);
    BitOperations = new BitOperations(Memory);
    ByteOperations = new ByteOperations(Memory);
    LiteralControlOperations = new LiteralControlOperations(Memory);

    // The application service aggregates everything constructed above.
    ApplicationService = new ApplicationService(Memory, SourceFile, OperationHelpers, BitOperations, ByteOperations, LiteralControlOperations);
}
private async Task<int> StoreAsync(
    SourceFileModel sourceFile,
    CancellationToken cancellationToken)
{
    // Inserts the source-file header row and returns the generated
    // SourceFileId (0 when cancellation pre-empted the insert).
    // SECURITY FIX: values were previously concatenated directly into the SQL
    // text (injection risk, and broken for values containing single quotes);
    // they are now bound as parameters.
    const string insertFileDetails =
        "INSERT INTO [dbo].[SourceFile] ([ConRefNumber], [UKPRN], [Filename], [DateTime], [FilePreparationDate]) " +
        "output INSERTED.SourceFileId " +
        "VALUES (@conRefNumber, @ukPrn, @fileName, @suppliedDate, @preparationDate)";

    if (cancellationToken.IsCancellationRequested)
    {
        return 0;
    }

    using (var sqlCommand = new SqlCommand(insertFileDetails, _sqlConnection, _sqlTransaction))
    {
        sqlCommand.Parameters.AddWithValue("@conRefNumber", sourceFile.ConRefNumber);
        sqlCommand.Parameters.AddWithValue("@ukPrn", sourceFile.UKPRN);
        sqlCommand.Parameters.AddWithValue("@fileName", sourceFile.FileName);
        // SuppliedDate is nullable; ADO.NET requires DBNull.Value, not null.
        sqlCommand.Parameters.AddWithValue("@suppliedDate", (object)sourceFile.SuppliedDate ?? DBNull.Value);
        sqlCommand.Parameters.AddWithValue("@preparationDate", sourceFile.PreparationDate);

        return (int)await sqlCommand.ExecuteScalarAsync(cancellationToken);
    }
}
private async Task HandleTask(
    SupplementaryDataWrapper wrapper,
    string task,
    SourceFileModel sourceFile,
    CancellationToken cancellationToken)
{
    // Dispatches to the first handler (in ascending Order) that claims the
    // task name; later handlers are never consulted.
    foreach (var taskHandler in _taskHandlers.OrderBy(h => h.Order))
    {
        if (taskHandler.IsMatch(task))
        {
            await taskHandler.Execute(sourceFile, wrapper, cancellationToken);
            break;
        }
    }
}
public FundingSummaryReportHeaderModel PopulateReportHeader(
    SourceFileModel sourceFile,
    IEnumerable<ILRFileDetails> ilrFileData,
    int ukPrn,
    string providerName,
    string conRefNumber,
    int collectionYear,
    int baseIlrYear,
    IDictionary<int, string> academicYearDictionary)
{
    // Builds the funding-summary header: one detail model per academic year,
    // enriched with the matching ILR file's name and dates where one exists.
    var ilrFileDetailModels = BuildIlrFileDetailModelsForYears(collectionYear, baseIlrYear, academicYearDictionary);

    // READABILITY FIX: the original expression relied on `??` binding tighter
    // than `?:` ("HasValue ?? false ? a : b"), which parsed correctly but read
    // ambiguously; spelled out as an explicit null check instead.
    DateTime? lastSupplementaryDataFileUpdateUk = null;
    if (sourceFile?.SuppliedDate != null)
    {
        lastSupplementaryDataFileUpdateUk = _dateTimeProvider.ConvertUtcToUk(sourceFile.SuppliedDate.Value);
    }

    foreach (var model in ilrFileDetailModels)
    {
        var ilrData = ilrFileData?.FirstOrDefault(x => x?.Year == model.Year);
        if (ilrData == null)
        {
            continue;
        }

        // CLEANUP: dropped the redundant `?.` on ilrData inside this
        // null-checked branch.
        DateTime? lastIlrFileUpdateUk = ilrData.LastSubmission.HasValue
            ? _dateTimeProvider.ConvertUtcToUk(ilrData.LastSubmission.Value)
            : (DateTime?)null;

        model.IlrFile = !string.IsNullOrWhiteSpace(ilrData.FileName) ? Path.GetFileName(ilrData.FileName) : null;
        model.FilePrepDate = ilrData.FilePreparationDate?.ToString(ReportingConstants.ShortDateFormat);
        model.LastIlrFileUpdate = lastIlrFileUpdateUk?.ToString(ReportingConstants.LongDateFormat);
    }

    return new FundingSummaryReportHeaderModel
    {
        Ukprn = ukPrn.ToString(),
        ProviderName = providerName,
        ContractReferenceNumber = conRefNumber,
        SecurityClassification = ReportingConstants.Classification,
        SupplementaryDataFile = !string.IsNullOrWhiteSpace(sourceFile?.FileName) ? Path.GetFileName(sourceFile.FileName) : null,
        LastSupplementaryDataFileUpdate = lastSupplementaryDataFileUpdateUk?.ToString(ReportingConstants.LongDateFormat),
        IlrFileDetails = ilrFileDetailModels
    };
}
public async Task ProduceReports(
    SupplementaryDataWrapper wrapper,
    SourceFileModel sourceFile,
    CancellationToken cancellationToken)
{
    // Generates all validation + ESF reports into a single zip archive and
    // uploads it keyed by UKPRN and job id.
    _logger.LogInfo("ESF Reporting service called");

    if (!wrapper.SupplementaryDataModels.Any() && string.IsNullOrEmpty(sourceFile.FileName))
    {
        // todo ... get data from ESF database, only received reporting task
    }

    using (var memoryStream = new MemoryStream())
    {
        // NOTE: the archive must be disposed (end of the inner using) before
        // the stream is uploaded, so the zip central directory is flushed into
        // the buffer.
        using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Create, true))
        {
            if (cancellationToken.IsCancellationRequested)
            {
                return;
            }

            foreach (var validationReport in _validationReports)
            {
                await validationReport.GenerateReport(sourceFile, wrapper, archive, cancellationToken);
            }

            foreach (var report in _esfReports)
            {
                await report.GenerateReport(wrapper, sourceFile, archive, cancellationToken);
            }

            if (cancellationToken.IsCancellationRequested)
            {
                return;
            }
        }

        await _streamableKeyValuePersistenceService.SaveAsync($"{sourceFile.UKPRN}_{sourceFile.JobId}_Reports.zip", memoryStream, cancellationToken);
    }
}
public List<LineTokenCollection> Scan(ContextFile fileContext, SourceFileModel fileModel)
{
    // Tokenises the three source sections (pre / real / back) in that order,
    // skipping any section that is empty or whitespace-only.
    var tokens = new List<LineTokenCollection>();

    var sections = new[]
    {
        new { Code = fileModel.PreSourceCode, StartLine = fileModel.PreSourceStartLine },
        new { Code = fileModel.RealSourceCode, StartLine = fileModel.RealSourceStartLine },
        new { Code = fileModel.BackSourceCode, StartLine = fileModel.BackSourceStartLine }
    };

    foreach (var section in sections)
    {
        if (string.IsNullOrWhiteSpace(section.Code))
        {
            continue;
        }

        tokens.AddRange(ScanTextCode(section.Code, fileContext, section.StartLine));
    }

    return tokens;
}
public async Task GenerateReport(
    SupplementaryDataWrapper wrapper,
    SourceFileModel sourceFile,
    ZipArchive archive,
    CancellationToken cancellationToken)
{
    // Builds the provider-level CSV; nothing is written when the service
    // yields no content or cancellation was requested.
    var jobId = sourceFile.JobId ?? 0;
    var suppliedDate = sourceFile.SuppliedDate ?? DateTime.MinValue;
    var externalFileName = GetExternalFilename(sourceFile.UKPRN, jobId, suppliedDate);
    var fileName = GetFilename(sourceFile.UKPRN, jobId, suppliedDate);

    if (cancellationToken.IsCancellationRequested)
    {
        return;
    }

    var ukPrn = Convert.ToInt32(sourceFile.UKPRN);
    string csv = await GetCsv(ukPrn, cancellationToken);
    if (csv == null)
    {
        return;
    }

    await _storage.SaveAsync($"{externalFileName}.csv", csv, cancellationToken);
    await WriteZipEntry(archive, $"{fileName}.csv", csv);
}
public async Task FileLevelErrorReport(
    SupplementaryDataWrapper wrapper,
    SourceFileModel sourceFile,
    CancellationToken cancellationToken)
{
    // Produces only the file-level validation reports, zips them, and uploads
    // the archive keyed by UKPRN and job id.
    if (cancellationToken.IsCancellationRequested)
    {
        return;
    }

    using (var zipStream = new MemoryStream())
    {
        // The archive must be disposed before the stream is persisted so the
        // zip central directory is flushed into the buffer.
        using (var zipArchive = new ZipArchive(zipStream, ZipArchiveMode.Create, true))
        {
            foreach (var report in _validationReports)
            {
                await report.GenerateReport(sourceFile, wrapper, zipArchive, cancellationToken);
            }
        }

        await _streamableKeyValuePersistenceService.SaveAsync($"{sourceFile.UKPRN}_{sourceFile.JobId}_Reports.zip", zipStream, cancellationToken);
    }
}
public void ValidateData(
    SupplementaryDataWrapper wrapper,
    SourceFileModel sourceFile,
    CancellationToken cancellationToken)
{
    // Two-phase validation. Phase 1 runs the loose validator over every loose
    // row; phase 2 maps surviving rows to strong models and runs the
    // business-rule commands. A validator/command whose RejectFile flag is set
    // aborts the whole run after recording its errors.
    foreach (var looseModel in wrapper.SupplementaryDataLooseModels)
    {
        if (_looseValidatorCommand.Execute(looseModel))
        {
            continue;
        }

        foreach (var error in _looseValidatorCommand.Errors)
        {
            wrapper.ValidErrorModels.Add(error);
        }

        if (!_looseValidatorCommand.RejectFile)
        {
            continue;
        }

        return;
    }

    // Drop rows that failed loose validation, then map to the strong model.
    wrapper.SupplementaryDataLooseModels = FilterOutInvalidLooseRows(wrapper);
    wrapper.SupplementaryDataModels = wrapper.SupplementaryDataLooseModels.Select(m => _mapper.GetSupplementaryDataModelFromLooseModel(m)).ToList();

    // Pre-warm the caches the validators read from (ULN lookups and this
    // provider's contract allocations).
    var allUlns = wrapper.SupplementaryDataModels.Select(m => m.ULN).ToList();
    _populationService.PrePopulateUlnCache(allUlns, cancellationToken);

    var ukPrn = Convert.ToInt64(sourceFile.UKPRN);
    _populationService.PrePopulateContractAllocations(ukPrn, wrapper.SupplementaryDataModels, cancellationToken);

    foreach (var command in _validatorCommands)
    {
        // Cross-record commands need visibility of the full record set.
        if (command is ICrossRecordCommand)
        {
            ((ICrossRecordCommand)command).AllRecords = wrapper.SupplementaryDataModels;
        }

        foreach (var model in wrapper.SupplementaryDataModels)
        {
            if (command.Execute(model))
            {
                continue;
            }

            foreach (var error in command.Errors)
            {
                wrapper.ValidErrorModels.Add(error);
            }

            if (!command.RejectFile)
            {
                continue;
            }

            // A file-rejecting failure stops all further validation.
            RejectFile = true;
            return;
        }
    }

    // Finally drop rows that failed the business-rule validation.
    wrapper.SupplementaryDataModels = FilterOutInvalidRows(wrapper);
}