/// <summary>
/// Notifies the monitoring endpoint that a job message was processed successfully.
/// Send failures are logged as warnings and never propagated to the caller.
/// </summary>
public async Task ProcessedJobMessage(long jobId, Guid messageId, string messageName, List<GeneratedMessage> generatedMessages)
{
    try
    {
        logger.LogVerbose($"Sending request to record successful processing of event. Job Id: {jobId}, Event: id: {messageId} ");
        var statusMessage = new RecordJobMessageProcessingStatus
        {
            JobId = jobId,
            Id = messageId,
            MessageName = messageName,
            EndTime = DateTimeOffset.UtcNow,
            // Guard against a null list so the monitoring side always gets a collection.
            GeneratedMessages = generatedMessages ?? new List<GeneratedMessage>(),
            Succeeded = true,
        };
        var endpointName = config.GetMonitoringEndpointName(jobId);
        await messageSession.Send(endpointName, statusMessage).ConfigureAwait(false);
        logger.LogDebug($"Sent request to record successful processing of event. Job Id: {jobId}, Event: id: {messageId} ");
    }
    catch (Exception ex)
    {
        // Monitoring is best-effort: a failed status send must not fail the job itself.
        logger.LogWarning($"Failed to send the job status message. Job: {jobId}, Message: {messageId}, {messageName}, Error: {ex.Message}, {ex}");
    }
}
/// <summary>
/// Executes the supplied audit-data delete for a batch of jobs. On SQL timeout or
/// deadlock the batch is split into single-item messages and re-enqueued; a failure
/// on a single-item batch is logged and rethrown.
/// </summary>
/// <param name="deleteAuditData">Delete operation taking the SQL parameters plus pre-built name/value strings (used for logging/diagnostics).</param>
/// <param name="batch">Batch of submission jobs whose audit data should be deleted.</param>
/// <param name="queueName">Queue used when re-enqueueing the split batch.</param>
private async Task AuditDataCleanUp(Func<IList<SqlParameter>, string, string, Task> deleteAuditData, SubmissionJobsToBeDeletedBatch batch, string queueName)
{
    try
    {
        var sqlParameters = batch.JobsToBeDeleted.ToSqlParameters();
        var deleteMethodName = deleteAuditData.Method.Name;
        paymentLogger.LogInfo($"Started {deleteMethodName}");
        var sqlParamName = string.Join(", ", sqlParameters.Select(pn => pn.ParameterName));
        var paramValues = string.Join(", ", sqlParameters.Select(pn => pn.Value));
        await deleteAuditData(sqlParameters, sqlParamName, paramValues);
        paymentLogger.LogInfo($"Finished {deleteMethodName}");
    }
    catch (Exception e)
    {
        // We have already tried in single batch mode; nothing more can be done here.
        if (batch.JobsToBeDeleted.Length == 1)
        {
            paymentLogger.LogWarning($"Error Deleting Audit Data, internal Exception {e}");
            throw;
        }

        // If SQL timeout or deadlock and we haven't already tried single-item mode,
        // try again with the batch split into single items.
        if (e.IsTimeOutException() || e.IsDeadLockException())
        {
            paymentLogger.LogWarning($"Starting Audit Data Deletion in Single Item mode");
            await SplitBatchAndEnqueueMessages(batch, queueName);
        }
        else
        {
            // BUG FIX: previously any other exception on a multi-item batch was silently
            // swallowed (no log, no rethrow). Surface it so the failure is not lost.
            paymentLogger.LogWarning($"Error Deleting Audit Data, internal Exception {e}");
            throw;
        }
    }
}
/// <summary>
/// Generates refund required-payment events for a learning aim that has been removed
/// from the ILR, using the payment history held in the cache.
/// </summary>
/// <returns>A read-only collection of refund events; empty when the message is a duplicate.</returns>
public async Task<ReadOnlyCollection<PeriodisedRequiredPaymentEvent>> RefundLearningAim(IdentifiedRemovedLearningAim identifiedRemovedLearningAim, IDataCache<PaymentHistoryEntity[]> paymentHistoryCache, CancellationToken cancellationToken)
{
    logger.LogDebug($"Now processing request to generate refunds for learning aim: learner: {identifiedRemovedLearningAim.Learner.ReferenceNumber}, Aim: {identifiedRemovedLearningAim.LearningAim.Reference}");
    // Duplicate removal messages must not generate refunds twice.
    if (await duplicateEarningEventService.IsDuplicate(identifiedRemovedLearningAim, cancellationToken))
    {
        logger.LogWarning($"Duplicate Identified Removed Learning Aim found for learner with JobId: {identifiedRemovedLearningAim.JobId}, " +
                          $"Learner Ref Number: {identifiedRemovedLearningAim.Learner.ReferenceNumber}, Aim: {identifiedRemovedLearningAim.LearningAim.Reference}");
        return (new List<PeriodisedRequiredPaymentEvent>().AsReadOnly());
    }
    var cacheItem = await paymentHistoryCache.TryGet(CacheKeys.PaymentHistoryKey, cancellationToken)
        .ConfigureAwait(false);
    // The payment history is expected to have been cached earlier in the pipeline.
    if (!cacheItem.HasValue)
    {
        throw new InvalidOperationException("No payment history found in the cache.");
    }
    var historicPayments = cacheItem.Value.Select(mapper.Map<PaymentHistoryEntity, Payment>).ToList();
    logger.LogDebug($"Got {historicPayments.Count} historic payments. Now generating refunds per transaction type.");
    // Refunds are created separately for each transaction type group.
    var requiredPaymentEvents = historicPayments.GroupBy(historicPayment => historicPayment.TransactionType)
        .SelectMany(group => CreateRefundPayments(identifiedRemovedLearningAim, group.ToList(), group.Key, cacheItem))
        .ToList();
    return (requiredPaymentEvents.AsReadOnly());
}
/// <summary>
/// Handles the period-end-stopped event: records the collection period, then sends a
/// month-end ACT1 completion payment command locally for each provider that has any.
/// </summary>
public async Task Handle(PeriodEndStoppedEvent message, IMessageHandlerContext context)
{
    logger.LogInfo($"Processing Month End Period End Stopped Event with Message Id : {context.MessageId}");
    // Flow the job id into the DC logging execution context for correlation.
    var dcExecutionContext = (ESFA.DC.Logging.ExecutionContext)executionContext;
    dcExecutionContext.JobId = message.JobId.ToString();
    await collectionPeriodStorageService.StoreCollectionPeriod(message);
    var commands = await completionPaymentService.GenerateProviderMonthEndAct1CompletionPaymentCommands(message).ConfigureAwait(false);
    if (!commands.Any())
    {
        logger.LogWarning($"No Providers found with Act1 Completion payments for Collection Period: {message.CollectionPeriod.Period:00}-{message.CollectionPeriod.AcademicYear}, job: {message.JobId}");
        return;
    }
    foreach (var command in commands)
    {
        logger.LogDebug($"Sending Process Provider Month End Act1 Completion Payment Command for provider: {command.Ukprn}");
        await context.SendLocal(command).ConfigureAwait(false);
    }
    logger.LogInfo($"Successfully processed Period End Stopped Event for {message.CollectionPeriod.Period:00}-{message.CollectionPeriod.AcademicYear}, job: {message.JobId}");
}
/// <summary>
/// Polls the job store until the DC job finishes or the configured wait time elapses.
/// </summary>
/// <returns>True when the job finished (or left the InProgress state); false on timeout.</returns>
public async Task<bool> WaitForJobToFinish(long jobId, CancellationToken cancellationToken)
{
    //TODO: Temp brittle solution to wait for jobs to finish
    logger.LogDebug($"Waiting for job {jobId} to finish.");
    // BUG FIX: use UtcNow so the deadline is immune to DST transitions / clock-zone changes.
    var endTime = DateTime.UtcNow.Add(config.TimeToWaitForJobToComplete);
    while (DateTime.UtcNow < endTime)
    {
        cancellationToken.ThrowIfCancellationRequested();
        var job = await dataContext.GetJobByDcJobId(jobId).ConfigureAwait(false);
        if (job != null && (job.DataLocksCompletionTime != null || job.Status != Monitoring.Jobs.Model.JobStatus.InProgress))
        {
            logger.LogInfo($"DC Job {jobId} finished. Status: {job.Status:G}. Finish time: {job.EndTime:G}");
            return true;
        }
        logger.LogVerbose($"DC Job {jobId} is still in progress");
        // BUG FIX: honour cancellation during the pause instead of only between iterations.
        await Task.Delay(config.TimeToPauseBetweenChecks, cancellationToken).ConfigureAwait(false);
    }
    logger.LogWarning($"Waiting {config.TimeToWaitForJobToComplete} but Job {jobId} still not finished.");
    return false;
}
/// <summary>
/// Bulk copies the supplied entities to the configured destination table, falling back
/// to single-record inserts when the bulk write fails.
/// </summary>
protected async Task HandleBulkCopy(CancellationToken cancellationToken, List<TEntity> list, SqlBulkCopy bulkCopy)
{
    using (var reader = ObjectReader.Create(list))
    {
        // Map entity properties to destination columns per the bulk-copy configuration.
        foreach (var columnMap in bulkCopyConfig.Columns)
        {
            bulkCopy.ColumnMappings.Add(columnMap.Key, columnMap.Value);
        }
        bulkCopy.BulkCopyTimeout = 0; // 0 disables the SqlBulkCopy timeout
        bulkCopy.BatchSize = batchSize;
        bulkCopy.DestinationTableName = bulkCopyConfig.TableName;
        try
        {
            await bulkCopy.WriteToServerAsync(reader, cancellationToken).ConfigureAwait(false);
        }
        catch (SystemException)
        {
            // Bulk write failed; retry row-by-row so one bad record does not lose the batch.
            logger.LogWarning("Error bulk writing to server. Processing single records.");
            await TrySingleRecord(bulkCopy, list, cancellationToken);
        }
    }
    // NOTE(review): logged even when some single-record inserts were discarded — confirm intended.
    logger.LogDebug($"Saved {list.Count} records of type {typeof(TEntity).Name}");
}
/// <summary>
/// Saves levy transactions one at a time inside a shared read-uncommitted transaction,
/// discarding any record that violates a unique key constraint (a duplicate).
/// Any other failure aborts the whole transaction (commit is never reached).
/// </summary>
public async Task SaveLevyTransactionsIndividually(IList<LevyTransactionModel> levyTransactions, CancellationToken cancellationToken)
{
    var mainContext = (FundingSourceDataContext)dataContextFactory.Create();
    using (var mainTransaction = await mainContext.Database
        .BeginTransactionAsync(IsolationLevel.ReadUncommitted, cancellationToken)
        .ConfigureAwait(false))
    {
        foreach (var model in levyTransactions)
        {
            try
            {
                // Reset the identity so the entity is inserted as a new row.
                model.Id = 0;
                // NOTE(review): a fresh context per record (enlisted in the shared
                // transaction) appears intended to keep a failed insert from poisoning
                // the change tracker for subsequent records — confirm.
                var context = (FundingSourceDataContext)dataContextFactory.Create(mainTransaction.GetDbTransaction());
                await context.LevyTransactions.AddAsync(model, cancellationToken).ConfigureAwait(false);
                await context.SaveChangesAsync(cancellationToken).ConfigureAwait(false);
            }
            catch (Exception e)
            {
                if (e.IsUniqueKeyConstraintException())
                {
                    // Duplicate record: skip it and carry on with the rest.
                    logger.LogWarning($"Discarding duplicate LevyTransaction. JobId: {model.JobId}, Learn ref: {model.LearnerReferenceNumber}");
                    continue;
                }
                throw;
            }
        }
        mainTransaction.Commit();
    }
}
/// <summary>
/// Builds and persists the submission metrics for a provider's job, combining DC earnings,
/// DAS earnings, data locks, required payments, held-back completion payments and
/// year-to-date totals. Fails if the data queries take longer than the allowed window.
/// </summary>
public async Task BuildMetrics(long ukprn, long jobId, short academicYear, byte collectionPeriod, CancellationToken cancellationToken)
{
    try
    {
        logger.LogDebug($"Building metrics for job: {jobId}, provider: {ukprn}, Academic year: {academicYear}, Collection period: {collectionPeriod}");
        var stopwatch = Stopwatch.StartNew();
        var submissionSummary = submissionSummaryFactory.Create(ukprn, jobId, academicYear, collectionPeriod);
        // Kick off all data queries in parallel.
        var dcEarningsTask = dcMetricsDataContextFactory.CreateContext(academicYear).GetEarnings(ukprn, academicYear, collectionPeriod, cancellationToken);
        var dasEarningsTask = submissionRepository.GetDasEarnings(ukprn, jobId, cancellationToken);
        var dataLocksTask = submissionRepository.GetDataLockedEarnings(ukprn, jobId, cancellationToken);
        var dataLocksTotalTask = submissionRepository.GetDataLockedEarningsTotal(ukprn, jobId, cancellationToken);
        var dataLocksAlreadyPaid = submissionRepository.GetAlreadyPaidDataLockedEarnings(ukprn, jobId, cancellationToken);
        var requiredPaymentsTask = submissionRepository.GetRequiredPayments(ukprn, jobId, cancellationToken);
        var heldBackCompletionAmountsTask = submissionRepository.GetHeldBackCompletionPaymentsTotal(ukprn, jobId, cancellationToken);
        var yearToDateAmountsTask = submissionRepository.GetYearToDatePaymentsTotal(ukprn, academicYear, collectionPeriod, cancellationToken);
        var dataTask = Task.WhenAll(dcEarningsTask, dasEarningsTask, dataLocksTask, dataLocksTotalTask, dataLocksAlreadyPaid, requiredPaymentsTask, heldBackCompletionAmountsTask, yearToDateAmountsTask);
        var waitTask = Task.Delay(TimeSpan.FromSeconds(270), cancellationToken);
        // BUG FIX: Task.WaitAny blocked a thread-pool thread inside an async method;
        // await Task.WhenAny waits asynchronously with the same timeout semantics.
        await Task.WhenAny(dataTask, waitTask).ConfigureAwait(false);
        cancellationToken.ThrowIfCancellationRequested();
        if (!dataTask.IsCompleted)
        {
            throw new InvalidOperationException($"Took too long to get data for the submission metrics. Ukprn: {ukprn}, job: {jobId}, Collection period: {collectionPeriod}");
        }
        var dataDuration = stopwatch.ElapsedMilliseconds;
        logger.LogDebug($"finished getting data from databases for job: {jobId}, ukprn: {ukprn}. Took: {dataDuration}ms.");
        submissionSummary.AddEarnings(dcEarningsTask.Result, dasEarningsTask.Result);
        submissionSummary.AddDataLockTypeCounts(dataLocksTotalTask.Result, dataLocksTask.Result, dataLocksAlreadyPaid.Result);
        submissionSummary.AddRequiredPayments(requiredPaymentsTask.Result);
        submissionSummary.AddHeldBackCompletionPayments(heldBackCompletionAmountsTask.Result);
        submissionSummary.AddYearToDatePaymentTotals(yearToDateAmountsTask.Result);
        var metrics = submissionSummary.GetMetrics();
        await submissionRepository.SaveSubmissionMetrics(metrics, cancellationToken);
        stopwatch.Stop();
        SendMetricsTelemetry(metrics, stopwatch.ElapsedMilliseconds);
        logger.LogInfo($"Finished building metrics for submission job: {jobId}, provider: {ukprn}, Academic year: {academicYear}, Collection period: {collectionPeriod}. Took: {stopwatch.ElapsedMilliseconds}ms");
    }
    catch (Exception e)
    {
        logger.LogWarning($"Error building the submission metrics report for job: {jobId}, ukprn: {ukprn}. Error: {e}");
        throw;
    }
}
/// <summary>
/// Maps calculated required levy amounts to LevyTransactionModel rows and saves them as a
/// batch, falling back to individual saves when the batch hits a duplicate or a deadlock.
/// </summary>
/// <param name="isReceiverTransferPayment">True when the funding account id should be calculated from the transfer-receiver side.</param>
public async Task StoreLevyTransactions(IList<CalculatedRequiredLevyAmount> calculatedRequiredLevyAmounts, CancellationToken cancellationToken, bool isReceiverTransferPayment = false)
{
    logger.LogDebug($"Got {calculatedRequiredLevyAmounts.Count} levy transactions.");
    // Flatten each event into a persistable row; the full event is kept in MessagePayload.
    var models = calculatedRequiredLevyAmounts.Select(levyAmount => new LevyTransactionModel
    {
        CollectionPeriod = levyAmount.CollectionPeriod.Period,
        AcademicYear = levyAmount.CollectionPeriod.AcademicYear,
        JobId = levyAmount.JobId,
        Ukprn = levyAmount.Ukprn,
        Amount = levyAmount.AmountDue,
        EarningEventId = levyAmount.EarningEventId,
        DeliveryPeriod = levyAmount.DeliveryPeriod,
        AccountId = levyAmount.AccountId ?? 0,
        RequiredPaymentEventId = levyAmount.EventId,
        ClawbackSourcePaymentEventId = levyAmount.ClawbackSourcePaymentEventId ?? Guid.Empty,
        TransferSenderAccountId = levyAmount.TransferSenderAccountId,
        MessagePayload = levyAmount.ToJson(),
        MessageType = levyAmount.GetType().FullName,
        IlrSubmissionDateTime = levyAmount.IlrSubmissionDateTime,
        FundingAccountId = levyAmount.CalculateFundingAccountId(isReceiverTransferPayment),
        ApprenticeshipEmployerType = levyAmount.ApprenticeshipEmployerType,
        ApprenticeshipId = levyAmount.ApprenticeshipId,
        LearnerUln = levyAmount.Learner.Uln,
        LearnerReferenceNumber = levyAmount.Learner.ReferenceNumber,
        LearningAimFrameworkCode = levyAmount.LearningAim.FrameworkCode,
        LearningAimPathwayCode = levyAmount.LearningAim.PathwayCode,
        LearningAimFundingLineType = levyAmount.LearningAim.FundingLineType,
        LearningAimProgrammeType = levyAmount.LearningAim.ProgrammeType,
        LearningAimReference = levyAmount.LearningAim.Reference,
        LearningAimStandardCode = levyAmount.LearningAim.StandardCode,
        LearningStartDate = levyAmount.LearningStartDate,
        SfaContributionPercentage = levyAmount.SfaContributionPercentage,
        TransactionType = levyAmount.TransactionType
    }).ToList();
    cancellationToken.ThrowIfCancellationRequested();
    try
    {
        await levyTransactionRepository.SaveLevyTransactions(models, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        // Only duplicates and deadlocks get the row-by-row retry; anything else propagates.
        if (!e.IsUniqueKeyConstraintException() && !e.IsDeadLockException())
        {
            throw;
        }
        logger.LogWarning($"Batch contained a duplicate LevyTransaction. Will store each individually and discard duplicate.");
        await levyTransactionRepository.SaveLevyTransactionsIndividually(models, cancellationToken);
    }
    logger.LogInfo($"Saved levy transactions to db. Duplicates skipped.");
}
/// <summary>
/// Sends a RecordEarningsJob message registering the start of an earnings job, followed by
/// RecordJobAdditionalMessages batches for any generated messages beyond the first batch.
/// </summary>
public async Task StartJob(long jobId, long ukprn, DateTime ilrSubmissionTime, short collectionYear, byte collectionPeriod, List<GeneratedMessage> generatedMessages, DateTimeOffset startTime)
{
    logger.LogVerbose($"Sending request to record start of earnings job. Job Id: {jobId}, Ukprn: {ukprn}");
    try
    {
        var batchSize = 1000; //TODO: this should come from config
        List<GeneratedMessage> batch;
        var providerEarningsEvent = new RecordEarningsJob
        {
            StartTime = startTime,
            JobId = jobId,
            Ukprn = ukprn,
            IlrSubmissionTime = ilrSubmissionTime,
            CollectionYear = collectionYear,
            CollectionPeriod = collectionPeriod,
            GeneratedMessages = generatedMessages.Take(batchSize).ToList(),
            LearnerCount = generatedMessages.Count
        };
        var partitionedEndpointName = GetMonitoringEndpointForJob(jobId, ukprn);
        logger.LogVerbose($"Endpoint for RecordEarningsJob for Job Id {jobId} is `{partitionedEndpointName}`");
        await messageSession.Send(partitionedEndpointName, providerEarningsEvent).ConfigureAwait(false);
        var skip = batchSize;
        // BUG FIX: continuation batches used a hard-coded Take(1000) instead of batchSize,
        // which would silently break batching if the batch size ever changed.
        while ((batch = generatedMessages.Skip(skip).Take(batchSize).ToList()).Count > 0)
        {
            skip += batchSize;
            var providerEarningsAdditionalMessages = new RecordJobAdditionalMessages
            {
                JobId = jobId,
                GeneratedMessages = batch,
            };
            await messageSession.Send(partitionedEndpointName, providerEarningsAdditionalMessages).ConfigureAwait(false);
        }
        logger.LogDebug($"Sent request(s) to record start of earnings job. Job Id: {jobId}, Ukprn: {ukprn}");
    }
    catch (Exception ex)
    {
        logger.LogWarning($"Failed to send the request to record the earnings job. Job: {jobId}, Error: {ex.Message}. {ex}");
        throw;
    }
}
/// <summary>
/// Background loop that repeatedly checks the status of all monitored jobs and removes
/// completed ones from the in-memory job list, pausing between sweeps, until cancelled.
/// </summary>
private async Task Run()
{
    await LoadExistingJobs().ConfigureAwait(false);
    while (!cancellationToken.IsCancellationRequested)
    {
        // Check all jobs concurrently.
        var tasks = currentJobs.Select(job => CheckJobStatus(job.Key)).ToList();
        await Task.WhenAll(tasks).ConfigureAwait(false);
        var completedJobs = currentJobs.Where(item => item.Value != JobStatus.InProgress).ToList();
        foreach (var completedJob in completedJobs)
        {
            if (!currentJobs.TryRemove(completedJob.Key, out _))
            {
                // BUG FIX: corrected "JOb" typo in the log message.
                logger.LogWarning($"Couldn't remove completed job from jobs list. Job: {completedJob.Key}, status: {completedJob.Value}");
            }
        }
        await Task.Delay(interval, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Records the start of an earnings job via the resolved job service, tracking the
/// operation and its duration in telemetry. Failures are logged and rethrown.
/// </summary>
public async Task RecordEarningsJob(RecordEarningsJob message, CancellationToken cancellationToken)
{
    try
    {
        using (var operation = telemetry.StartOperation("JobsService.RecordEarningsJob", message.CommandId.ToString()))
        {
            var stopwatch = Stopwatch.StartNew();
            var service = lifetimeScope.Resolve<IEarningsJobService>();
            // BUG FIX: the supplied cancellationToken was ignored (CancellationToken.None was passed).
            await service.JobStarted(message, cancellationToken).ConfigureAwait(false);
            telemetry.TrackDuration("JobsService.RecordEarningsJob", stopwatch.Elapsed);
            telemetry.StopOperation(operation);
        }
    }
    catch (Exception e)
    {
        logger.LogWarning($"Error recording earning job. Job: {message.JobId}, ukprn: {message.Ukprn}, Error: {e.Message}. {e}");
        throw;
    }
}
/// <summary>
/// Notifies the monitoring endpoint that a job message was processed successfully, sending
/// the generated messages in batches. Send failures are logged but not propagated.
/// </summary>
public async Task ProcessedJobMessage(long jobId, Guid messageId, string messageName, List<GeneratedMessage> generatedMessages)
{
    try
    {
        logger.LogVerbose($"Sending request to record successful processing of event. Job Id: {jobId}, Event: id: {messageId} ");
        var batchSize = 1000; //TODO: this should come from config
        // BUG FIX: the old null-coalescing was applied to Take(...).ToList(), which can never
        // be null, while a null generatedMessages would have thrown NRE first. Guard up front.
        var messages = generatedMessages ?? new List<GeneratedMessage>();
        List<GeneratedMessage> batch;
        var itemProcessedEvent = new RecordJobMessageProcessingStatus
        {
            JobId = jobId,
            Id = messageId,
            MessageName = messageName,
            EndTime = DateTimeOffset.UtcNow,
            GeneratedMessages = messages.Take(batchSize).ToList(),
            Succeeded = true,
        };
        var partitionedEndpointName = config.GetMonitoringEndpointName(jobId);
        await messageSession.Send(partitionedEndpointName, itemProcessedEvent).ConfigureAwait(false);
        var skip = batchSize;
        // Any remaining generated messages are sent in follow-up batches.
        while ((batch = messages.Skip(skip).Take(batchSize).ToList()).Count > 0)
        {
            skip += batchSize;
            var providerEarningsAdditionalMessages = new RecordJobAdditionalMessages
            {
                JobId = jobId,
                GeneratedMessages = batch,
            };
            await messageSession.Send(partitionedEndpointName, providerEarningsAdditionalMessages).ConfigureAwait(false);
        }
        logger.LogDebug(
            $"Sent request to record successful processing of event. Job Id: {jobId}, Event: id: {messageId} ");
    }
    catch (Exception ex)
    {
        // Monitoring is best-effort: a failed status send must not fail the job itself.
        logger.LogWarning($"Failed to send the job status message. Job: {jobId}, Message: {messageId}, {messageName}, Error: {ex.Message}, {ex}");
    }
}
/// <summary>
/// Background loop for one partition: checks the status of every monitored job and stops
/// monitoring those reported finished, pausing between sweeps, until cancelled.
/// </summary>
private async Task Run(string partitionEndpointName)
{
    await LoadExistingJobs().ConfigureAwait(false);
    while (!cancellationToken.IsCancellationRequested)
    {
        // Fire all status checks concurrently for this partition.
        var statusChecks = currentJobs.Select(job => CheckJobStatus(partitionEndpointName, job.Key)).ToList();
        await Task.WhenAll(statusChecks).ConfigureAwait(false);
        var finishedJobs = currentJobs.Where(item => item.Value).ToList();
        foreach (var finishedJob in finishedJobs)
        {
            logger.LogInfo($"Found completed job. Will now stop monitoring job: {finishedJob.Key}, ThreadId {Thread.CurrentThread.ManagedThreadId}, PartitionId {partitionEndpointName}");
            if (!currentJobs.TryRemove(finishedJob.Key, out _))
            {
                logger.LogWarning($"Couldn't remove completed job from jobs list. ThreadId {Thread.CurrentThread.ManagedThreadId}, PartitionId {partitionEndpointName}, Job: {finishedJob.Key}, status: {finishedJob.Value}");
            }
        }
        await Task.Delay(interval, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Generates clawback events for payments previously made against a learning aim that has
/// now been removed from the ILR. Payments that already have a matching clawback (and the
/// clawbacks themselves) are excluded.
/// </summary>
public async Task<IList<CalculatedRequiredLevyAmount>> GenerateClawbackForRemovedLearnerAim(IdentifiedRemovedLearningAim message, CancellationToken cancellationToken)
{
    var learnerPaymentHistory = await paymentClawbackRepository.GetReadOnlyLearnerPaymentHistory(
        message.Ukprn,
        message.ContractType,
        message.Learner.ReferenceNumber,
        message.LearningAim.Reference,
        message.LearningAim.FrameworkCode,
        message.LearningAim.PathwayCode,
        message.LearningAim.ProgrammeType,
        message.LearningAim.StandardCode,
        message.CollectionPeriod.AcademicYear,
        message.CollectionPeriod.Period,
        cancellationToken).ConfigureAwait(false);
    if (!learnerPaymentHistory.Any() || learnerPaymentHistory.Sum(p => p.Amount) == 0)
    {
        logger.LogInfo("no previous payments or sum of all previous payments is already zero so no action required" +
                       $"jobId:{message.JobId}, learnerRef:{message.Learner.ReferenceNumber}, frameworkCode:{message.LearningAim.FrameworkCode}, " +
                       $"pathwayCode:{message.LearningAim.PathwayCode}, programmeType:{message.LearningAim.ProgrammeType}, " +
                       $"standardCode:{message.LearningAim.StandardCode}, learningAimReference:{message.LearningAim.Reference}, " +
                       $"academicYear:{message.CollectionPeriod.AcademicYear}, contractType:{message.ContractType}");
        return (new List<CalculatedRequiredLevyAmount>());
    }
    // Self-join pairs each payment with its clawback (via ClawbackSourcePaymentEventId);
    // both sides of each pair are excluded from further clawback.
    // BUG FIX: materialise the query once — the deferred IEnumerable re-ran the whole
    // join for every Contains(...) call below.
    var paymentToIgnore = learnerPaymentHistory.Join(learnerPaymentHistory,
            payment => payment.EventId,
            clawbackPayment => clawbackPayment.ClawbackSourcePaymentEventId,
            (payment, clawbackPayment) => new[] { payment.EventId, clawbackPayment.EventId })
        .SelectMany(paymentId => paymentId)
        .ToList();
    // A non-zero sum over the ignored pairs means a payment and its clawback don't cancel out.
    if (learnerPaymentHistory.Where(payment => paymentToIgnore.Contains(payment.EventId)).Sum(p => p.Amount) != 0)
    {
        logger.LogWarning("Previous Payment and Clawback do not Match, this clawback will result in over or under payment" +
                          $"jobId:{message.JobId}, learnerRef:{message.Learner.ReferenceNumber}, frameworkCode:{message.LearningAim.FrameworkCode}, " +
                          $"pathwayCode:{message.LearningAim.PathwayCode}, programmeType:{message.LearningAim.ProgrammeType}, " +
                          $"standardCode:{message.LearningAim.StandardCode}, learningAimReference:{message.LearningAim.Reference}, " +
                          $"academicYear:{message.CollectionPeriod.AcademicYear}, contractType:{message.ContractType}");
    }
    var paymentToClawback = learnerPaymentHistory
        .Where(payment => !paymentToIgnore.Contains(payment.EventId))
        .Select(payment =>
        {
            ConvertToClawbackPayment(message, payment);
            return (payment);
        }).ToList();
    return (await ProcessPaymentToClawback(paymentToClawback, cancellationToken));
}
/// <summary>
/// Handles the apprenticeship-created event: persists the new apprenticeship, then
/// publishes an ApprenticeshipUpdated event including any duplicates found. A duplicate
/// apprenticeship is logged and swallowed; other failures are logged and rethrown.
/// </summary>
public async Task Process(ApprenticeshipCreatedEvent createdEvent)
{
    try
    {
        logger.LogDebug($"Now processing the apprenticeship created event. " +
                        $"Apprenticeship id: {createdEvent.ApprenticeshipId}, " +
                        $"employer account id: {createdEvent.AccountId}, " +
                        $"Ukprn: {createdEvent.ProviderId}.");
        var model = mapper.Map<ApprenticeshipModel>(createdEvent);
        var duplicates = await apprenticeshipService.NewApprenticeship(model).ConfigureAwait(false);
        logger.LogDebug($"Apprenticeship saved to database. " +
                        $"Apprenticeship id: {createdEvent.ApprenticeshipId}, " +
                        $"employer account id: {createdEvent.AccountId}, " +
                        $"Ukprn: {createdEvent.ProviderId}.");
        var updatedEvent = mapper.Map<ApprenticeshipUpdated>(model);
        // Attach the duplicates reported by the service to the outgoing event.
        updatedEvent.Duplicates = duplicates.Select(duplicate => new ApprenticeshipDuplicate
        {
            Ukprn = duplicate.Ukprn,
            ApprenticeshipId = duplicate.ApprenticeshipId
        }).ToList();
        var endpointInstance = await endpointInstanceFactory.GetEndpointInstance().ConfigureAwait(false);
        await endpointInstance.Publish(updatedEvent).ConfigureAwait(false);
        logger.LogInfo($"Finished processing the apprenticeship created event. " +
                       $"Apprenticeship id: {createdEvent.ApprenticeshipId}, " +
                       $"employer account id: {createdEvent.AccountId}, " +
                       $"Ukprn: {createdEvent.ProviderId}.");
    }
    catch (ApprenticeshipAlreadyExistsException e)
    {
        // Expected when the same apprenticeship arrives twice; treated as non-fatal.
        logger.LogWarning($"Apprenticeship already exists while trying to add a new apprenticeship: {e.Message}\n" +
                          $"Apprenticeship id: {createdEvent.ApprenticeshipId}, " +
                          $"employer account id: {createdEvent.AccountId}, " +
                          $"Ukprn: {createdEvent.ProviderId}.");
    }
    catch (InvalidOperationException e)
    {
        logger.LogError($"Unhandled exception while adding apprenticeship: {e.Message}\n" +
                        $"Apprenticeship id: {createdEvent.ApprenticeshipId}, " +
                        $"employer account id: {createdEvent.AccountId}, " +
                        $"Ukprn: {createdEvent.ProviderId}.", e);
        throw;
    }
    catch (Exception ex)
    {
        logger.LogError($"Error processing the apprenticeship event. Error: {ex.Message}", ex);
        throw;
    }
}
/// <summary>
/// Determines whether the given earning event has already been received, using a cache
/// keyed on the event's identifying data. New events are recorded in the cache.
/// </summary>
/// <returns>True when the event key is already in the cache (probable duplicate).</returns>
public async Task<bool> IsDuplicate(IPaymentsEvent earningEvent, CancellationToken cancellationToken)
{
    logger.LogDebug($"Checking if earning event of type {earningEvent.GetType().Name} with guid: {earningEvent.EventId} has already been received.");
    var key = new EarningEventKey(earningEvent);
    logger.LogDebug($"Earning event key: {key.LogSafeKey}");
    var alreadySeen = await cache.Contains(key.Key, cancellationToken).ConfigureAwait(false);
    if (alreadySeen)
    {
        logger.LogWarning($"Key: {key.LogSafeKey} found in the cache and is probably a duplicate.");
        return true;
    }
    logger.LogDebug($"New earnings event. Event key: {key.LogSafeKey}, event id: {earningEvent.EventId}");
    await cache.Add(key.Key, key, cancellationToken);
    logger.LogInfo($"Added new earnings event to cache. Key: {key.LogSafeKey}");
    return false;
}
/// <summary>
/// Determines whether the given learner command has already been received, using a cache
/// keyed on the command's identifying data. New commands are recorded in the cache.
/// </summary>
/// <returns>True when the command key is already in the cache (probable duplicate).</returns>
public async Task<bool> IsDuplicate(ProcessLearnerCommand processLearnerCommand, CancellationToken cancellationToken)
{
    logger.LogDebug($"Checking if command of type {processLearnerCommand.GetType().Name} with guid: {processLearnerCommand.CommandId} has already been received.");
    var learnerKey = new LearnerKey(processLearnerCommand);
    logger.LogDebug($"learner key: {learnerKey.LogSafeKey}");
    if (await cache.Contains(learnerKey.Key, cancellationToken).ConfigureAwait(false))
    {
        logger.LogWarning($"Key: {learnerKey.LogSafeKey} found in the cache and is probably a duplicate.");
        return (true);
    }
    logger.LogDebug($"New learner command. Command key: {learnerKey.LogSafeKey}, command id: {processLearnerCommand.CommandId}");
    await cache.Add(learnerKey.Key, learnerKey, cancellationToken);
    // BUG FIX: log message was copy-pasted from the earning-event overload and wrongly
    // said "earnings event"; this method caches learner commands.
    logger.LogInfo($"Added new learner command to cache. Key: {learnerKey.LogSafeKey}");
    return (false);
}
/// <summary>
/// Builds (or, when jobId is 0, only estimates) the submissions summary metrics for a
/// collection period and evaluates them against the configured tolerance bounds.
/// </summary>
/// <returns>The computed submissions summary metrics.</returns>
public async Task<SubmissionsSummaryModel> ValidateSubmissionWindow(long jobId, short academicYear, byte collectionPeriod, CancellationToken cancellationToken)
{
    // jobId == 0 signals a dry run: metrics are computed but not persisted.
    var isEstimatingMetrics = jobId == 0;
    var logMessage = isEstimatingMetrics ? "estimating" : "building";
    try
    {
        logger.LogDebug($"Started {logMessage} metrics for job: {jobId}, Academic year: {academicYear}, Collection period: {collectionPeriod}");
        var stopwatch = Stopwatch.StartNew();
        var submissionSummaries = await submissionMetricsRepository.GetSubmissionsSummaryMetrics(jobId, academicYear, collectionPeriod, cancellationToken);
        var metrics = submissionsSummary.GetMetrics(jobId, academicYear, collectionPeriod, submissionSummaries);
        var collectionPeriodTolerance = await submissionMetricsRepository.GetCollectionPeriodTolerance(collectionPeriod, academicYear, cancellationToken);
        // NOTE(review): tolerance may be null here, passing null bounds through —
        // confirm CalculateIsWithinTolerance applies sensible defaults.
        submissionsSummary.CalculateIsWithinTolerance(collectionPeriodTolerance?.SubmissionToleranceLower, collectionPeriodTolerance?.SubmissionToleranceUpper);
        cancellationToken.ThrowIfCancellationRequested();
        var dataDuration = stopwatch.ElapsedMilliseconds;
        logger.LogDebug($"finished getting data from databases for job: {jobId}, Took: {dataDuration}ms.");
        if (!isEstimatingMetrics)
        {
            await submissionMetricsRepository.SaveSubmissionsSummaryMetrics(metrics, cancellationToken);
        }
        stopwatch.Stop();
        SendTelemetry(metrics, stopwatch.ElapsedMilliseconds);
        logger.LogInfo($"Finished {logMessage} Submissions Summary Metrics for job: {jobId}, Academic year: {academicYear}, Collection period: {collectionPeriod}. Took: {stopwatch.ElapsedMilliseconds}ms");
        return (metrics);
    }
    catch (Exception e)
    {
        logger.LogWarning($"Error {logMessage} the Submissions Summary metrics report for job: {jobId}, Error: {e}");
        throw;
    }
}
/// <summary>
/// Ends the current state manager transaction: aborts (rolls back) when an exception
/// occurred during message handling, otherwise commits. The transaction is always
/// disposed, whichever path is taken.
/// </summary>
/// <param name="ex">The exception raised during handling, or null when handling succeeded.</param>
public async Task End(Exception ex = null)
{
    try
    {
        if (ex != null)
        {
            logger.LogWarning($"Rolling back the state manager transaction due to exception during message handling. Transaction Id: {reliableStateManagerTransactionProvider.Current.TransactionId}, Exception: {ex.Message}");
            reliableStateManagerTransactionProvider.Current.Abort();
            return;
        }
        logger.LogVerbose($"Completing state manager transaction. Transaction Id: {reliableStateManagerTransactionProvider.Current.TransactionId}");
        await reliableStateManagerTransactionProvider.Current.CommitAsync().ConfigureAwait(false);
        logger.LogDebug($"Completed state manager transaction. TransactionId: {reliableStateManagerTransactionProvider.Current.TransactionId}");
    }
    finally
    {
        // Dispose regardless of commit/abort outcome (and even if commit throws).
        reliableStateManagerTransactionProvider.Current.Dispose();
    }
}
/// <summary>
/// Adds a provider payment to the cache, discarding payments whose ILR submission is no
/// longer the provider's current one (out-of-sequence payments).
/// </summary>
public async Task ProcessPayment(ProviderPaymentEventModel payment, CancellationToken cancellationToken)
{
    // Idiom: Stopwatch.StartNew() replaces new Stopwatch() + Start().
    var stopwatch = Stopwatch.StartNew();
    var isCurrentProviderIlr = await IsCurrentProviderIlr(payment.JobId, payment.Ukprn, payment.IlrSubmissionDateTime, cancellationToken).ConfigureAwait(false);
    if (!isCurrentProviderIlr)
    {
        // A newer ILR submission supersedes this payment; drop it but record the event.
        paymentLogger.LogWarning($"Received out of sequence payment with Job Id {payment.JobId} for Ukprn {payment.Ukprn} ");
        telemetry.TrackEvent("Provider payments service received out of sequence payment");
        return;
    }
    paymentLogger.LogVerbose($"Received valid payment with Job Id {payment.JobId} for Ukprn {payment.Ukprn} ");
    await paymentCache.AddPayment(payment, cancellationToken);
    stopwatch.Stop();
    telemetry.TrackDuration(GetType().FullName + ".ProcessPayment", stopwatch.Elapsed);
    paymentLogger.LogInfo($"Finished adding the payment to the cache. EventId: {payment.EventId}, FundingSourceId: {payment.FundingSourceId}, UKPRN: {payment.Ukprn}");
}
/// <summary>
/// Handles the month-end ACT1 completion payment command for one provider by publishing
/// each of its stored completion payment events to the DAS endpoint.
/// </summary>
public async Task Handle(ProcessProviderMonthEndAct1CompletionPaymentCommand message, IMessageHandlerContext context)
{
    paymentLogger.LogInfo($"Processing Provider Month End Act1 Completion Payment Command with ukprn: {message.Ukprn}, Message Id : {context.MessageId}");
    var paymentEvents = await completionPaymentService.GetAct1CompletionPaymentEvents(message);
    // Nothing to publish for this provider/period.
    if (!paymentEvents.Any())
    {
        paymentLogger.LogWarning($"No Act1 Completion Payment Event Found for ukprn: {message.Ukprn}, Collection: {message.CollectionPeriod.Period:00}-{message.CollectionPeriod.AcademicYear}, job: {message.JobId}");
        return;
    }
    var dasEndPoint = await dasEndpointFactory.GetEndpointInstanceAsync();
    foreach (var paymentEvent in paymentEvents)
    {
        paymentLogger.LogDebug($"Processing Act1 Completion Payment Event. Ukprn: {message.Ukprn}, Collection: {message.CollectionPeriod.Period:00}-{message.CollectionPeriod.AcademicYear}, job: {message.JobId}");
        await dasEndPoint.Publish(paymentEvent);
    }
    paymentLogger.LogInfo($"Successfully Processed Month End Act1 Completion Payment Command for ukprn: {message.Ukprn}, Collection:{message.CollectionPeriod.Period:00}-{message.CollectionPeriod.AcademicYear}, job: {message.JobId}");
}
/// <summary>Logs the supplied message at warning level.</summary>
public void Warn(string message)
{
    logger.LogWarning(message);
}
/// <summary>
/// Entry point for processing an ILR submission (FM36) job-context message: clears
/// previously submitted learner aims, processes the FM36 output, signals that earnings
/// were received, and waits for the downstream job to finish.
/// </summary>
/// <returns>True when the message was handled (including benign no-op cases); false on failure, cancellation or timeout.</returns>
public async Task<bool> HandleAsync(JobContextMessage message, CancellationToken cancellationToken)
{
    logger.LogDebug($"Processing Earning Event Service event for Job Id : {message.JobId}");
    try
    {
        // Some messages are pure submission events that need no FM36 processing.
        if (await HandleSubmissionEvents(message))
        {
            return (true);
        }
        using (var operation = telemetry.StartOperation($"FM36Processing:{message.JobId}"))
        {
            var stopwatch = Stopwatch.StartNew();
            // Guard against the same job being processed concurrently.
            if (await jobStatusService.JobCurrentlyRunning(message.JobId))
            {
                logger.LogWarning($"Job {message.JobId} is already running.");
                return (false);
            }
            var collectionPeriod = int.Parse(message.KeyValuePairs[JobContextMessageKey.ReturnPeriod].ToString());
            var fileName = message.KeyValuePairs[JobContextMessageKey.Filename]?.ToString();
            var fm36Output = await GetFm36Global(message, collectionPeriod, cancellationToken)
                .ConfigureAwait(false);
            // No FM36 output means there is nothing to process; treated as handled.
            if (fm36Output == null)
            {
                return (true);
            }
            cancellationToken.ThrowIfCancellationRequested();
            // Remove aims from any earlier submission before processing the new one.
            await ClearSubmittedLearnerAims(collectionPeriod, fm36Output.Year, message.SubmissionDateTimeUtc, fm36Output.UKPRN, cancellationToken).ConfigureAwait(false);
            cancellationToken.ThrowIfCancellationRequested();
            await ProcessFm36Global(message, collectionPeriod, fm36Output, fileName, cancellationToken)
                .ConfigureAwait(false);
            await SendReceivedEarningsEvent(message.JobId, message.SubmissionDateTimeUtc, fm36Output.Year, collectionPeriod, fm36Output.UKPRN).ConfigureAwait(false);
            stopwatch.Stop();
            var duration = stopwatch.ElapsedMilliseconds;
            telemetry.TrackEvent("Processed ILR Submission",
                new Dictionary<string, string>
                {
                    { TelemetryKeys.Count, fm36Output.Learners.Count.ToString() },
                    { TelemetryKeys.CollectionPeriod, collectionPeriod.ToString() },
                    { TelemetryKeys.AcademicYear, fm36Output.Year },
                    { TelemetryKeys.JobId, message.JobId.ToString() },
                    { TelemetryKeys.Ukprn, fm36Output.UKPRN.ToString() },
                },
                new Dictionary<string, double>
                {
                    { TelemetryKeys.Duration, duration },
                    { TelemetryKeys.Count, fm36Output.Learners.Count },
                });
            // Cancellation after processing started is logged as an error, not thrown.
            if (cancellationToken.IsCancellationRequested)
            {
                logger.LogError($"Job {message.JobId} has been cancelled after job has started processing. Ukprn: {fm36Output.UKPRN}");
                return (false);
            }
            telemetry.StopOperation(operation);
            if (fm36Output.Learners.Count == 0)
            {
                logger.LogWarning($"Received ILR with 0 FM36 learners. Ukprn: {fm36Output.UKPRN}, job id: {message.JobId}.");
                return (true);
            }
            if (await jobStatusService.WaitForJobToFinish(message.JobId, cancellationToken))
            {
                logger.LogInfo($"Successfully processed ILR Submission. Job Id: {message.JobId}, Ukprn: {fm36Output.UKPRN}, Submission Time: {message.SubmissionDateTimeUtc}");
                return (true);
            }
            logger.LogError($"Job failed to finished within the allocated time. Job Id: {message.JobId}");
            return (false);
        }
    }
    catch (OperationCanceledException)
    {
        logger.LogError($"Cancellation token cancelled for job: {message.JobId}");
        return (false);
    }
    catch (Exception ex)
    {
        logger.LogFatal($"Error while handling EarningService event. Error: {ex.Message}", ex);
        return (false); //TODO: change back to throw when DC code is a little more defensive
    }
}
/// <summary>
/// Drains up to <paramref name="batchSize"/> payment events from the cache and
/// bulk-copies them into SQL Server inside a write-only transaction scope.
/// </summary>
/// <param name="batchSize">Maximum number of cached records to take; also used as the SqlBulkCopy batch size.</param>
/// <param name="cancellationToken">Token passed to the cache read, connection open and bulk write.</param>
/// <returns>The number of records processed; 0 when the cache was empty.</returns>
public async Task <int> Process(int batchSize, CancellationToken cancellationToken)
{
    logger.LogVerbose("Processing batch.");
    var batch = await cache.GetPayments(batchSize, cancellationToken);
    if (batch.Count < 1)
    {
        logger.LogVerbose("No records found to process.");
        return(0);
    }

    logger.LogDebug($"Processing {batch.Count} records: {string.Join(", ", batch.Select(m => m.EventId))}");
    // GetDataTable may yield multiple tables for one batch of events.
    var data = dataTable.GetDataTable(batch);
    using (var scope = TransactionScopeFactory.CreateWriteOnlyTransaction())
    using (var sqlConnection = new SqlConnection(connectionString))
    {
        try
        {
            await sqlConnection.OpenAsync(cancellationToken);
            using (var bulkCopy = new SqlBulkCopy(sqlConnection))
            {
                foreach (var table in data)
                {
                    // Mappings are per-table; clear the ones left from the previous iteration.
                    bulkCopy.ColumnMappings.Clear();
                    // Multiple tables each carry their own destination name; a
                    // single table uses the configured default destination.
                    bulkCopy.DestinationTableName = data.Count > 1 ? table.TableName : dataTable.TableName;
                    bulkCopy.BulkCopyTimeout = 0; // 0 = no timeout; large batches may take a while
                    bulkCopy.BatchSize = batchSize;
                    // NOTE(review): this logs every row regardless of log level,
                    // which is costly for big batches — confirm it is still wanted.
                    foreach (DataRow tableRow in table.Rows)
                    {
                        logger.LogVerbose($"Saving row to table: {bulkCopy.DestinationTableName}, Row: {ToLogString(tableRow)}");
                    }

                    // Map source columns to identically-named destination columns.
                    foreach (DataColumn dataColumn in table.Columns)
                    {
                        bulkCopy.ColumnMappings.Add(dataColumn.ColumnName, dataColumn.ColumnName);
                    }

                    try
                    {
                        await bulkCopy.WriteToServerAsync(table, cancellationToken).ConfigureAwait(false);
                    }
                    catch (SystemException ex)
                    {
                        // Bulk write failed: fall back to single-record writes —
                        // presumably to isolate the offending row(s); confirm
                        // against TrySingleRecord's implementation.
                        logger.LogWarning($"Error bulk writing to server. Processing single records. \n\n" +
                                          $"{ex.Message}\n\n" +
                                          $"{ex.StackTrace}");
                        await TrySingleRecord(bulkCopy, table, sqlConnection).ConfigureAwait(false);
                    }
                }

                logger.LogDebug($"Finished bulk copying {batch.Count} of {typeof(T).Name} records.");
            }

            // Commit the transaction only after every table was written.
            scope.Complete();
        }
        catch (Exception e)
        {
            logger.LogError($"Error performing bulk copy for model type: {typeof(T).Name}. Error: {e.Message}", e);
            throw;
        }
    }

    return(batch.Count);
}
/// <summary>
/// Service-bus message pump: repeatedly receives batches from the endpoint
/// queue, groups the deserialised messages by application type and processes
/// each group in parallel. Runs until the cancellation token is signalled.
/// </summary>
/// <param name="cancellationToken">Stops the pump; checked between receives and per message.</param>
private async Task Listen(CancellationToken cancellationToken)
{
    var connection = new ServiceBusConnection(connectionString);
    // Prefetch count 0: messages are only taken when explicitly received.
    var messageReceiver = new MessageReceiver(connection, EndpointName, ReceiveMode.PeekLock, RetryPolicy.Default, 0);
    // NOTE(review): errorQueueSender is created but never used or closed in
    // this method — see the dead-letter TODO below; confirm intent.
    var errorQueueSender = new MessageSender(connection, errorQueueName, RetryPolicy.Default);
    try
    {
        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                var pipeLineStopwatch = Stopwatch.StartNew();
                // Accumulate messages over at most 10 receive attempts (each
                // waiting up to 2 seconds for up to 200 messages), stopping
                // early once more than 200 have been gathered or the queue runs dry.
                var messages = new List <Message>();
                for (var i = 0; i < 10 && messages.Count <= 200; i++)
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    var receivedMessages = await messageReceiver.ReceiveAsync(200, TimeSpan.FromSeconds(2))
                        .ConfigureAwait(false);
                    if (receivedMessages == null || !receivedMessages.Any())
                    {
                        break;
                    }
                    messages.AddRange(receivedMessages);
                }

                if (!messages.Any())
                {
                    // Queue empty: back off before polling again.
                    await Task.Delay(2000, cancellationToken);
                    continue;
                }

                // Group deserialised messages by application type, keeping each
                // message's lock token so it can be settled after processing.
                var groupedMessages = new Dictionary <Type, List <(string, object)> >();
                foreach (var message in messages)
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    try
                    {
                        var applicationMessage = DeserializeMessage(message);
                        var key = applicationMessage.GetType();
                        var applicationMessages = groupedMessages.ContainsKey(key)
                            ? groupedMessages[key]
                            : groupedMessages[key] = new List <(string, object)>();
                        applicationMessages.Add((message.SystemProperties.LockToken, applicationMessage));
                    }
                    catch (Exception e)
                    {
                        // Undeserialisable message: log and dead-letter it so the pump can continue.
                        logger.LogError($"Error deserialising the message. Error: {e.Message}", e);
                        //TODO: should use the error queue instead of dead letter queue
                        await messageReceiver.DeadLetterAsync(message.SystemProperties.LockToken)
                            .ConfigureAwait(false);
                    }
                }

                var stopwatch = Stopwatch.StartNew();
                // Process each message-type group concurrently.
                await Task.WhenAll(groupedMessages.Select(group => ProcessMessages(group.Key, group.Value, messageReceiver, cancellationToken)));
                stopwatch.Stop();
                //RecordAllBatchProcessTelemetry(stopwatch.ElapsedMilliseconds, messages.Count);
                pipeLineStopwatch.Stop();
                //RecordPipelineTelemetry(pipeLineStopwatch.ElapsedMilliseconds, messages.Count);
            }
            catch (TaskCanceledException)
            {
                logger.LogWarning("Cancelling communication listener.");
                return;
            }
            catch (OperationCanceledException)
            {
                logger.LogWarning("Cancelling communication listener.");
                return;
            }
            catch (Exception ex)
            {
                // Unexpected failure: log and keep the pump alive.
                logger.LogError($"Error listening for message. Error: {ex.Message}", ex);
            }
        }
    }
    finally
    {
        // Close the receiver and connection on shutdown.
        if (!messageReceiver.IsClosedOrClosing)
        {
            await messageReceiver.CloseAsync();
        }
        if (!connection.IsClosedOrClosing)
        {
            await connection.CloseAsync();
        }
    }
}
/// <summary>
/// Service-bus message pump using a pool of three batch receivers: receives in
/// parallel, groups messages by application type and processes each group
/// concurrently, recording receive/process/pipeline telemetry. Runs until the
/// cancellation token is signalled.
/// </summary>
/// <param name="cancellationToken">Stops the pump; checked per loop iteration and per message.</param>
private async Task Listen(CancellationToken cancellationToken)
{
    var connection = new ServiceBusConnection(connectionString);
    var messageReceivers = new List <BatchMessageReceiver>();
    messageReceivers.AddRange(Enumerable.Range(0, 3)
        .Select(i => new BatchMessageReceiver(connection, EndpointName)));
    // NOTE(review): errorQueueSender is never used in this method and is not
    // closed on shutdown — confirm whether it is still needed.
    var errorQueueSender = new MessageSender(connection, errorQueueName, RetryPolicy.Default);
    try
    {
        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                var pipeLineStopwatch = Stopwatch.StartNew();
                var receiveTimer = Stopwatch.StartNew();
                // Fan out one receive call per receiver, then merge the results.
                var receiveTasks = messageReceivers.Select(receiver => ReceiveMessages(receiver, cancellationToken)).ToList();
                await Task.WhenAll(receiveTasks).ConfigureAwait(false);
                var messages = receiveTasks.SelectMany(task => task.Result).ToList();
                receiveTimer.Stop();
                if (!messages.Any())
                {
                    // Queue empty: back off before polling again.
                    await Task.Delay(2000, cancellationToken);
                    continue;
                }

                RecordMetric("ReceiveMessages", receiveTimer.ElapsedMilliseconds, messages.Count);
                // Group by application message type, keeping the owning receiver
                // and the raw received message so each can be settled later.
                var groupedMessages = new Dictionary <Type, List <(object Message, BatchMessageReceiver MessageReceiver, Message ReceivedMessage)> >();
                foreach (var message in messages)
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    var key = message.Message.GetType();
                    var applicationMessages = groupedMessages.ContainsKey(key)
                        ? groupedMessages[key]
                        : groupedMessages[key] = new List <(object Message, BatchMessageReceiver MessageReceiver, Message ReceivedMessage)>();
                    applicationMessages.Add(message);
                }

                var stopwatch = Stopwatch.StartNew();
                // Process each message-type group concurrently.
                await Task.WhenAll(groupedMessages.Select(group => ProcessMessages(group.Key, group.Value, cancellationToken)));
                stopwatch.Stop();
                RecordProcessedAllBatchesTelemetry(stopwatch.ElapsedMilliseconds, messages.Count);
                pipeLineStopwatch.Stop();
                RecordPipelineTelemetry(pipeLineStopwatch.ElapsedMilliseconds, messages.Count);
            }
            catch (TaskCanceledException)
            {
                logger.LogWarning("Cancelling communication listener.");
                return;
            }
            catch (OperationCanceledException)
            {
                logger.LogWarning("Cancelling communication listener.");
                return;
            }
            catch (Exception ex)
            {
                // Unexpected failure: log and keep the pump alive.
                logger.LogError($"Error listening for message. Error: {ex.Message}", ex);
            }
        }
    }
    finally
    {
        // Close all receivers and the connection on shutdown.
        await Task.WhenAll(messageReceivers.Select(receiver => receiver.Close())).ConfigureAwait(false);
        if (!connection.IsClosedOrClosing)
        {
            await connection.CloseAsync();
        }
    }
}
/// <summary>
/// Compares the data-lock failures on the incoming earning event with the
/// failures previously stored for the same learner/aim and builds status-change
/// events (changed-to-failed / changed-to-passed / failure-changed) for each
/// transaction type + delivery period whose status changed. The stored failures
/// are replaced within the same serialisable transaction.
/// </summary>
/// <param name="dataLockEvent">The data-lock earning event to evaluate.</param>
/// <returns>The status-change events produced (possibly empty).</returns>
public async Task <List <DataLockStatusChanged> > ProcessDataLockFailure(DataLockEvent dataLockEvent)
{
    var result = new List <DataLockStatusChanged>();
    // One accumulator event per kind of status change; each collects the
    // affected transaction types and their earning periods.
    var changedToFailed = new DataLockStatusChangedToFailed { TransactionTypesAndPeriods = new Dictionary <TransactionType, List <EarningPeriod> >() };
    var changedToPassed = new DataLockStatusChangedToPassed { TransactionTypesAndPeriods = new Dictionary <TransactionType, List <EarningPeriod> >() };
    var failureChanged = new DataLockFailureChanged { TransactionTypesAndPeriods = new Dictionary <TransactionType, List <EarningPeriod> >() };
    var failuresToDelete = new List <long>();
    var failuresToRecord = new List <DataLockFailureEntity>();

    var newFailuresGroupedByTypeAndPeriod = GetFailuresGroupedByTypeAndPeriod(dataLockEvent);

    // Serialisable isolation: the read-compare-replace of stored failures must
    // not interleave with a concurrent run for the same learner/aim.
    using (var scope = TransactionScopeFactory.CreateSerialisableTransaction())
    {
        var oldFailures = await dataLockFailureRepository.GetFailures(
            dataLockEvent.Ukprn,
            dataLockEvent.Learner.ReferenceNumber,
            dataLockEvent.LearningAim.FrameworkCode,
            dataLockEvent.LearningAim.PathwayCode,
            dataLockEvent.LearningAim.ProgrammeType,
            dataLockEvent.LearningAim.StandardCode,
            dataLockEvent.LearningAim.Reference,
            dataLockEvent.CollectionYear
        ).ConfigureAwait(false);

        // Union of (transaction type, delivery period) keys across new and old failures.
        var fullListOfKeys = newFailuresGroupedByTypeAndPeriod.Keys
            .Concat(oldFailures.Select(f => (f.TransactionType, f.DeliveryPeriod)))
            .Distinct()
            .ToList();

        foreach (var key in fullListOfKeys)
        {
            var transactionType = key.Item1;
            var period = key.Item2;

            // A stored failure exists but the incoming earning has no entry for
            // this type/period — nothing to compare, so log and skip.
            if (!newFailuresGroupedByTypeAndPeriod.TryGetValue(key, out var newPeriod))
            {
                paymentLogger.LogWarning(
                    $"Earning does not have transaction type {transactionType} for period {period} which is present in DataLockFailure. UKPRN {dataLockEvent.Ukprn}, LearnRefNumber: {dataLockEvent.Learner.ReferenceNumber}");
                continue;
            }

            var oldFailureEntity = oldFailures.FirstOrDefault(f => f.TransactionType == transactionType && f.DeliveryPeriod == period);
            var oldFailure = oldFailureEntity?.EarningPeriod.DataLockFailures;
            var newFailure = newPeriod?.DataLockFailures;

            var statusChange = dataLockStatusService.GetStatusChange(oldFailure, newFailure);

            switch (statusChange)
            {
                case DataLockStatusChange.ChangedToFailed:
                    // New failure where none was stored: record it.
                    AddTypeAndPeriodToEvent(changedToFailed, transactionType, newPeriod, dataLockEvent);
                    failuresToRecord.Add(CreateEntity(dataLockEvent, transactionType, period, newPeriod));
                    break;
                case DataLockStatusChange.ChangedToPassed:
                    // Previously stored failure no longer applies: delete it.
                    AddTypeAndPeriodToEvent(changedToPassed, transactionType, newPeriod, dataLockEvent);
                    failuresToDelete.Add(oldFailureEntity.Id);
                    break;
                case DataLockStatusChange.FailureChanged:
                    // Failure details differ: replace the stored entity.
                    AddTypeAndPeriodToEvent(failureChanged, transactionType, newPeriod, dataLockEvent);
                    failuresToRecord.Add(CreateEntity(dataLockEvent, transactionType, period, newPeriod));
                    failuresToDelete.Add(oldFailureEntity.Id);
                    break;
            }
        }

        // Only emit accumulator events that gathered at least one type/period.
        if (changedToFailed.TransactionTypesAndPeriods.Count > 0)
        {
            result.Add(changedToFailed);
        }
        if (changedToPassed.TransactionTypesAndPeriods.Count > 0)
        {
            result.Add(changedToPassed);
        }
        if (failureChanged.TransactionTypesAndPeriods.Count > 0)
        {
            result.Add(failureChanged);
        }

        // Copy the shared earning-event fields onto each outgoing event.
        foreach (var dataLockStatusChanged in result)
        {
            mapper.Map(dataLockEvent, dataLockStatusChanged);
        }

        await dataLockFailureRepository.ReplaceFailures(failuresToDelete, failuresToRecord, dataLockEvent.EarningEventId, dataLockEvent.EventId).ConfigureAwait(false);
        scope.Complete();

        paymentLogger.LogDebug(
            $"Deleted {failuresToDelete.Count} old DL failures, created {failuresToRecord.Count} new for UKPRN {dataLockEvent.Ukprn} Learner Ref {dataLockEvent.Learner.ReferenceNumber} on R{dataLockEvent.CollectionPeriod.Period:00}");

        return(result);
    }
}
/// <summary>
/// Handles a period end job context message: creates the matching period end
/// event, publishes it (deduplicating against an existing non-failed DC job
/// where applicable) and then waits for the resulting job to finish when the
/// event type requires it.
/// </summary>
/// <param name="message">DC job context message describing the period end task.</param>
/// <param name="cancellationToken">Token used while waiting for job completion.</param>
/// <returns>True when the task completed or needs no wait; false when the awaited job did not finish.</returns>
public async Task<bool> HandleAsync(JobContextMessage message, CancellationToken cancellationToken)
{
    try
    {
        logger.LogDebug("Getting task type from period end message.");
        var taskType = GetTask(message);
        logger.LogDebug("Got period end type now create the period end event.");
        var periodEndEvent = CreatePeriodEndEvent(taskType);
        logger.LogDebug($"Created period end event. Type: {periodEndEvent.GetType().Name}");
        periodEndEvent.JobId = message.JobId;
        periodEndEvent.CollectionPeriod = new CollectionPeriod
        {
            AcademicYear = Convert.ToInt16(GetMessageValue(message, JobContextMessageConstants.KeyValuePairs.CollectionYear)),
            Period = Convert.ToByte(GetMessageValue(message, JobContextMessageConstants.KeyValuePairs.ReturnPeriod))
        };
        logger.LogDebug($"Got period end event: {periodEndEvent.ToJson()}");

        var jobIdToWaitFor = message.JobId;
        if (taskType == PeriodEndTaskType.PeriodEndReports)
        {
            // Reports: publish only — no period end job is recorded.
            var endpointInstance = await endpointInstanceFactory.GetEndpointInstance();
            await endpointInstance.Publish(periodEndEvent);
            logger.LogInfo(
                $"Finished publishing the period end event. Name: {periodEndEvent.GetType().Name}, JobId: {periodEndEvent.JobId}, Collection Period: {periodEndEvent.CollectionPeriod.Period}-{periodEndEvent.CollectionPeriod.AcademicYear}.");
        }
        else if (taskType == PeriodEndTaskType.PeriodEndSubmissionWindowValidation)
        {
            // Submission window validation: record the job, then publish.
            await RecordPeriodEndJob(taskType, periodEndEvent).ConfigureAwait(false);
            var endpointInstance = await endpointInstanceFactory.GetEndpointInstance();
            await endpointInstance.Publish(periodEndEvent);
            logger.LogInfo(
                $"Finished publishing the period end event. Name: {periodEndEvent.GetType().Name}, JobId: {periodEndEvent.JobId}, Collection Period: {periodEndEvent.CollectionPeriod.Period}-{periodEndEvent.CollectionPeriod.AcademicYear}.");
        }
        else
        {
            // All other task types are deduplicated: only record-and-publish
            // when no non-failed DC job already exists for this period;
            // otherwise wait on the job that already exists.
            var existingNonFailedJobId = await jobsDataContext.GetNonFailedDcJobId(GetJobType(taskType), periodEndEvent.CollectionPeriod.AcademicYear, periodEndEvent.CollectionPeriod.Period);
            if (existingNonFailedJobId.GetValueOrDefault() == 0)
            {
                await RecordPeriodEndJob(taskType, periodEndEvent).ConfigureAwait(false);
                var endpointInstance = await endpointInstanceFactory.GetEndpointInstance();
                await endpointInstance.Publish(periodEndEvent);
                logger.LogInfo(
                    $"Finished publishing the period end event. Name: {periodEndEvent.GetType().Name}, JobId: {periodEndEvent.JobId}, Collection Period: {periodEndEvent.CollectionPeriod.Period}-{periodEndEvent.CollectionPeriod.AcademicYear}.");
            }
            else
            {
                jobIdToWaitFor = existingNonFailedJobId.GetValueOrDefault();
                logger.LogWarning($"Job already exists, will not be published. Name: {periodEndEvent.GetType().Name}, JobId: {periodEndEvent.JobId}, Collection Period: {periodEndEvent.CollectionPeriod.Period}-{periodEndEvent.CollectionPeriod.AcademicYear}.");
            }
        }

        // TODO: This is a temporary workaround to enable the PeriodEndStart and PeriodEndStop messages to return true as otherwise the service will
        // TODO: just hang as there is nothing implemented to handle the Start and Stop events and so the job status service will never get a completion and so this will never return true.
        // PV2-1345 will handle PeriodEndStart
        // PeriodEndStoppedEvent will be handled by the PeriodEndStoppedEventHandler which in turn is handled by the ProcessProviderMonthEndCommandHandler but we don't want to wait for it
        if (periodEndEvent is PeriodEndStoppedEvent || periodEndEvent is PeriodEndRequestReportsEvent)
        {
            logger.LogDebug("Returning as this is either a PeriodEndStop or PeriodEndRequestReports event");
            return(true);
        }

        // Plain type pattern: the previous declaration pattern bound an unused
        // variable (periodEndStartedEvent), which only added compiler noise.
        if (periodEndEvent is PeriodEndStartedEvent)
        {
            return(await jobStatusService.WaitForPeriodEndStartedToFinish(jobIdToWaitFor, cancellationToken));
        }

        if (periodEndEvent is PeriodEndRequestValidateSubmissionWindowEvent)
        {
            return(await jobStatusService.WaitForPeriodEndSubmissionWindowValidationToFinish(jobIdToWaitFor, cancellationToken));
        }

        await jobStatusService.WaitForJobToFinish(jobIdToWaitFor, cancellationToken);
        return(true);
    }
    catch (Exception ex)
    {
        logger.LogError($"Failed to process job context message. Message: {message.ToJson()}", ex);
        throw;
    }
}
/// <summary>
/// Builds period end metrics for a collection period: runs the DC earnings and
/// payments queries in parallel (with a 270 second overall deadline), builds a
/// summary per provider plus an overall summary, persists them and sends
/// telemetry.
/// </summary>
/// <param name="jobId">The period end job id the metrics are recorded against.</param>
/// <param name="academicYear">Academic year of the collection period.</param>
/// <param name="collectionPeriod">Collection period number within the year.</param>
/// <param name="cancellationToken">Cancels the queries and the deadline delay.</param>
/// <exception cref="InvalidOperationException">Thrown when the data queries exceed the deadline.</exception>
public async Task BuildMetrics(long jobId, short academicYear, byte collectionPeriod, CancellationToken cancellationToken)
{
    try
    {
        logger.LogDebug($"Building period end metrics for {academicYear}, {collectionPeriod} using job id {jobId}");
        var stopwatch = Stopwatch.StartNew();

        // Kick off all data queries concurrently.
        var dcDataContext = dcMetricsDataContextFactory.CreateContext(academicYear);
        var dcEarningsTask = dcDataContext.GetEarnings(academicYear, collectionPeriod, cancellationToken);
        var transactionTypesTask = periodEndMetricsRepository.GetTransactionTypesByContractType(academicYear, collectionPeriod, cancellationToken);
        var fundingSourceTask = periodEndMetricsRepository.GetFundingSourceAmountsByContractType(academicYear, collectionPeriod, cancellationToken);
        var currentPaymentTotals = periodEndMetricsRepository.GetYearToDatePayments(academicYear, collectionPeriod, cancellationToken);
        var dataLockedEarningsTask = periodEndMetricsRepository.GetDataLockedEarningsTotals(academicYear, collectionPeriod, cancellationToken);
        var dataLockedAlreadyPaidTask = periodEndMetricsRepository.GetAlreadyPaidDataLockedEarnings(academicYear, collectionPeriod, cancellationToken);
        var heldBackCompletionAmountsTask = periodEndMetricsRepository.GetHeldBackCompletionPaymentsTotals(academicYear, collectionPeriod, cancellationToken);

        var dataTask = Task.WhenAll(
            dcEarningsTask,
            transactionTypesTask,
            fundingSourceTask,
            currentPaymentTotals,
            dataLockedEarningsTask,
            dataLockedAlreadyPaidTask,
            heldBackCompletionAmountsTask);
        var waitTask = Task.Delay(TimeSpan.FromSeconds(270), cancellationToken);
        // Fix: await Task.WhenAny instead of the blocking Task.WaitAny — the
        // original pinned a thread-pool thread for up to 270 seconds inside an
        // async method. The deadline/fault semantics are unchanged.
        await Task.WhenAny(dataTask, waitTask).ConfigureAwait(false);
        cancellationToken.ThrowIfCancellationRequested();
        if (!dataTask.IsCompleted)
        {
            throw new InvalidOperationException($"Took too long to get data for the period end metrics. job: {jobId}, Collection period: {collectionPeriod}, Academic Year: {academicYear}");
        }

        var providerSummaries = new List<ProviderPeriodEndSummaryModel>();
        // A provider can appear in payments without earnings (or vice versa),
        // so take the union of both sources.
        var providersFromPayments = currentPaymentTotals.Result.Select(x => x.Ukprn).Distinct();
        var providersFromEarnings = dcEarningsTask.Result.Select(x => x.Ukprn).Distinct();
        var distinctProviderUkprns = providersFromEarnings.Union(providersFromPayments);

        var periodEndSummary = periodEndSummaryFactory.CreatePeriodEndSummary(jobId, collectionPeriod, academicYear);
        foreach (var ukprn in distinctProviderUkprns)
        {
            var providerSummary = periodEndSummaryFactory.CreatePeriodEndProviderSummary(ukprn, jobId, collectionPeriod, academicYear);
            providerSummary.AddDcEarnings(dcEarningsTask.Result.Where(x => x.Ukprn == ukprn));
            providerSummary.AddTransactionTypes(transactionTypesTask.Result.Where(x => x.Ukprn == ukprn));
            providerSummary.AddFundingSourceAmounts(fundingSourceTask.Result.Where(x => x.Ukprn == ukprn));
            // Missing per-provider rows fall back to empty amounts / zero.
            providerSummary.AddPaymentsYearToDate(currentPaymentTotals.Result.FirstOrDefault(x => x.Ukprn == ukprn) ?? new ProviderContractTypeAmounts());
            providerSummary.AddDataLockedEarnings(dataLockedEarningsTask.Result.FirstOrDefault(x => x.Ukprn == ukprn)?.TotalAmount ?? 0m);
            providerSummary.AddDataLockedAlreadyPaid(dataLockedAlreadyPaidTask.Result.FirstOrDefault(x => x.Ukprn == ukprn)?.TotalAmount ?? 0m);
            providerSummary.AddHeldBackCompletionPayments(heldBackCompletionAmountsTask.Result.FirstOrDefault(x => x.Ukprn == ukprn) ?? new ProviderContractTypeAmounts());
            var providerSummaryModel = providerSummary.GetMetrics();
            providerSummaries.Add(providerSummaryModel);
        }

        periodEndSummary.AddProviderSummaries(providerSummaries);
        var overallPeriodEndSummary = periodEndSummary.GetMetrics();

        stopwatch.Stop();
        var dataDuration = stopwatch.ElapsedMilliseconds;

        await periodEndMetricsRepository.SaveProviderSummaries(providerSummaries, overallPeriodEndSummary, cancellationToken);
        SendSummaryMetricsTelemetry(overallPeriodEndSummary, stopwatch.ElapsedMilliseconds);
        SendAllProviderMetricsTelemetry(providerSummaries, overallPeriodEndSummary);
        logger.LogInfo($"Finished building period end metrics for {academicYear}, {collectionPeriod} using job id {jobId}, DataDuration: {dataDuration} milliseconds");
    }
    catch (Exception e)
    {
        logger.LogWarning($"Error building period end metrics for {academicYear}, {collectionPeriod} using job id {jobId}. Error: {e}");
        throw;
    }
}