public async Task <int> Process(int batchSize, CancellationToken cancellationToken) { logger.LogVerbose("Processing batch."); var batch = await cache.GetPayments(batchSize, cancellationToken).ConfigureAwait(false); if (batch.Count < 1) { logger.LogVerbose("No records found to process."); return(0); } using (var scope = new TransactionScope(TransactionScopeOption.RequiresNew, TransactionScopeAsyncFlowOption.Enabled)) { try { foreach (var item in batch) { logger.LogVerbose($"Saving {typeof(T).Name} to table: {item.ToString()}"); await bulkWriter.Write(item, cancellationToken).ConfigureAwait(false); } await bulkWriter.Flush(cancellationToken).ConfigureAwait(false); scope.Complete(); } catch (Exception e) { logger.LogError($"Error performing bulk copy for model type: {typeof(T).Name}. Error: {e.Message}", e); throw; } } return(batch.Count); }
public async Task <int> Process(int batchSize, CancellationToken cancellationToken) { logger.LogVerbose("Processing batch."); var batch = await cache.GetPayments(batchSize, cancellationToken).ConfigureAwait(false); if (batch.Count < 1) { logger.LogVerbose("No records found to process."); return(0); } try { using (var scope = new TransactionScope(TransactionScopeOption.RequiresNew, TransactionScopeAsyncFlowOption.Enabled)) { foreach (var changeEvent in batch) { await SaveDataLockEvent(cancellationToken, changeEvent); foreach (var period in changeEvent.Periods) { await SaveEventPeriods(period, changeEvent, cancellationToken); } foreach (var commitment in changeEvent.CommitmentVersions) { await SaveCommitmentVersion(commitment, changeEvent, cancellationToken); } foreach (var error in changeEvent.Errors) { await SaveErrorCode(error, changeEvent, cancellationToken); } logger.LogDebug( $"Saved PriceEpisodeStatusChange event {changeEvent.DataLock.DataLockEventId} for UKPRN {changeEvent.DataLock.UKPRN}. " + $"Commitment versions: {changeEvent.CommitmentVersions.Length}, " + $"periods: {changeEvent.Periods.Length}, errors: {changeEvent.Errors.Length}"); } await dataLockEventWriter.Flush(cancellationToken); await dataLockEventCommitmentVersionWriter.Flush(cancellationToken); await dataLockEventPeriodWriter.Flush(cancellationToken); await dataLockEventErrorWriter.Flush(cancellationToken); scope.Complete(); } } catch (Exception e) { logger.LogError($"Error saving batch of DataLockStatusChanged events. Error: {e.Message}", e); throw; } return(batch.Count); }
public async Task AddProviderAdjustments(List<ProviderAdjustment> payments)
{
    foreach (var providerAdjustment in payments)
    {
        await bulkWriter.Write(providerAdjustment, default(CancellationToken));
    }

    await bulkWriter.Flush(default(CancellationToken)).ConfigureAwait(false);
}
private async Task<double> ProcessFm36Global(JobContextMessage message, int collectionPeriod, FM36Global fm36Output, string ilrFileName, CancellationToken cancellationToken)
{
    logger.LogVerbose("Now building commands.");
    var startTime = DateTimeOffset.UtcNow;
    var learners = fm36Output.Learners ?? new List<FM36Learner>();
    var commands = learners
        .Select(learner => Build(learner, message.JobId, message.SubmissionDateTimeUtc, short.Parse(fm36Output.Year), collectionPeriod, fm36Output.UKPRN, ilrFileName))
        .ToList();

    var jobStatusClient = jobClientFactory.Create();
    var messageName = typeof(ProcessLearnerCommand).FullName;
    logger.LogVerbose($"Now sending the start job command for job: {message.JobId}");
    await jobStatusClient.StartJob(
        message.JobId,
        fm36Output.UKPRN,
        message.SubmissionDateTimeUtc,
        short.Parse(fm36Output.Year),
        (byte)collectionPeriod,
        commands.Select(cmd => new GeneratedMessage
        {
            StartTime = startTime,
            MessageId = cmd.CommandId,
            MessageName = messageName
        }).ToList(),
        startTime);

    logger.LogDebug($"Now sending the process learner commands for job: {message.JobId}");
    var stopwatch = new Stopwatch();
    stopwatch.Start();
    var endpointInstance = await factory.GetEndpointInstance();

    foreach (var learnerCommand in commands)
    {
        try
        {
            if (cancellationToken.IsCancellationRequested)
            {
                logger.LogWarning($"Cancellation requested, will now stop sending learners for job: {message.JobId}");
                return stopwatch.ElapsedMilliseconds;
            }

            await endpointInstance.Send(learnerCommand).ConfigureAwait(false);

            var aims = submittedLearnerAimBuilder.Build(learnerCommand);
            await Task.WhenAll(aims.Select(aim => submittedAimWriter.Write(aim, cancellationToken))).ConfigureAwait(false);

            logger.LogVerbose($"Successfully sent ProcessLearnerCommand JobId: {learnerCommand.JobId}, LearnRefNumber: {learnerCommand.Learner.LearnRefNumber}, SubmissionTime: {message.SubmissionDateTimeUtc}, Collection Year: {fm36Output.Year}, Collection period: {collectionPeriod}");
        }
        catch (Exception ex)
        {
            logger.LogError($"Error sending the command: ProcessLearnerCommand. Job Id: {message.JobId}, Ukprn: {fm36Output.UKPRN}, Error: {ex.Message}", ex);
            throw;
        }
    }

    await submittedAimWriter.Flush(cancellationToken).ConfigureAwait(false);

    stopwatch.Stop();
    var duration = stopwatch.ElapsedMilliseconds;

    // Use the null-coalesced learners list built above so a null Learners collection cannot throw here.
    telemetry.TrackEvent("Sent All ProcessLearnerCommand Messages",
        new Dictionary<string, string>
        {
            { TelemetryKeys.Count, learners.Count.ToString() },
            { TelemetryKeys.CollectionPeriod, collectionPeriod.ToString() },
            { TelemetryKeys.AcademicYear, fm36Output.Year },
            { TelemetryKeys.JobId, message.JobId.ToString() },
            { TelemetryKeys.Ukprn, fm36Output.UKPRN.ToString() },
        },
        new Dictionary<string, double>
        {
            { TelemetryKeys.Duration, duration },
            { TelemetryKeys.Count, learners.Count },
        });

    logger.LogDebug($"Took {stopwatch.ElapsedMilliseconds}ms to send {commands.Count} Process Learner Commands for Job: {message.JobId}");
    return stopwatch.ElapsedMilliseconds;
}
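The StartJob call above passes a list of GeneratedMessage entries describing the learner commands about to be sent. A sketch of that DTO's shape, with property types inferred from the usage above and therefore to be treated as assumptions, might be:

// Sketch of the GeneratedMessage shape implied by the StartJob call above.
// Property types are inferred from usage (DateTimeOffset.UtcNow, a command id, a type full name)
// and are assumptions rather than the definitive contract.
public class GeneratedMessage
{
    public DateTimeOffset StartTime { get; set; }
    public Guid MessageId { get; set; }
    public string MessageName { get; set; }
}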
public async Task <int> Process(int batchSize, CancellationToken cancellationToken) { logger.LogVerbose("Processing batch."); var batch = await cache.GetPayments(batchSize, cancellationToken).ConfigureAwait(false); if (batch.Count < 1) { logger.LogVerbose("No records found to process."); return(0); } try { await PopulateApprenticeshipCache(batch, cancellationToken).ConfigureAwait(false); using (var scope = new TransactionScope(TransactionScopeOption.RequiresNew, TransactionScopeAsyncFlowOption.Enabled)) { foreach (var dataLockStatusChangedEvent in batch) { int savedEvents = 0, savedCommitmentVersions = 0, savedPeriods = 0, savedErrors = 0; var isError = dataLockStatusChangedEvent is DataLockStatusChangedToFailed || dataLockStatusChangedEvent is DataLockFailureChanged; var flatPeriodList = dataLockStatusChangedEvent.TransactionTypesAndPeriods.SelectMany(tp => tp.Value).ToList(); var writtenErrors = new HashSet <DataLockErrorCode>(); // there may be multiple commitment IDs when employer changes or separate errors against two employers var apprenticeshipIds = flatPeriodList.Select(p => p.ApprenticeshipId) .Concat(flatPeriodList.Where(p => p.DataLockFailures != null).SelectMany(p => p.DataLockFailures.Select(f => f.ApprenticeshipId))) .Distinct() .ToList(); foreach (var apprenticeshipId in apprenticeshipIds) { // v1 doesn't use delivery period, get one earning period for each price episode var earningPeriodsByPriceEpisode = flatPeriodList .GroupBy(p => p.PriceEpisodeIdentifier) .Select(g => g.FirstOrDefault(p => p.ApprenticeshipId == apprenticeshipId || (p.DataLockFailures != null && p.DataLockFailures.Any(f => f.ApprenticeshipId == apprenticeshipId)))) .Where(g => g != null) .ToList(); foreach (var earningPeriod in earningPeriodsByPriceEpisode) { // only records null apprenticeship when DLOCK 01 & 02 if (!apprenticeshipId.HasValue) { if (earningPeriod.DataLockFailures.Any(f => f.ApprenticeshipId.HasValue)) { continue; } } await SaveDataLockEvent(cancellationToken, dataLockStatusChangedEvent, earningPeriod, apprenticeshipId).ConfigureAwait(false); savedEvents++; if (apprenticeshipId.HasValue) { if (isError) { var writtenVersions = new HashSet <(long apprenticeshipId, long apprenticeshipPriceEpisodeId)>(); foreach (var dataLockFailure in earningPeriod.DataLockFailures.Where(f => f.ApprenticeshipId == apprenticeshipId)) { foreach (var apprenticeshipPriceEpisodeId in dataLockFailure.ApprenticeshipPriceEpisodeIds) { // there are multiple errors recorded for the same apprenticeship episode if (writtenVersions.Contains((apprenticeshipId.Value, apprenticeshipPriceEpisodeId))) { continue; } savedPeriods += await SaveCommitmentVersionAndPeriods(dataLockStatusChangedEvent, dataLockFailure.ApprenticeshipId.Value, apprenticeshipPriceEpisodeId, isError, cancellationToken); savedCommitmentVersions++; writtenVersions.Add((apprenticeshipId.Value, apprenticeshipPriceEpisodeId)); } } } else { savedPeriods += await SaveCommitmentVersionAndPeriods(dataLockStatusChangedEvent, earningPeriod.ApprenticeshipId.Value, earningPeriod.ApprenticeshipPriceEpisodeId.Value, isError, cancellationToken); savedCommitmentVersions++; } } if (earningPeriod.DataLockFailures?.Count > 0) { await SaveErrorCodes(dataLockStatusChangedEvent, earningPeriod.DataLockFailures, writtenErrors, cancellationToken).ConfigureAwait(false); savedErrors += earningPeriod.DataLockFailures.Count; } } } logger.LogDebug($"Saved DataLockStatusChanged event {dataLockStatusChangedEvent.EventId} for UKPRN {dataLockStatusChangedEvent.Ukprn}. 
Legacy events: {savedEvents}, commitment versions: {savedCommitmentVersions}, periods: {savedPeriods}, errors: {savedErrors}"); } await dataLockEventWriter.Flush(cancellationToken).ConfigureAwait(false); await dataLockEventCommitmentVersionWriter.Flush(cancellationToken).ConfigureAwait(false); await dataLockEventPeriodWriter.Flush(cancellationToken).ConfigureAwait(false); await dataLockEventErrorWriter.Flush(cancellationToken).ConfigureAwait(false); scope.Complete(); } } catch (Exception e) { logger.LogError($"Error saving batch of DataLockStatusChanged events. Error: {e.Message}", e); throw; } return(batch.Count); }
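All of the Process(batchSize, cancellationToken) methods above return the number of records handled, so a caller can drain the cache by looping until an empty batch comes back. A minimal driver sketch, assuming a processor instance and an arbitrary batch size (both hypothetical, not taken from the source):

// Minimal driver sketch: call Process until a batch comes back empty or cancellation is requested.
// The processor variable and batch size are assumptions for illustration only.
const int batchSize = 1000;
var totalProcessed = 0;
while (!cancellationToken.IsCancellationRequested)
{
    var count = await processor.Process(batchSize, cancellationToken).ConfigureAwait(false);
    if (count == 0)
    {
        break; // nothing left in the cache
    }

    totalProcessed += count;
}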